path: root/crates/ra_hir_ty/src
author    Zac Pullar-Strecker <[email protected]>  2020-08-24 10:19:53 +0100
committer Zac Pullar-Strecker <[email protected]>  2020-08-24 10:20:13 +0100
commit    7bbca7a1b3f9293d2f5cc5745199bc5f8396f2f0 (patch)
tree      bdb47765991cb973b2cd5481a088fac636bd326c /crates/ra_hir_ty/src
parent    ca464650eeaca6195891199a93f4f76cf3e7e697 (diff)
parent    e65d48d1fb3d4d91d9dc1148a7a836ff5c9a3c87 (diff)
Merge remote-tracking branch 'upstream/master' into 503-hover-doc-links
Diffstat (limited to 'crates/ra_hir_ty/src')
-rw-r--r--  crates/ra_hir_ty/src/autoderef.rs | 131
-rw-r--r--  crates/ra_hir_ty/src/db.rs | 159
-rw-r--r--  crates/ra_hir_ty/src/diagnostics.rs | 481
-rw-r--r--  crates/ra_hir_ty/src/diagnostics/expr.rs | 565
-rw-r--r--  crates/ra_hir_ty/src/diagnostics/match_check.rs | 1421
-rw-r--r--  crates/ra_hir_ty/src/diagnostics/unsafe_check.rs | 173
-rw-r--r--  crates/ra_hir_ty/src/display.rs | 631
-rw-r--r--  crates/ra_hir_ty/src/infer.rs | 800
-rw-r--r--  crates/ra_hir_ty/src/infer/coerce.rs | 197
-rw-r--r--  crates/ra_hir_ty/src/infer/expr.rs | 873
-rw-r--r--  crates/ra_hir_ty/src/infer/pat.rs | 241
-rw-r--r--  crates/ra_hir_ty/src/infer/path.rs | 287
-rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs | 474
-rw-r--r--  crates/ra_hir_ty/src/lib.rs | 1078
-rw-r--r--  crates/ra_hir_ty/src/lower.rs | 1239
-rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs | 770
-rw-r--r--  crates/ra_hir_ty/src/op.rs | 58
-rw-r--r--  crates/ra_hir_ty/src/primitive.rs | 139
-rw-r--r--  crates/ra_hir_ty/src/test_db.rs | 136
-rw-r--r--  crates/ra_hir_ty/src/tests.rs | 359
-rw-r--r--  crates/ra_hir_ty/src/tests/coercion.rs | 861
-rw-r--r--  crates/ra_hir_ty/src/tests/display_source_code.rs | 41
-rw-r--r--  crates/ra_hir_ty/src/tests/macros.rs | 787
-rw-r--r--  crates/ra_hir_ty/src/tests/method_resolution.rs | 1053
-rw-r--r--  crates/ra_hir_ty/src/tests/never_type.rs | 409
-rw-r--r--  crates/ra_hir_ty/src/tests/patterns.rs | 656
-rw-r--r--  crates/ra_hir_ty/src/tests/regression.rs | 842
-rw-r--r--  crates/ra_hir_ty/src/tests/simple.rs | 2190
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs | 3113
-rw-r--r--  crates/ra_hir_ty/src/traits.rs | 273
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk.rs | 586
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk/interner.rs | 383
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk/mapping.rs | 787
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk/tls.rs | 358
-rw-r--r--  crates/ra_hir_ty/src/utils.rs | 257
35 files changed, 0 insertions, 22808 deletions
diff --git a/crates/ra_hir_ty/src/autoderef.rs b/crates/ra_hir_ty/src/autoderef.rs
deleted file mode 100644
index c727012c6..000000000
--- a/crates/ra_hir_ty/src/autoderef.rs
+++ /dev/null
@@ -1,131 +0,0 @@
1//! In certain situations, rust automatically inserts derefs as necessary: for
2//! example, field accesses `foo.bar` still work when `foo` is actually a
3//! reference to a type with the field `bar`. This is an approximation of the
4//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
5
6use std::iter::successors;
7
8use hir_def::lang_item::LangItemTarget;
9use hir_expand::name::name;
10use log::{info, warn};
11use ra_db::CrateId;
12
13use crate::{
14 db::HirDatabase,
15 traits::{InEnvironment, Solution},
16 utils::generics,
17 BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty,
18};
19
20const AUTODEREF_RECURSION_LIMIT: usize = 10;
21
22pub fn autoderef<'a>(
23 db: &'a dyn HirDatabase,
24 krate: Option<CrateId>,
25 ty: InEnvironment<Canonical<Ty>>,
26) -> impl Iterator<Item = Canonical<Ty>> + 'a {
27 let InEnvironment { value: ty, environment } = ty;
28 successors(Some(ty), move |ty| {
29 deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() })
30 })
31 .take(AUTODEREF_RECURSION_LIMIT)
32}
33
34pub(crate) fn deref(
35 db: &dyn HirDatabase,
36 krate: CrateId,
37 ty: InEnvironment<&Canonical<Ty>>,
38) -> Option<Canonical<Ty>> {
39 if let Some(derefed) = ty.value.value.builtin_deref() {
40 Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() })
41 } else {
42 deref_by_trait(db, krate, ty)
43 }
44}
45
46fn deref_by_trait(
47 db: &dyn HirDatabase,
48 krate: CrateId,
49 ty: InEnvironment<&Canonical<Ty>>,
50) -> Option<Canonical<Ty>> {
51 let deref_trait = match db.lang_item(krate, "deref".into())? {
52 LangItemTarget::TraitId(it) => it,
53 _ => return None,
54 };
55 let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
56
57 let generic_params = generics(db.upcast(), target.into());
58 if generic_params.len() != 1 {
59 // the Target type + Deref trait should only have one generic parameter,
60 // namely Deref's Self type
61 return None;
62 }
63
64 // FIXME make the Canonical / bound var handling nicer
65
66 let parameters =
67 Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build();
68
69 // Check that the type implements Deref at all
70 let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
71 let implements_goal = Canonical {
72 kinds: ty.value.kinds.clone(),
73 value: InEnvironment {
74 value: Obligation::Trait(trait_ref),
75 environment: ty.environment.clone(),
76 },
77 };
78 if db.trait_solve(krate, implements_goal).is_none() {
79 return None;
80 }
81
82 // Now do the assoc type projection
83 let projection = super::traits::ProjectionPredicate {
84 ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())),
85 projection_ty: super::ProjectionTy { associated_ty: target, parameters },
86 };
87
88 let obligation = super::Obligation::Projection(projection);
89
90 let in_env = InEnvironment { value: obligation, environment: ty.environment };
91
92 let canonical =
93 Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General)));
94
95 let solution = db.trait_solve(krate, canonical)?;
96
97 match &solution {
98 Solution::Unique(vars) => {
99 // FIXME: vars may contain solutions for any inference variables
100 // that happened to be inside ty. To correctly handle these, we
101 // would have to pass the solution up to the inference context, but
102 // that requires a larger refactoring (especially if the deref
103 // happens during method resolution). So for the moment, we just
104            // check that we're not in the situation where we would actually
105 // need to handle the values of the additional variables, i.e.
106 // they're just being 'passed through'. In the 'standard' case where
107 // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
108 // the case.
109
110 // FIXME: if the trait solver decides to truncate the type, these
111 // assumptions will be broken. We would need to properly introduce
112 // new variables in that case
113
114 for i in 1..vars.0.kinds.len() {
115 if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
116 {
117 warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution);
118 return None;
119 }
120 }
121 Some(Canonical {
122 value: vars.0.value[vars.0.value.len() - 1].clone(),
123 kinds: vars.0.kinds.clone(),
124 })
125 }
126 Solution::Ambig(_) => {
127 info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution);
128 None
129 }
130 }
131}
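
The module documentation at the top of autoderef.rs above describes the behaviour being modelled: field accesses such as `foo.bar` still compile when `foo` is a reference, because the compiler inserts dereferences as needed, including through user-defined Deref impls. A minimal plain-Rust sketch of that behaviour (ordinary Rust, not the deleted module's API; the `Wrapper` type is purely illustrative):

// Auto-deref as seen from user code: `r.len` and `b.len` both compile because the
// compiler inserts `*` automatically, first through the built-in deref for `&Wrapper`
// and then through `Box`'s `Deref` impl, which is the case `deref_by_trait` resolves.
struct Wrapper { len: usize }

fn main() {
    let w = Wrapper { len: 3 };
    let r: &Wrapper = &w;
    let b: Box<Wrapper> = Box::new(Wrapper { len: 4 });
    assert_eq!(r.len, 3); // &Wrapper     -> Wrapper (built-in deref)
    assert_eq!(b.len, 4); // Box<Wrapper> -> Wrapper (Deref trait)
}
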
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs
deleted file mode 100644
index c773adc67..000000000
--- a/crates/ra_hir_ty/src/db.rs
+++ /dev/null
@@ -1,159 +0,0 @@
1//! FIXME: write short doc here
2
3use std::sync::Arc;
4
5use hir_def::{
6 db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId,
7 TypeParamId, VariantId,
8};
9use ra_arena::map::ArenaMap;
10use ra_db::{impl_intern_key, salsa, CrateId, Upcast};
11use ra_prof::profile;
12
13use crate::{
14 method_resolution::{InherentImpls, TraitImpls},
15 traits::chalk,
16 Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig,
17 ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
18};
19use hir_expand::name::Name;
20
21#[salsa::query_group(HirDatabaseStorage)]
22pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
23 #[salsa::invoke(infer_wait)]
24 #[salsa::transparent]
25 fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
26
27 #[salsa::invoke(crate::infer::infer_query)]
28 fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
29
30 #[salsa::invoke(crate::lower::ty_query)]
31 #[salsa::cycle(crate::lower::ty_recover)]
32 fn ty(&self, def: TyDefId) -> Binders<Ty>;
33
34 #[salsa::invoke(crate::lower::value_ty_query)]
35 fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
36
37 #[salsa::invoke(crate::lower::impl_self_ty_query)]
38 #[salsa::cycle(crate::lower::impl_self_ty_recover)]
39 fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
40
41 #[salsa::invoke(crate::lower::impl_trait_query)]
42 fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
43
44 #[salsa::invoke(crate::lower::field_types_query)]
45 fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
46
47 #[salsa::invoke(crate::callable_item_sig)]
48 fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
49
50 #[salsa::invoke(crate::lower::return_type_impl_traits)]
51 fn return_type_impl_traits(
52 &self,
53 def: FunctionId,
54 ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
55
56 #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
57 #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
58 fn generic_predicates_for_param(
59 &self,
60 param_id: TypeParamId,
61 ) -> Arc<[Binders<GenericPredicate>]>;
62
63 #[salsa::invoke(crate::lower::generic_predicates_query)]
64 fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>;
65
66 #[salsa::invoke(crate::lower::generic_defaults_query)]
67 fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>;
68
69 #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
70 fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
71
72 #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
73 fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
74
75 #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
76 fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
77
78 // Interned IDs for Chalk integration
79 #[salsa::interned]
80 fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
81 #[salsa::interned]
82 fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId;
83 #[salsa::interned]
84 fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId;
85 #[salsa::interned]
86 fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId;
87
88 #[salsa::invoke(chalk::associated_ty_data_query)]
89 fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc<chalk::AssociatedTyDatum>;
90
91 #[salsa::invoke(chalk::trait_datum_query)]
92 fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc<chalk::TraitDatum>;
93
94 #[salsa::invoke(chalk::struct_datum_query)]
95 fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc<chalk::StructDatum>;
96
97 #[salsa::invoke(crate::traits::chalk::impl_datum_query)]
98 fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc<chalk::ImplDatum>;
99
100 #[salsa::invoke(crate::traits::chalk::fn_def_datum_query)]
101 fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc<chalk::FnDefDatum>;
102
103 #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
104 fn associated_ty_value(
105 &self,
106 krate: CrateId,
107 id: chalk::AssociatedTyValueId,
108 ) -> Arc<chalk::AssociatedTyValue>;
109
110 #[salsa::invoke(crate::traits::trait_solve_query)]
111 fn trait_solve(
112 &self,
113 krate: CrateId,
114 goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
115 ) -> Option<crate::traits::Solution>;
116
117 #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)]
118 fn program_clauses_for_chalk_env(
119 &self,
120 krate: CrateId,
121 env: chalk_ir::Environment<chalk::Interner>,
122 ) -> chalk_ir::ProgramClauses<chalk::Interner>;
123}
124
125fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
126 let _p = profile("infer:wait").detail(|| match def {
127 DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
128 DefWithBodyId::StaticId(it) => {
129 db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
130 }
131 DefWithBodyId::ConstId(it) => {
132 db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
133 }
134 });
135 db.infer_query(def)
136}
137
138#[test]
139fn hir_database_is_object_safe() {
140 fn _assert_object_safe(_: &dyn HirDatabase) {}
141}
142
143#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
144pub struct GlobalTypeParamId(salsa::InternId);
145impl_intern_key!(GlobalTypeParamId);
146
147#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
148pub struct InternedOpaqueTyId(salsa::InternId);
149impl_intern_key!(InternedOpaqueTyId);
150
151#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
152pub struct ClosureId(salsa::InternId);
153impl_intern_key!(ClosureId);
154
155/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
156/// we have different IDs for struct and enum variant constructors.
157#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
158pub struct InternedCallableDefId(salsa::InternId);
159impl_intern_key!(InternedCallableDefId);
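
In db.rs above, `infer` is declared `#[salsa::transparent]` and routed through `infer_wait`, which only opens a profiling span and then forwards to the memoized `infer_query`. Stripped of salsa, the wrapper pattern looks roughly like this (a sketch with stand-in names; `profile_span` approximates what `ra_prof::profile` provides and is not a real API):

use std::time::Instant;

struct InferenceResult;

// Stand-in for ra_prof::profile: a guard that reports elapsed time when dropped.
fn profile_span(label: &'static str) -> impl Drop {
    struct Guard(&'static str, Instant);
    impl Drop for Guard {
        fn drop(&mut self) {
            eprintln!("{} took {:?}", self.0, self.1.elapsed());
        }
    }
    Guard(label, Instant::now())
}

// The real, expensive computation; in the deleted file this is the memoized
// salsa query `infer_query`.
fn infer_query(_def: u32) -> InferenceResult {
    InferenceResult
}

// The thin `infer` wrapper only adds observability, then forwards.
fn infer(def: u32) -> InferenceResult {
    let _p = profile_span("infer:wait");
    infer_query(def)
}

fn main() {
    let _result = infer(0);
}
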
diff --git a/crates/ra_hir_ty/src/diagnostics.rs b/crates/ra_hir_ty/src/diagnostics.rs
deleted file mode 100644
index f210c305a..000000000
--- a/crates/ra_hir_ty/src/diagnostics.rs
+++ /dev/null
@@ -1,481 +0,0 @@
1//! FIXME: write short doc here
2mod expr;
3mod match_check;
4mod unsafe_check;
5
6use std::any::Any;
7
8use hir_def::DefWithBodyId;
9use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
10use hir_expand::{db::AstDatabase, name::Name, HirFileId, InFile};
11use ra_prof::profile;
12use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
13use stdx::format_to;
14
15use crate::db::HirDatabase;
16
17pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields};
18
19pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) {
20 let _p = profile("validate_body");
21 let infer = db.infer(owner);
22 infer.add_diagnostics(db, owner, sink);
23 let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink);
24 validator.validate_body(db);
25 let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink);
26 validator.validate_body(db);
27}
28
29#[derive(Debug)]
30pub struct NoSuchField {
31 pub file: HirFileId,
32 pub field: AstPtr<ast::RecordExprField>,
33}
34
35impl Diagnostic for NoSuchField {
36 fn message(&self) -> String {
37 "no such field".to_string()
38 }
39
40 fn source(&self) -> InFile<SyntaxNodePtr> {
41 InFile::new(self.file, self.field.clone().into())
42 }
43
44 fn as_any(&self) -> &(dyn Any + Send + 'static) {
45 self
46 }
47}
48
49impl AstDiagnostic for NoSuchField {
50 type AST = ast::RecordExprField;
51
52 fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
53 let root = db.parse_or_expand(self.source().file_id).unwrap();
54 let node = self.source().value.to_node(&root);
55 ast::RecordExprField::cast(node).unwrap()
56 }
57}
58
59#[derive(Debug)]
60pub struct MissingFields {
61 pub file: HirFileId,
62 pub field_list: AstPtr<ast::RecordExprFieldList>,
63 pub missed_fields: Vec<Name>,
64}
65
66impl Diagnostic for MissingFields {
67 fn message(&self) -> String {
68 let mut buf = String::from("Missing structure fields:\n");
69 for field in &self.missed_fields {
70 format_to!(buf, "- {}\n", field);
71 }
72 buf
73 }
74 fn source(&self) -> InFile<SyntaxNodePtr> {
75 InFile { file_id: self.file, value: self.field_list.clone().into() }
76 }
77 fn as_any(&self) -> &(dyn Any + Send + 'static) {
78 self
79 }
80}
81
82impl AstDiagnostic for MissingFields {
83 type AST = ast::RecordExprFieldList;
84
85 fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
86 let root = db.parse_or_expand(self.source().file_id).unwrap();
87 let node = self.source().value.to_node(&root);
88 ast::RecordExprFieldList::cast(node).unwrap()
89 }
90}
91
92#[derive(Debug)]
93pub struct MissingPatFields {
94 pub file: HirFileId,
95 pub field_list: AstPtr<ast::RecordFieldPatList>,
96 pub missed_fields: Vec<Name>,
97}
98
99impl Diagnostic for MissingPatFields {
100 fn message(&self) -> String {
101 let mut buf = String::from("Missing structure fields:\n");
102 for field in &self.missed_fields {
103 format_to!(buf, "- {}\n", field);
104 }
105 buf
106 }
107 fn source(&self) -> InFile<SyntaxNodePtr> {
108 InFile { file_id: self.file, value: self.field_list.clone().into() }
109 }
110 fn as_any(&self) -> &(dyn Any + Send + 'static) {
111 self
112 }
113}
114
115#[derive(Debug)]
116pub struct MissingMatchArms {
117 pub file: HirFileId,
118 pub match_expr: AstPtr<ast::Expr>,
119 pub arms: AstPtr<ast::MatchArmList>,
120}
121
122impl Diagnostic for MissingMatchArms {
123 fn message(&self) -> String {
124 String::from("Missing match arm")
125 }
126 fn source(&self) -> InFile<SyntaxNodePtr> {
127 InFile { file_id: self.file, value: self.match_expr.clone().into() }
128 }
129 fn as_any(&self) -> &(dyn Any + Send + 'static) {
130 self
131 }
132}
133
134#[derive(Debug)]
135pub struct MissingOkInTailExpr {
136 pub file: HirFileId,
137 pub expr: AstPtr<ast::Expr>,
138}
139
140impl Diagnostic for MissingOkInTailExpr {
141 fn message(&self) -> String {
142 "wrap return expression in Ok".to_string()
143 }
144 fn source(&self) -> InFile<SyntaxNodePtr> {
145 InFile { file_id: self.file, value: self.expr.clone().into() }
146 }
147 fn as_any(&self) -> &(dyn Any + Send + 'static) {
148 self
149 }
150}
151
152impl AstDiagnostic for MissingOkInTailExpr {
153 type AST = ast::Expr;
154
155 fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
156 let root = db.parse_or_expand(self.file).unwrap();
157 let node = self.source().value.to_node(&root);
158 ast::Expr::cast(node).unwrap()
159 }
160}
161
162#[derive(Debug)]
163pub struct BreakOutsideOfLoop {
164 pub file: HirFileId,
165 pub expr: AstPtr<ast::Expr>,
166}
167
168impl Diagnostic for BreakOutsideOfLoop {
169 fn message(&self) -> String {
170 "break outside of loop".to_string()
171 }
172 fn source(&self) -> InFile<SyntaxNodePtr> {
173 InFile { file_id: self.file, value: self.expr.clone().into() }
174 }
175 fn as_any(&self) -> &(dyn Any + Send + 'static) {
176 self
177 }
178}
179
180impl AstDiagnostic for BreakOutsideOfLoop {
181 type AST = ast::Expr;
182
183 fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
184 let root = db.parse_or_expand(self.file).unwrap();
185 let node = self.source().value.to_node(&root);
186 ast::Expr::cast(node).unwrap()
187 }
188}
189
190#[derive(Debug)]
191pub struct MissingUnsafe {
192 pub file: HirFileId,
193 pub expr: AstPtr<ast::Expr>,
194}
195
196impl Diagnostic for MissingUnsafe {
197 fn message(&self) -> String {
198 format!("This operation is unsafe and requires an unsafe function or block")
199 }
200 fn source(&self) -> InFile<SyntaxNodePtr> {
201 InFile { file_id: self.file, value: self.expr.clone().into() }
202 }
203 fn as_any(&self) -> &(dyn Any + Send + 'static) {
204 self
205 }
206}
207
208impl AstDiagnostic for MissingUnsafe {
209 type AST = ast::Expr;
210
211 fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
212 let root = db.parse_or_expand(self.source().file_id).unwrap();
213 let node = self.source().value.to_node(&root);
214 ast::Expr::cast(node).unwrap()
215 }
216}
217
218#[derive(Debug)]
219pub struct MismatchedArgCount {
220 pub file: HirFileId,
221 pub call_expr: AstPtr<ast::Expr>,
222 pub expected: usize,
223 pub found: usize,
224}
225
226impl Diagnostic for MismatchedArgCount {
227 fn message(&self) -> String {
228 let s = if self.expected == 1 { "" } else { "s" };
229 format!("Expected {} argument{}, found {}", self.expected, s, self.found)
230 }
231 fn source(&self) -> InFile<SyntaxNodePtr> {
232 InFile { file_id: self.file, value: self.call_expr.clone().into() }
233 }
234 fn as_any(&self) -> &(dyn Any + Send + 'static) {
235 self
236 }
237 fn is_experimental(&self) -> bool {
238 true
239 }
240}
241
242impl AstDiagnostic for MismatchedArgCount {
243 type AST = ast::CallExpr;
244 fn ast(&self, db: &dyn AstDatabase) -> Self::AST {
245 let root = db.parse_or_expand(self.source().file_id).unwrap();
246 let node = self.source().value.to_node(&root);
247 ast::CallExpr::cast(node).unwrap()
248 }
249}
250
251#[cfg(test)]
252mod tests {
253 use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId};
254 use hir_expand::diagnostics::{Diagnostic, DiagnosticSinkBuilder};
255 use ra_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt};
256 use ra_syntax::{TextRange, TextSize};
257 use rustc_hash::FxHashMap;
258
259 use crate::{diagnostics::validate_body, test_db::TestDB};
260
261 impl TestDB {
262 fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
263 let crate_graph = self.crate_graph();
264 for krate in crate_graph.iter() {
265 let crate_def_map = self.crate_def_map(krate);
266
267 let mut fns = Vec::new();
268 for (module_id, _) in crate_def_map.modules.iter() {
269 for decl in crate_def_map[module_id].scope.declarations() {
270 if let ModuleDefId::FunctionId(f) = decl {
271 fns.push(f)
272 }
273 }
274
275 for impl_id in crate_def_map[module_id].scope.impls() {
276 let impl_data = self.impl_data(impl_id);
277 for item in impl_data.items.iter() {
278 if let AssocItemId::FunctionId(f) = item {
279 fns.push(*f)
280 }
281 }
282 }
283 }
284
285 for f in fns {
286 let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
287 validate_body(self, f.into(), &mut sink);
288 }
289 }
290 }
291 }
292
293 pub(crate) fn check_diagnostics(ra_fixture: &str) {
294 let db = TestDB::with_files(ra_fixture);
295 let annotations = db.extract_annotations();
296
297 let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
298 db.diagnostics(|d| {
299            // FIXME: macros...
300 let file_id = d.source().file_id.original_file(&db);
301 let range = d.syntax_node(&db).text_range();
302 let message = d.message().to_owned();
303 actual.entry(file_id).or_default().push((range, message));
304 });
305
306 for (file_id, diags) in actual.iter_mut() {
307 diags.sort_by_key(|it| it.0.start());
308 let text = db.file_text(*file_id);
309 // For multiline spans, place them on line start
310 for (range, content) in diags {
311 if text[*range].contains('\n') {
312 *range = TextRange::new(range.start(), range.start() + TextSize::from(1));
313 *content = format!("... {}", content);
314 }
315 }
316 }
317
318 assert_eq!(annotations, actual);
319 }
320
321 #[test]
322 fn no_such_field_diagnostics() {
323 check_diagnostics(
324 r#"
325struct S { foo: i32, bar: () }
326impl S {
327 fn new() -> S {
328 S {
329 //^... Missing structure fields:
330 //| - bar
331 foo: 92,
332 baz: 62,
333 //^^^^^^^ no such field
334 }
335 }
336}
337"#,
338 );
339 }
340 #[test]
341 fn no_such_field_with_feature_flag_diagnostics() {
342 check_diagnostics(
343 r#"
344//- /lib.rs crate:foo cfg:feature=foo
345struct MyStruct {
346 my_val: usize,
347 #[cfg(feature = "foo")]
348 bar: bool,
349}
350
351impl MyStruct {
352 #[cfg(feature = "foo")]
353 pub(crate) fn new(my_val: usize, bar: bool) -> Self {
354 Self { my_val, bar }
355 }
356 #[cfg(not(feature = "foo"))]
357 pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
358 Self { my_val }
359 }
360}
361"#,
362 );
363 }
364
365 #[test]
366 fn no_such_field_enum_with_feature_flag_diagnostics() {
367 check_diagnostics(
368 r#"
369//- /lib.rs crate:foo cfg:feature=foo
370enum Foo {
371 #[cfg(not(feature = "foo"))]
372 Buz,
373 #[cfg(feature = "foo")]
374 Bar,
375 Baz
376}
377
378fn test_fn(f: Foo) {
379 match f {
380 Foo::Bar => {},
381 Foo::Baz => {},
382 }
383}
384"#,
385 );
386 }
387
388 #[test]
389 fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
390 check_diagnostics(
391 r#"
392//- /lib.rs crate:foo cfg:feature=foo
393struct S {
394 #[cfg(feature = "foo")]
395 foo: u32,
396 #[cfg(not(feature = "foo"))]
397 bar: u32,
398}
399
400impl S {
401 #[cfg(feature = "foo")]
402 fn new(foo: u32) -> Self {
403 Self { foo }
404 }
405 #[cfg(not(feature = "foo"))]
406 fn new(bar: u32) -> Self {
407 Self { bar }
408 }
409 fn new2(bar: u32) -> Self {
410 #[cfg(feature = "foo")]
411 { Self { foo: bar } }
412 #[cfg(not(feature = "foo"))]
413 { Self { bar } }
414 }
415 fn new2(val: u32) -> Self {
416 Self {
417 #[cfg(feature = "foo")]
418 foo: val,
419 #[cfg(not(feature = "foo"))]
420 bar: val,
421 }
422 }
423}
424"#,
425 );
426 }
427
428 #[test]
429 fn no_such_field_with_type_macro() {
430 check_diagnostics(
431 r#"
432macro_rules! Type { () => { u32 }; }
433struct Foo { bar: Type![] }
434
435impl Foo {
436 fn new() -> Self {
437 Foo { bar: 0 }
438 }
439}
440"#,
441 );
442 }
443
444 #[test]
445 fn missing_record_pat_field_diagnostic() {
446 check_diagnostics(
447 r#"
448struct S { foo: i32, bar: () }
449fn baz(s: S) {
450 let S { foo: _ } = s;
451 //^^^^^^^^^^ Missing structure fields:
452 // | - bar
453}
454"#,
455 );
456 }
457
458 #[test]
459 fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
460 check_diagnostics(
461 r"
462struct S { foo: i32, bar: () }
463fn baz(s: S) -> i32 {
464 match s {
465 S { foo, .. } => foo,
466 }
467}
468",
469 )
470 }
471
472 #[test]
473 fn break_outside_of_loop() {
474 check_diagnostics(
475 r#"
476fn foo() { break; }
477 //^^^^^ break outside of loop
478"#,
479 );
480 }
481}
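
Every diagnostic in diagnostics.rs above follows the same shape: a plain struct holding a `HirFileId` plus an `AstPtr` to the offending node, a `Diagnostic` impl providing `message`/`source`/`as_any`, and, where a typed node helps with fixes, an `AstDiagnostic` impl that re-resolves the pointer. A condensed, self-contained sketch of that shape using stand-in types (the real traits live in hir_expand and carry more machinery; `UnusedBinding` is hypothetical, not one of the diagnostics above):

use std::any::Any;

// Stand-ins for the hir_expand / ra_syntax types the real trait uses.
type HirFileId = u32;
#[derive(Clone, Debug)]
struct SyntaxNodePtr;
struct InFile<T> { file_id: HirFileId, value: T }

// Simplified stand-in for hir_expand::diagnostics::Diagnostic.
trait Diagnostic {
    fn message(&self) -> String;
    fn source(&self) -> InFile<SyntaxNodePtr>;
    fn as_any(&self) -> &(dyn Any + Send + 'static);
}

// A hypothetical diagnostic, shaped like NoSuchField / MissingUnsafe above.
#[derive(Debug)]
struct UnusedBinding {
    file: HirFileId,
    binding: SyntaxNodePtr,
}

impl Diagnostic for UnusedBinding {
    fn message(&self) -> String {
        "binding is never used".to_string()
    }
    fn source(&self) -> InFile<SyntaxNodePtr> {
        InFile { file_id: self.file, value: self.binding.clone() }
    }
    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}

fn main() {
    let d = UnusedBinding { file: 0, binding: SyntaxNodePtr };
    let loc = d.source();
    println!("{} (file {})", d.message(), loc.file_id);
}
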
diff --git a/crates/ra_hir_ty/src/diagnostics/expr.rs b/crates/ra_hir_ty/src/diagnostics/expr.rs
deleted file mode 100644
index f0e0f2988..000000000
--- a/crates/ra_hir_ty/src/diagnostics/expr.rs
+++ /dev/null
@@ -1,565 +0,0 @@
1//! FIXME: write short doc here
2
3use std::sync::Arc;
4
5use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId};
6use hir_expand::diagnostics::DiagnosticSink;
7use ra_syntax::{ast, AstPtr};
8use rustc_hash::FxHashSet;
9
10use crate::{
11 db::HirDatabase,
12 diagnostics::{
13 match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness},
14 MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields,
15 },
16 utils::variant_data,
17 ApplicationTy, InferenceResult, Ty, TypeCtor,
18};
19
20pub use hir_def::{
21 body::{
22 scope::{ExprScopes, ScopeEntry, ScopeId},
23 Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource,
24 },
25 expr::{
26 ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
27 MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
28 },
29 src::HasSource,
30 LocalFieldId, Lookup, VariantId,
31};
32
33pub(super) struct ExprValidator<'a, 'b: 'a> {
34 owner: DefWithBodyId,
35 infer: Arc<InferenceResult>,
36 sink: &'a mut DiagnosticSink<'b>,
37}
38
39impl<'a, 'b> ExprValidator<'a, 'b> {
40 pub(super) fn new(
41 owner: DefWithBodyId,
42 infer: Arc<InferenceResult>,
43 sink: &'a mut DiagnosticSink<'b>,
44 ) -> ExprValidator<'a, 'b> {
45 ExprValidator { owner, infer, sink }
46 }
47
48 pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
49 let body = db.body(self.owner.into());
50
51 for (id, expr) in body.exprs.iter() {
52 if let Some((variant_def, missed_fields, true)) =
53 record_literal_missing_fields(db, &self.infer, id, expr)
54 {
55 self.create_record_literal_missing_fields_diagnostic(
56 id,
57 db,
58 variant_def,
59 missed_fields,
60 );
61 }
62
63 match expr {
64 Expr::Match { expr, arms } => {
65 self.validate_match(id, *expr, arms, db, self.infer.clone());
66 }
67 Expr::Call { .. } | Expr::MethodCall { .. } => {
68 self.validate_call(db, id, expr);
69 }
70 _ => {}
71 }
72 }
73 for (id, pat) in body.pats.iter() {
74 if let Some((variant_def, missed_fields, true)) =
75 record_pattern_missing_fields(db, &self.infer, id, pat)
76 {
77 self.create_record_pattern_missing_fields_diagnostic(
78 id,
79 db,
80 variant_def,
81 missed_fields,
82 );
83 }
84 }
85 let body_expr = &body[body.body_expr];
86 if let Expr::Block { tail: Some(t), .. } = body_expr {
87 self.validate_results_in_tail_expr(body.body_expr, *t, db);
88 }
89 }
90
91 fn create_record_literal_missing_fields_diagnostic(
92 &mut self,
93 id: ExprId,
94 db: &dyn HirDatabase,
95 variant_def: VariantId,
96 missed_fields: Vec<LocalFieldId>,
97 ) {
98 // XXX: only look at source_map if we do have missing fields
99 let (_, source_map) = db.body_with_source_map(self.owner.into());
100
101 if let Ok(source_ptr) = source_map.expr_syntax(id) {
102 let root = source_ptr.file_syntax(db.upcast());
103 if let ast::Expr::RecordExpr(record_lit) = &source_ptr.value.to_node(&root) {
104 if let Some(field_list) = record_lit.record_expr_field_list() {
105 let variant_data = variant_data(db.upcast(), variant_def);
106 let missed_fields = missed_fields
107 .into_iter()
108 .map(|idx| variant_data.fields()[idx].name.clone())
109 .collect();
110 self.sink.push(MissingFields {
111 file: source_ptr.file_id,
112 field_list: AstPtr::new(&field_list),
113 missed_fields,
114 })
115 }
116 }
117 }
118 }
119
120 fn create_record_pattern_missing_fields_diagnostic(
121 &mut self,
122 id: PatId,
123 db: &dyn HirDatabase,
124 variant_def: VariantId,
125 missed_fields: Vec<LocalFieldId>,
126 ) {
127 // XXX: only look at source_map if we do have missing fields
128 let (_, source_map) = db.body_with_source_map(self.owner.into());
129
130 if let Ok(source_ptr) = source_map.pat_syntax(id) {
131 if let Some(expr) = source_ptr.value.as_ref().left() {
132 let root = source_ptr.file_syntax(db.upcast());
133 if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
134 if let Some(field_list) = record_pat.record_field_pat_list() {
135 let variant_data = variant_data(db.upcast(), variant_def);
136 let missed_fields = missed_fields
137 .into_iter()
138 .map(|idx| variant_data.fields()[idx].name.clone())
139 .collect();
140 self.sink.push(MissingPatFields {
141 file: source_ptr.file_id,
142 field_list: AstPtr::new(&field_list),
143 missed_fields,
144 })
145 }
146 }
147 }
148 }
149 }
150
151 fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> {
152 // Check that the number of arguments matches the number of parameters.
153
154 // FIXME: Due to shortcomings in the current type system implementation, only emit this
155 // diagnostic if there are no type mismatches in the containing function.
156 if self.infer.type_mismatches.iter().next().is_some() {
157 return Some(());
158 }
159
160 let is_method_call = matches!(expr, Expr::MethodCall { .. });
161 let (sig, args) = match expr {
162 Expr::Call { callee, args } => {
163 let callee = &self.infer.type_of_expr[*callee];
164 let sig = callee.callable_sig(db)?;
165 (sig, args.clone())
166 }
167 Expr::MethodCall { receiver, args, .. } => {
168 let mut args = args.clone();
169 args.insert(0, *receiver);
170
171 // FIXME: note that we erase information about substs here. This
172 // is not right, but, luckily, doesn't matter as we care only
173 // about the number of params
174 let callee = self.infer.method_resolution(call_id)?;
175 let sig = db.callable_item_signature(callee.into()).value;
176
177 (sig, args)
178 }
179 _ => return None,
180 };
181
182 if sig.is_varargs {
183 return None;
184 }
185
186 let params = sig.params();
187
188 let mut param_count = params.len();
189 let mut arg_count = args.len();
190
191 if arg_count != param_count {
192 let (_, source_map) = db.body_with_source_map(self.owner.into());
193 if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
194 if is_method_call {
195 param_count -= 1;
196 arg_count -= 1;
197 }
198 self.sink.push(MismatchedArgCount {
199 file: source_ptr.file_id,
200 call_expr: source_ptr.value,
201 expected: param_count,
202 found: arg_count,
203 });
204 }
205 }
206
207 None
208 }
209
210 fn validate_match(
211 &mut self,
212 id: ExprId,
213 match_expr: ExprId,
214 arms: &[MatchArm],
215 db: &dyn HirDatabase,
216 infer: Arc<InferenceResult>,
217 ) {
218 let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
219 db.body_with_source_map(self.owner.into());
220
221 let match_expr_ty = match infer.type_of_expr.get(match_expr) {
222 Some(ty) => ty,
223 // If we can't resolve the type of the match expression
224 // we cannot perform exhaustiveness checks.
225 None => return,
226 };
227
228 let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db };
229 let pats = arms.iter().map(|arm| arm.pat);
230
231 let mut seen = Matrix::empty();
232 for pat in pats {
233 if let Some(pat_ty) = infer.type_of_pat.get(pat) {
234 // We only include patterns whose type matches the type
235 // of the match expression. If we had a InvalidMatchArmPattern
236 // diagnostic or similar we could raise that in an else
237 // block here.
238 //
239 // When comparing the types, we also have to consider that rustc
240 // will automatically de-reference the match expression type if
241 // necessary.
242 //
243 // FIXME we should use the type checker for this.
244 if pat_ty == match_expr_ty
245 || match_expr_ty
246 .as_reference()
247 .map(|(match_expr_ty, _)| match_expr_ty == pat_ty)
248 .unwrap_or(false)
249 {
250 // If we had a NotUsefulMatchArm diagnostic, we could
251 // check the usefulness of each pattern as we added it
252 // to the matrix here.
253 let v = PatStack::from_pattern(pat);
254 seen.push(&cx, v);
255 continue;
256 }
257 }
258
259 // If we can't resolve the type of a pattern, or the pattern type doesn't
260 // fit the match expression, we skip this diagnostic. Skipping the entire
261 // diagnostic rather than just not including this match arm is preferred
262 // to avoid the chance of false positives.
263 return;
264 }
265
266 match is_useful(&cx, &seen, &PatStack::from_wild()) {
267 Ok(Usefulness::Useful) => (),
268 // if a wildcard pattern is not useful, then all patterns are covered
269 Ok(Usefulness::NotUseful) => return,
270 // this path is for unimplemented checks, so we err on the side of not
271 // reporting any errors
272 _ => return,
273 }
274
275 if let Ok(source_ptr) = source_map.expr_syntax(id) {
276 let root = source_ptr.file_syntax(db.upcast());
277 if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
278 if let (Some(match_expr), Some(arms)) =
279 (match_expr.expr(), match_expr.match_arm_list())
280 {
281 self.sink.push(MissingMatchArms {
282 file: source_ptr.file_id,
283 match_expr: AstPtr::new(&match_expr),
284 arms: AstPtr::new(&arms),
285 })
286 }
287 }
288 }
289 }
290
291 fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
292 // the mismatch will be on the whole block currently
293 let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
294 Some(m) => m,
295 None => return,
296 };
297
298 let core_result_path = path![core::result::Result];
299
300 let resolver = self.owner.resolver(db.upcast());
301 let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) {
302 Some(it) => it,
303 _ => return,
304 };
305
306 let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum));
307 let params = match &mismatch.expected {
308 Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => {
309 parameters
310 }
311 _ => return,
312 };
313
314 if params.len() == 2 && params[0] == mismatch.actual {
315 let (_, source_map) = db.body_with_source_map(self.owner.into());
316
317 if let Ok(source_ptr) = source_map.expr_syntax(id) {
318 self.sink
319 .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value });
320 }
321 }
322 }
323}
324
325pub fn record_literal_missing_fields(
326 db: &dyn HirDatabase,
327 infer: &InferenceResult,
328 id: ExprId,
329 expr: &Expr,
330) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
331    let (fields, exhaustive) = match expr {
332 Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
333 _ => return None,
334 };
335
336 let variant_def = infer.variant_resolution_for_expr(id)?;
337 if let VariantId::UnionId(_) = variant_def {
338 return None;
339 }
340
341 let variant_data = variant_data(db.upcast(), variant_def);
342
343 let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
344 let missed_fields: Vec<LocalFieldId> = variant_data
345 .fields()
346 .iter()
347 .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
348 .collect();
349 if missed_fields.is_empty() {
350 return None;
351 }
352    Some((variant_def, missed_fields, exhaustive))
353}
354
355pub fn record_pattern_missing_fields(
356 db: &dyn HirDatabase,
357 infer: &InferenceResult,
358 id: PatId,
359 pat: &Pat,
360) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
361 let (fields, exhaustive) = match pat {
362 Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
363 _ => return None,
364 };
365
366 let variant_def = infer.variant_resolution_for_pat(id)?;
367 if let VariantId::UnionId(_) = variant_def {
368 return None;
369 }
370
371 let variant_data = variant_data(db.upcast(), variant_def);
372
373 let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
374 let missed_fields: Vec<LocalFieldId> = variant_data
375 .fields()
376 .iter()
377 .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
378 .collect();
379 if missed_fields.is_empty() {
380 return None;
381 }
382 Some((variant_def, missed_fields, exhaustive))
383}
384
385#[cfg(test)]
386mod tests {
387 use crate::diagnostics::tests::check_diagnostics;
388
389 #[test]
390 fn simple_free_fn_zero() {
391 check_diagnostics(
392 r#"
393fn zero() {}
394fn f() { zero(1); }
395 //^^^^^^^ Expected 0 arguments, found 1
396"#,
397 );
398
399 check_diagnostics(
400 r#"
401fn zero() {}
402fn f() { zero(); }
403"#,
404 );
405 }
406
407 #[test]
408 fn simple_free_fn_one() {
409 check_diagnostics(
410 r#"
411fn one(arg: u8) {}
412fn f() { one(); }
413 //^^^^^ Expected 1 argument, found 0
414"#,
415 );
416
417 check_diagnostics(
418 r#"
419fn one(arg: u8) {}
420fn f() { one(1); }
421"#,
422 );
423 }
424
425 #[test]
426 fn method_as_fn() {
427 check_diagnostics(
428 r#"
429struct S;
430impl S { fn method(&self) {} }
431
432fn f() {
433 S::method();
434} //^^^^^^^^^^^ Expected 1 argument, found 0
435"#,
436 );
437
438 check_diagnostics(
439 r#"
440struct S;
441impl S { fn method(&self) {} }
442
443fn f() {
444 S::method(&S);
445 S.method();
446}
447"#,
448 );
449 }
450
451 #[test]
452 fn method_with_arg() {
453 check_diagnostics(
454 r#"
455struct S;
456impl S { fn method(&self, arg: u8) {} }
457
458 fn f() {
459 S.method();
460 } //^^^^^^^^^^ Expected 1 argument, found 0
461 "#,
462 );
463
464 check_diagnostics(
465 r#"
466struct S;
467impl S { fn method(&self, arg: u8) {} }
468
469fn f() {
470 S::method(&S, 0);
471 S.method(1);
472}
473"#,
474 );
475 }
476
477 #[test]
478 fn tuple_struct() {
479 check_diagnostics(
480 r#"
481struct Tup(u8, u16);
482fn f() {
483 Tup(0);
484} //^^^^^^ Expected 2 arguments, found 1
485"#,
486 )
487 }
488
489 #[test]
490 fn enum_variant() {
491 check_diagnostics(
492 r#"
493enum En { Variant(u8, u16), }
494fn f() {
495 En::Variant(0);
496} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1
497"#,
498 )
499 }
500
501 #[test]
502 fn enum_variant_type_macro() {
503 check_diagnostics(
504 r#"
505macro_rules! Type {
506 () => { u32 };
507}
508enum Foo {
509 Bar(Type![])
510}
511impl Foo {
512 fn new() {
513 Foo::Bar(0);
514 Foo::Bar(0, 1);
515 //^^^^^^^^^^^^^^ Expected 1 argument, found 2
516 Foo::Bar();
517 //^^^^^^^^^^ Expected 1 argument, found 0
518 }
519}
520 "#,
521 );
522 }
523
524 #[test]
525 fn varargs() {
526 check_diagnostics(
527 r#"
528extern "C" {
529 fn fixed(fixed: u8);
530 fn varargs(fixed: u8, ...);
531 fn varargs2(...);
532}
533
534fn f() {
535 unsafe {
536 fixed(0);
537 fixed(0, 1);
538 //^^^^^^^^^^^ Expected 1 argument, found 2
539 varargs(0);
540 varargs(0, 1);
541 varargs2();
542 varargs2(0);
543 varargs2(0, 1);
544 }
545}
546 "#,
547 )
548 }
549
550 #[test]
551 fn arg_count_lambda() {
552 check_diagnostics(
553 r#"
554fn main() {
555 let f = |()| ();
556 f();
557 //^^^ Expected 1 argument, found 0
558 f(());
559 f((), ());
560 //^^^^^^^^^ Expected 1 argument, found 2
561}
562"#,
563 )
564 }
565}
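
Among the checks in expr.rs above, `validate_results_in_tail_expr` handles the case where a body's declared return type is `core::result::Result<T, E>` but the tail expression already has type `T`; the emitted `MissingOkInTailExpr` diagnostic suggests wrapping the value in `Ok`. Roughly the kind of code it targets, shown together with the repaired version so the sketch compiles:

// A tail expression that forgets the Ok wrapper would trigger the diagnostic:
//
//     fn add_one(x: i32) -> Result<i32, ()> {
//         x + 1   // expected Result<i32, ()>, found i32 -> "wrap return expression in Ok"
//     }
//
// The repaired version, which type-checks:
fn add_one(x: i32) -> Result<i32, ()> {
    Ok(x + 1)
}

fn main() {
    assert_eq!(add_one(1), Ok(2));
}
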
diff --git a/crates/ra_hir_ty/src/diagnostics/match_check.rs b/crates/ra_hir_ty/src/diagnostics/match_check.rs
deleted file mode 100644
index 507edcb7d..000000000
--- a/crates/ra_hir_ty/src/diagnostics/match_check.rs
+++ /dev/null
@@ -1,1421 +0,0 @@
1//! This module implements match statement exhaustiveness checking and usefulness checking
2//! for match arms.
3//!
4//! It is modeled on the rustc module `librustc_mir_build::hair::pattern::_match`, which
5//! contains very detailed documentation about the algorithms used here. I've duplicated
6//! most of that documentation below.
7//!
8//! This file includes the logic for exhaustiveness and usefulness checking for
9//! pattern-matching. Specifically, given a list of patterns for a type, we can
10//! tell whether:
11//! - (a) the patterns cover every possible constructor for the type (exhaustiveness).
12//! - (b) each pattern is necessary (usefulness).
13//!
14//! The algorithm implemented here is a modified version of the one described in
15//! <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
16//! However, to save future implementors from reading the original paper, we
17//! summarise the algorithm here to hopefully save time and be a little clearer
18//! (without being so rigorous).
19//!
20//! The core of the algorithm revolves about a "usefulness" check. In particular, we
21//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as
22//! a matrix). `U(P, p)` represents whether, given an existing list of patterns
23//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously-
24//! uncovered values of the type).
25//!
26//! If we have this predicate, then we can easily compute both exhaustiveness of an
27//! entire set of patterns and the individual usefulness of each one.
28//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard
29//! match doesn't increase the number of values we're matching)
30//! - (b) a pattern `P_i` is not useful if `U(P[0..=(i-1)], P_i)` is false (i.e., adding a
31//! pattern to those that have come before it doesn't increase the number of values
32//! we're matching).
33//!
34//! During the course of the algorithm, the rows of the matrix won't just be individual patterns,
35//! but rather partially-deconstructed patterns in the form of a list of patterns. The paper
36//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the
37//! new pattern `p`.
38//!
39//! For example, say we have the following:
40//!
41//! ```ignore
42//! // x: (Option<bool>, Result<()>)
43//! match x {
44//! (Some(true), _) => (),
45//! (None, Err(())) => (),
46//! (None, Err(_)) => (),
47//! }
48//! ```
49//!
50//! Here, the matrix `P` starts as:
51//!
52//! ```text
53//! [
54//! [(Some(true), _)],
55//! [(None, Err(()))],
56//! [(None, Err(_))],
57//! ]
58//! ```
59//!
60//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering
61//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because
62//! all the values it covers are already covered by row 2.
63//!
64//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of
65//! the stack at any given point, and we can pop or apply constructors to get new pattern-stacks.
66//! To match the paper, the top of the stack is at the beginning / on the left.
67//!
68//! There are two important operations on pattern-stacks necessary to understand the algorithm:
69//!
70//! 1. We can pop a given constructor off the top of a stack. This operation is called
71//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or
72//! `None`) and `p` a pattern-stack.
73//! If the pattern on top of the stack can cover `c`, this removes the constructor and
74//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns.
75//! Otherwise the pattern-stack is discarded.
76//! This essentially filters those pattern-stacks whose top covers the constructor `c` and
77//! discards the others.
78//!
79//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we
80//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the
81//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get
82//! nothing back.
83//!
84//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1`
85//! on top of the stack, and we have four cases:
86//!
87//! * 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We push onto
88//! the stack the arguments of this constructor, and return the result:
89//!
90//! r_1, .., r_a, p_2, .., p_n
91//!
92//! * 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and return
93//! nothing.
94//! * 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has
95//! arguments (its arity), and return the resulting stack:
96//!
97//! _, .., _, p_2, .., p_n
98//!
99//! * 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack:
100//!
101//! S(c, (r_1, p_2, .., p_n))
102//! S(c, (r_2, p_2, .., p_n))
103//!
104//! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is
105//! a pattern-stack.
106//! This is used when we know there are missing constructor cases, but there might be
107//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check
108//! all its *other* components.
109//!
110//! It is computed as follows. We look at the pattern `p_1` on top of the stack,
111//! and we have three cases:
112//! * 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
113//! * 1.2. `p_1 = _`. We return the rest of the stack:
114//!
115//! p_2, .., p_n
116//!
117//! * 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack:
118//!
119//! D((r_1, p_2, .., p_n))
120//! D((r_2, p_2, .., p_n))
121//!
122//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the
123//! exhaustive integer matching rules, so they're written here for posterity.
124//!
125//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by
126//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with
127//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard.
128//!
129//!
130//! The algorithm for computing `U`
131//! -------------------------------
132//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns).
133//! That means we're going to check the components from left-to-right, so the algorithm
134//! operates principally on the first component of the matrix and new pattern-stack `p`.
135//! This algorithm is realised in the `is_useful` function.
136//!
137//! Base case (`n = 0`, i.e., an empty tuple pattern):
138//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then
139//! `U(P, p)` is false.
140//! - Otherwise, `P` must be empty, so `U(P, p)` is true.
141//!
142//! Inductive step (`n > 0`, i.e., whether there's at least one column [which may then be expanded
143//! into further columns later]). We're going to match on the top of the new pattern-stack, `p_1`:
144//!
145//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern.
146//! Then, the usefulness of `p_1` can be reduced to whether it is useful when
147//! we ignore all the patterns in the first column of `P` that involve other constructors.
148//! This is where `S(c, P)` comes in:
149//!
150//! ```text
151//! U(P, p) := U(S(c, P), S(c, p))
152//! ```
153//!
154//! This special case is handled in `is_useful_specialized`.
155//!
156//! For example, if `P` is:
157//!
158//! ```text
159//! [
160//! [Some(true), _],
161//! [None, 0],
162//! ]
163//! ```
164//!
165//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only
166//! matches values that row 2 doesn't. For row 1 however, we need to dig into the
167//! arguments of `Some` to know whether some new value is covered. So we compute
168//! `U([[true, _]], [false, 0])`.
169//!
170//! - If `p_1 == _`, then we look at the list of constructors that appear in the first component of
171//! the rows of `P`:
172//! - If there are some constructors that aren't present, then we might think that the
173//! wildcard `_` is useful, since it covers those constructors that weren't covered
174//! before.
175//! That's almost correct, but only works if there were no wildcards in those first
176//! components. So we need to check that `p` is useful with respect to the rows that
177//! start with a wildcard, if there are any. This is where `D` comes in:
178//! `U(P, p) := U(D(P), D(p))`
179//!
180//! For example, if `P` is:
181//! ```text
182//! [
183//! [_, true, _],
184//! [None, false, 1],
185//! ]
186//! ```
187//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we
188//! only had row 2, we'd know that `p` is useful. However row 1 starts with a
189//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`.
190//!
191//! - Otherwise, all possible constructors (for the relevant type) are present. In this
192//! case we must check whether the wildcard pattern covers any unmatched value. For
193//! that, we can think of the `_` pattern as a big OR-pattern that covers all
194//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for
195//! example. The wildcard pattern is useful in this case if it is useful when
196//! specialized to one of the possible constructors. So we compute:
197//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))`
198//!
199//! For example, if `P` is:
200//! ```text
201//! [
202//! [Some(true), _],
203//! [None, false],
204//! ]
205//! ```
206//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first
207//! components of `P`. We will therefore try popping both constructors in turn: we
208//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]],
209//! [false])` for the `None` constructor. The first case returns true, so we know that
210//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched
211//! before.
212//!
213//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately:
214//!
215//! ```text
216//! U(P, p) := U(P, (r_1, p_2, .., p_n))
217//! || U(P, (r_2, p_2, .., p_n))
218//! ```
219use std::sync::Arc;
220
221use hir_def::{
222 adt::VariantData,
223 body::Body,
224 expr::{Expr, Literal, Pat, PatId},
225 AdtId, EnumVariantId, VariantId,
226};
227use ra_arena::Idx;
228use smallvec::{smallvec, SmallVec};
229
230use crate::{db::HirDatabase, ApplicationTy, InferenceResult, Ty, TypeCtor};
231
232#[derive(Debug, Clone, Copy)]
233/// Either a pattern from the source code being analyzed, represented as
234/// as `PatId`, or a `Wild` pattern which is created as an intermediate
235/// step in the match checking algorithm and thus is not backed by a
236/// real `PatId`.
237///
238/// Note that it is totally valid for the `PatId` variant to contain
239/// a `PatId` which resolves to a `Wild` pattern, if that wild pattern
240/// exists in the source code being analyzed.
241enum PatIdOrWild {
242 PatId(PatId),
243 Wild,
244}
245
246impl PatIdOrWild {
247 fn as_pat(self, cx: &MatchCheckCtx) -> Pat {
248 match self {
249 PatIdOrWild::PatId(id) => cx.body.pats[id].clone(),
250 PatIdOrWild::Wild => Pat::Wild,
251 }
252 }
253
254 fn as_id(self) -> Option<PatId> {
255 match self {
256 PatIdOrWild::PatId(id) => Some(id),
257 PatIdOrWild::Wild => None,
258 }
259 }
260}
261
262impl From<PatId> for PatIdOrWild {
263 fn from(pat_id: PatId) -> Self {
264 Self::PatId(pat_id)
265 }
266}
267
268impl From<&PatId> for PatIdOrWild {
269 fn from(pat_id: &PatId) -> Self {
270 Self::PatId(*pat_id)
271 }
272}
273
274#[derive(Debug, Clone, Copy, PartialEq)]
275pub(super) enum MatchCheckErr {
276 NotImplemented,
277 MalformedMatchArm,
278 /// Used when type inference cannot resolve the type of
279 /// a pattern or expression.
280 Unknown,
281}
282
283/// The return type of `is_useful` is either an indication of usefulness
284/// of the match arm, or an error in the case the match statement
285/// is made up of types for which exhaustiveness checking is currently
286/// not completely implemented.
287///
288/// The `std::result::Result` type is used here rather than a custom enum
289/// to allow the use of `?`.
290pub(super) type MatchCheckResult<T> = Result<T, MatchCheckErr>;
291
292#[derive(Debug)]
293/// A row in a Matrix.
294///
295/// This type is modeled from the struct of the same name in `rustc`.
296pub(super) struct PatStack(PatStackInner);
297type PatStackInner = SmallVec<[PatIdOrWild; 2]>;
298
299impl PatStack {
300 pub(super) fn from_pattern(pat_id: PatId) -> PatStack {
301 Self(smallvec!(pat_id.into()))
302 }
303
304 pub(super) fn from_wild() -> PatStack {
305 Self(smallvec!(PatIdOrWild::Wild))
306 }
307
308 fn from_slice(slice: &[PatIdOrWild]) -> PatStack {
309 Self(SmallVec::from_slice(slice))
310 }
311
312 fn from_vec(v: PatStackInner) -> PatStack {
313 Self(v)
314 }
315
316 fn get_head(&self) -> Option<PatIdOrWild> {
317 self.0.first().copied()
318 }
319
320 fn tail(&self) -> &[PatIdOrWild] {
321 self.0.get(1..).unwrap_or(&[])
322 }
323
324 fn to_tail(&self) -> PatStack {
325 Self::from_slice(self.tail())
326 }
327
328 fn replace_head_with<I, T>(&self, pats: I) -> PatStack
329 where
330 I: Iterator<Item = T>,
331 T: Into<PatIdOrWild>,
332 {
333 let mut patterns: PatStackInner = smallvec![];
334 for pat in pats {
335 patterns.push(pat.into());
336 }
337 for pat in &self.0[1..] {
338 patterns.push(*pat);
339 }
340 PatStack::from_vec(patterns)
341 }
342
343 /// Computes `D(self)`.
344 ///
345 /// See the module docs and the associated documentation in rustc for details.
346 fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option<PatStack> {
347 if matches!(self.get_head()?.as_pat(cx), Pat::Wild) {
348 Some(self.to_tail())
349 } else {
350 None
351 }
352 }
353
354 /// Computes `S(constructor, self)`.
355 ///
356 /// See the module docs and the associated documentation in rustc for details.
357 fn specialize_constructor(
358 &self,
359 cx: &MatchCheckCtx,
360 constructor: &Constructor,
361 ) -> MatchCheckResult<Option<PatStack>> {
362 let head = match self.get_head() {
363 Some(head) => head,
364 None => return Ok(None),
365 };
366
367 let head_pat = head.as_pat(cx);
368 let result = match (head_pat, constructor) {
369 (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => {
370 if ellipsis.is_some() {
371 // If there are ellipsis here, we should add the correct number of
372 // Pat::Wild patterns to `pat_ids`. We should be able to use the
373 // constructors arity for this, but at the time of writing we aren't
374 // correctly calculating this arity when ellipsis are present.
375 return Err(MatchCheckErr::NotImplemented);
376 }
377
378 Some(self.replace_head_with(pat_ids.iter()))
379 }
380 (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => {
381 match cx.body.exprs[lit_expr] {
382 Expr::Literal(Literal::Bool(pat_val)) if *constructor_val == pat_val => {
383 Some(self.to_tail())
384 }
385 // it was a bool but the value doesn't match
386 Expr::Literal(Literal::Bool(_)) => None,
387 // perhaps this is actually unreachable given we have
388 // already checked that these match arms have the appropriate type?
389 _ => return Err(MatchCheckErr::NotImplemented),
390 }
391 }
392 (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?),
393 (Pat::Path(_), Constructor::Enum(constructor)) => {
394 // unit enum variants become `Pat::Path`
395 let pat_id = head.as_id().expect("we know this isn't a wild");
396 if !enum_variant_matches(cx, pat_id, *constructor) {
397 None
398 } else {
399 Some(self.to_tail())
400 }
401 }
402 (
403 Pat::TupleStruct { args: ref pat_ids, ellipsis, .. },
404 Constructor::Enum(enum_constructor),
405 ) => {
406 let pat_id = head.as_id().expect("we know this isn't a wild");
407 if !enum_variant_matches(cx, pat_id, *enum_constructor) {
408 None
409 } else {
410 let constructor_arity = constructor.arity(cx)?;
411 if let Some(ellipsis_position) = ellipsis {
412 // If there are ellipsis in the pattern, the ellipsis must take the place
413 // of at least one sub-pattern, so `pat_ids` should be smaller than the
414 // constructor arity.
415 if pat_ids.len() < constructor_arity {
416 let mut new_patterns: Vec<PatIdOrWild> = vec![];
417
418 for pat_id in &pat_ids[0..ellipsis_position] {
419 new_patterns.push((*pat_id).into());
420 }
421
422 for _ in 0..(constructor_arity - pat_ids.len()) {
423 new_patterns.push(PatIdOrWild::Wild);
424 }
425
426 for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] {
427 new_patterns.push((*pat_id).into());
428 }
429
430 Some(self.replace_head_with(new_patterns.into_iter()))
431 } else {
432 return Err(MatchCheckErr::MalformedMatchArm);
433 }
434 } else {
435 // If there is no ellipsis in the tuple pattern, the number
436 // of patterns must equal the constructor arity.
437 if pat_ids.len() == constructor_arity {
438 Some(self.replace_head_with(pat_ids.into_iter()))
439 } else {
440 return Err(MatchCheckErr::MalformedMatchArm);
441 }
442 }
443 }
444 }
445 (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => {
446 let pat_id = head.as_id().expect("we know this isn't a wild");
447 if !enum_variant_matches(cx, pat_id, *e) {
448 None
449 } else {
450 match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
451 VariantData::Record(struct_field_arena) => {
452 // Here we treat any missing fields in the record as the wild pattern, as
453 // if the record had an ellipsis. We want to do this here even if the
454 // record does not contain an ellipsis, because it allows us to continue
455 // enforcing exhaustiveness for the rest of the match statement.
456 //
457 // Reporting the missing field in the pattern should be done by a
458 // separate diagnostic.
459 let patterns = struct_field_arena.iter().map(|(_, struct_field)| {
460 arg_patterns
461 .iter()
462 .find(|pat| pat.name == struct_field.name)
463 .map(|pat| PatIdOrWild::from(pat.pat))
464 .unwrap_or(PatIdOrWild::Wild)
465 });
466
467 Some(self.replace_head_with(patterns))
468 }
469 _ => return Err(MatchCheckErr::Unknown),
470 }
471 }
472 }
473 (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
474 (_, _) => return Err(MatchCheckErr::NotImplemented),
475 };
476
477 Ok(result)
478 }
479
480 /// A special case of `specialize_constructor` where the head of the pattern stack
481 /// is a Wild pattern.
482 ///
483 /// Replaces the Wild pattern at the head of the pattern stack with N Wild patterns
484 /// (N >= 0), where N is the arity of the given constructor.
485 fn expand_wildcard(
486 &self,
487 cx: &MatchCheckCtx,
488 constructor: &Constructor,
489 ) -> MatchCheckResult<PatStack> {
490 assert_eq!(
491 Pat::Wild,
492 self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx),
493 "expand_wildcard must only be called on PatStack with wild at head",
494 );
495
496 let mut patterns: PatStackInner = smallvec![];
497
498 for _ in 0..constructor.arity(cx)? {
499 patterns.push(PatIdOrWild::Wild);
500 }
501
502 for pat in &self.0[1..] {
503 patterns.push(*pat);
504 }
505
506 Ok(PatStack::from_vec(patterns))
507 }
508}
509
510/// A collection of PatStack.
511///
512/// This type is modeled on the struct of the same name in `rustc`.
513pub(super) struct Matrix(Vec<PatStack>);
514
515impl Matrix {
516 pub(super) fn empty() -> Self {
517 Self(vec![])
518 }
519
520 pub(super) fn push(&mut self, cx: &MatchCheckCtx, row: PatStack) {
521 if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) {
522 // Or patterns are expanded here
523 for pat_id in pat_ids {
524 self.0.push(PatStack::from_pattern(pat_id));
525 }
526 } else {
527 self.0.push(row);
528 }
529 }
530
531 fn is_empty(&self) -> bool {
532 self.0.is_empty()
533 }
534
535 fn heads(&self) -> Vec<PatIdOrWild> {
536 self.0.iter().flat_map(|p| p.get_head()).collect()
537 }
538
539 /// Computes `D(self)` for each contained PatStack.
540 ///
541 /// See the module docs and the associated documentation in rustc for details.
542 fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Self {
543 Self::collect(cx, self.0.iter().filter_map(|r| r.specialize_wildcard(cx)))
544 }
545
546 /// Computes `S(constructor, self)` for each contained PatStack.
547 ///
548 /// See the module docs and the associated documentation in rustc for details.
549 fn specialize_constructor(
550 &self,
551 cx: &MatchCheckCtx,
552 constructor: &Constructor,
553 ) -> MatchCheckResult<Self> {
554 let mut new_matrix = Matrix::empty();
555 for pat in &self.0 {
556 if let Some(pat) = pat.specialize_constructor(cx, constructor)? {
557 new_matrix.push(cx, pat);
558 }
559 }
560
561 Ok(new_matrix)
562 }
563
564 fn collect<T: IntoIterator<Item = PatStack>>(cx: &MatchCheckCtx, iter: T) -> Self {
565 let mut matrix = Matrix::empty();
566
567 for pat in iter {
568 // using push ensures we expand or-patterns
569 matrix.push(cx, pat);
570 }
571
572 matrix
573 }
574}
575
576#[derive(Clone, Debug, PartialEq)]
577/// An indication of the usefulness of a given match arm, where
578/// usefulness is defined as matching some patterns which were
579/// not matched by any prior match arms.
580///
581/// We may eventually need an `Unknown` variant here.
582pub(super) enum Usefulness {
583 Useful,
584 NotUseful,
585}
586
587pub(super) struct MatchCheckCtx<'a> {
588 pub(super) match_expr: Idx<Expr>,
589 pub(super) body: Arc<Body>,
590 pub(super) infer: Arc<InferenceResult>,
591 pub(super) db: &'a dyn HirDatabase,
592}
593
594/// Given a set of patterns `matrix` and a pattern to consider `v`, determines
595/// whether `v` is useful. A pattern is useful if it covers cases which were
596/// not previously covered.
597///
598/// When calling this function externally (that is, not from the recursive calls) it is
599/// expected that you have already type checked the match arms. All patterns in
600/// `matrix` should have the same type as `v`, and they should all have the same
601/// type as the match expression.
602pub(super) fn is_useful(
603 cx: &MatchCheckCtx,
604 matrix: &Matrix,
605 v: &PatStack,
606) -> MatchCheckResult<Usefulness> {
607 // Handle two special cases:
608 // - enum with no variants
609 // - `!` type
610 // In those cases, no match arm is useful.
611 match cx.infer[cx.match_expr].strip_references() {
612 Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) => {
613 if cx.db.enum_data(*enum_id).variants.is_empty() {
614 return Ok(Usefulness::NotUseful);
615 }
616 }
617 Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }) => {
618 return Ok(Usefulness::NotUseful);
619 }
620 _ => (),
621 }
622
623 let head = match v.get_head() {
624 Some(head) => head,
625 None => {
626 let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful };
627
628 return Ok(result);
629 }
630 };
631
632 if let Pat::Or(pat_ids) = head.as_pat(cx) {
633 let mut found_unimplemented = false;
634 let any_useful = pat_ids.iter().any(|&pat_id| {
635 let v = PatStack::from_pattern(pat_id);
636
637 match is_useful(cx, matrix, &v) {
638 Ok(Usefulness::Useful) => true,
639 Ok(Usefulness::NotUseful) => false,
640 _ => {
641 found_unimplemented = true;
642 false
643 }
644 }
645 });
646
647 return if any_useful {
648 Ok(Usefulness::Useful)
649 } else if found_unimplemented {
650 Err(MatchCheckErr::NotImplemented)
651 } else {
652 Ok(Usefulness::NotUseful)
653 };
654 }
655
656 if let Some(constructor) = pat_constructor(cx, head)? {
657 let matrix = matrix.specialize_constructor(&cx, &constructor)?;
658 let v = v
659 .specialize_constructor(&cx, &constructor)?
660 .expect("we know this can't fail because we get the constructor from `v.head()` above");
661
662 is_useful(&cx, &matrix, &v)
663 } else {
664 // expanding wildcard
665 let mut used_constructors: Vec<Constructor> = vec![];
666 for pat in matrix.heads() {
667 if let Some(constructor) = pat_constructor(cx, pat)? {
668 used_constructors.push(constructor);
669 }
670 }
671
672 // We assume here that the first constructor is the "correct" type. Since we
673 // only care about the "type" of the constructor (i.e. if it is a bool we
674 // don't care about the value), this assumption should be valid as long as
675 // the match statement is well formed. We currently uphold this invariant by
676 // filtering match arms before calling `is_useful`, only passing in match arms
677 // whose type matches the type of the match expression.
678 match &used_constructors.first() {
679 Some(constructor) if all_constructors_covered(&cx, constructor, &used_constructors) => {
680 // If all constructors are covered, then we need to consider whether
681 // any values are covered by this wildcard.
682 //
683 // For example, with matrix '[[Some(true)], [None]]', all
684 // constructors are covered (`Some`/`None`), so we need
685 // to perform specialization to see that our wildcard will cover
686 // the `Some(false)` case.
687 //
688 // Here we create a constructor for each variant and then check
689 // usefulness after specializing for that constructor.
690 let mut found_unimplemented = false;
691 for constructor in constructor.all_constructors(cx) {
692 let matrix = matrix.specialize_constructor(&cx, &constructor)?;
693 let v = v.expand_wildcard(&cx, &constructor)?;
694
695 match is_useful(&cx, &matrix, &v) {
696 Ok(Usefulness::Useful) => return Ok(Usefulness::Useful),
697 Ok(Usefulness::NotUseful) => continue,
698 _ => found_unimplemented = true,
699 };
700 }
701
702 if found_unimplemented {
703 Err(MatchCheckErr::NotImplemented)
704 } else {
705 Ok(Usefulness::NotUseful)
706 }
707 }
708 _ => {
709 // Either not all constructors are covered, or the only other arms
710 // are wildcards. Either way, this pattern is useful if it is useful
711 // when compared to those arms with wildcards.
712 let matrix = matrix.specialize_wildcard(&cx);
713 let v = v.to_tail();
714
715 is_useful(&cx, &matrix, &v)
716 }
717 }
718 }
719}
720
721#[derive(Debug, Clone, Copy)]
722/// Similar to TypeCtor, but includes additional information about the specific
723/// value being instantiated. For example, TypeCtor::Bool doesn't contain the
724/// boolean value.
725enum Constructor {
726 Bool(bool),
727 Tuple { arity: usize },
728 Enum(EnumVariantId),
729}
730
731impl Constructor {
732 fn arity(&self, cx: &MatchCheckCtx) -> MatchCheckResult<usize> {
733 let arity = match self {
734 Constructor::Bool(_) => 0,
735 Constructor::Tuple { arity } => *arity,
736 Constructor::Enum(e) => {
737 match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
738 VariantData::Tuple(struct_field_data) => struct_field_data.len(),
739 VariantData::Record(struct_field_data) => struct_field_data.len(),
740 VariantData::Unit => 0,
741 }
742 }
743 };
744
745 Ok(arity)
746 }
747
748 fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec<Constructor> {
749 match self {
750 Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)],
751 Constructor::Tuple { .. } => vec![*self],
752 Constructor::Enum(e) => cx
753 .db
754 .enum_data(e.parent)
755 .variants
756 .iter()
757 .map(|(local_id, _)| {
758 Constructor::Enum(EnumVariantId { parent: e.parent, local_id })
759 })
760 .collect(),
761 }
762 }
763}
764
765/// Returns the constructor for the given pattern. Should only return None
766/// in the case of a Wild pattern.
767fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> {
768 let res = match pat.as_pat(cx) {
769 Pat::Wild => None,
770 // FIXME somehow create the Tuple constructor with the proper arity. If there is an
771 // ellipsis, the arity is not equal to the number of patterns.
772 Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => {
773 Some(Constructor::Tuple { arity: pats.len() })
774 }
775 Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] {
776 Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
777 _ => return Err(MatchCheckErr::NotImplemented),
778 },
779 Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. } => {
780 let pat_id = pat.as_id().expect("we already know this pattern is not a wild");
781 let variant_id =
782 cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?;
783 match variant_id {
784 VariantId::EnumVariantId(enum_variant_id) => {
785 Some(Constructor::Enum(enum_variant_id))
786 }
787 _ => return Err(MatchCheckErr::NotImplemented),
788 }
789 }
790 _ => return Err(MatchCheckErr::NotImplemented),
791 };
792
793 Ok(res)
794}
795
796fn all_constructors_covered(
797 cx: &MatchCheckCtx,
798 constructor: &Constructor,
799 used_constructors: &[Constructor],
800) -> bool {
801 match constructor {
802 Constructor::Tuple { arity } => {
803 used_constructors.iter().any(|constructor| match constructor {
804 Constructor::Tuple { arity: used_arity } => arity == used_arity,
805 _ => false,
806 })
807 }
808 Constructor::Bool(_) => {
809 if used_constructors.is_empty() {
810 return false;
811 }
812
813 let covers_true =
814 used_constructors.iter().any(|c| matches!(c, Constructor::Bool(true)));
815 let covers_false =
816 used_constructors.iter().any(|c| matches!(c, Constructor::Bool(false)));
817
818 covers_true && covers_false
819 }
820 Constructor::Enum(e) => cx.db.enum_data(e.parent).variants.iter().all(|(id, _)| {
821 for constructor in used_constructors {
822 if let Constructor::Enum(e) = constructor {
823 if id == e.local_id {
824 return true;
825 }
826 }
827 }
828
829 false
830 }),
831 }
832}
833
834fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool {
835 Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id)
836}
837
838#[cfg(test)]
839mod tests {
840 use crate::diagnostics::tests::check_diagnostics;
841
842 #[test]
843 fn empty_tuple() {
844 check_diagnostics(
845 r#"
846fn main() {
847 match () { }
848 //^^ Missing match arm
849 match (()) { }
850 //^^^^ Missing match arm
851
852 match () { _ => (), }
853 match () { () => (), }
854 match (()) { (()) => (), }
855}
856"#,
857 );
858 }
859
860 #[test]
861 fn tuple_of_two_empty_tuple() {
862 check_diagnostics(
863 r#"
864fn main() {
865 match ((), ()) { }
866 //^^^^^^^^ Missing match arm
867
868 match ((), ()) { ((), ()) => (), }
869}
870"#,
871 );
872 }
873
874 #[test]
875 fn boolean() {
876 check_diagnostics(
877 r#"
878fn test_main() {
879 match false { }
880 //^^^^^ Missing match arm
881 match false { true => (), }
882 //^^^^^ Missing match arm
883 match (false, true) {}
884 //^^^^^^^^^^^^^ Missing match arm
885 match (false, true) { (true, true) => (), }
886 //^^^^^^^^^^^^^ Missing match arm
887 match (false, true) {
888 //^^^^^^^^^^^^^ Missing match arm
889 (false, true) => (),
890 (false, false) => (),
891 (true, false) => (),
892 }
893 match (false, true) { (true, _x) => (), }
894 //^^^^^^^^^^^^^ Missing match arm
895
896 match false { true => (), false => (), }
897 match (false, true) {
898 (false, _) => (),
899 (true, false) => (),
900 (_, true) => (),
901 }
902 match (false, true) {
903 (true, true) => (),
904 (true, false) => (),
905 (false, true) => (),
906 (false, false) => (),
907 }
908 match (false, true) {
909 (true, _x) => (),
910 (false, true) => (),
911 (false, false) => (),
912 }
913 match (false, true, false) {
914 (false, ..) => (),
915 (true, ..) => (),
916 }
917 match (false, true, false) {
918 (.., false) => (),
919 (.., true) => (),
920 }
921 match (false, true, false) { (..) => (), }
922}
923"#,
924 );
925 }
926
927 #[test]
928 fn tuple_of_tuple_and_bools() {
929 check_diagnostics(
930 r#"
931fn main() {
932 match (false, ((), false)) {}
933 //^^^^^^^^^^^^^^^^^^^^ Missing match arm
934 match (false, ((), false)) { (true, ((), true)) => (), }
935 //^^^^^^^^^^^^^^^^^^^^ Missing match arm
936 match (false, ((), false)) { (true, _) => (), }
937 //^^^^^^^^^^^^^^^^^^^^ Missing match arm
938
939 match (false, ((), false)) {
940 (true, ((), true)) => (),
941 (true, ((), false)) => (),
942 (false, ((), true)) => (),
943 (false, ((), false)) => (),
944 }
945 match (false, ((), false)) {
946 (true, ((), true)) => (),
947 (true, ((), false)) => (),
948 (false, _) => (),
949 }
950}
951"#,
952 );
953 }
954
955 #[test]
956 fn enums() {
957 check_diagnostics(
958 r#"
959enum Either { A, B, }
960
961fn main() {
962 match Either::A { }
963 //^^^^^^^^^ Missing match arm
964 match Either::B { Either::A => (), }
965 //^^^^^^^^^ Missing match arm
966
967 match &Either::B {
968 //^^^^^^^^^^ Missing match arm
969 Either::A => (),
970 }
971
972 match Either::B {
973 Either::A => (), Either::B => (),
974 }
975 match &Either::B {
976 Either::A => (), Either::B => (),
977 }
978}
979"#,
980 );
981 }
982
983 #[test]
984 fn enum_containing_bool() {
985 check_diagnostics(
986 r#"
987enum Either { A(bool), B }
988
989fn main() {
990 match Either::B { }
991 //^^^^^^^^^ Missing match arm
992 match Either::B {
993 //^^^^^^^^^ Missing match arm
994 Either::A(true) => (), Either::B => ()
995 }
996
997 match Either::B {
998 Either::A(true) => (),
999 Either::A(false) => (),
1000 Either::B => (),
1001 }
1002 match Either::B {
1003 Either::B => (),
1004 _ => (),
1005 }
1006 match Either::B {
1007 Either::A(_) => (),
1008 Either::B => (),
1009 }
1010
1011}
1012 "#,
1013 );
1014 }
1015
1016 #[test]
1017 fn enum_different_sizes() {
1018 check_diagnostics(
1019 r#"
1020enum Either { A(bool), B(bool, bool) }
1021
1022fn main() {
1023 match Either::A(false) {
1024 //^^^^^^^^^^^^^^^^ Missing match arm
1025 Either::A(_) => (),
1026 Either::B(false, _) => (),
1027 }
1028
1029 match Either::A(false) {
1030 Either::A(_) => (),
1031 Either::B(true, _) => (),
1032 Either::B(false, _) => (),
1033 }
1034 match Either::A(false) {
1035 Either::A(true) | Either::A(false) => (),
1036 Either::B(true, _) => (),
1037 Either::B(false, _) => (),
1038 }
1039}
1040"#,
1041 );
1042 }
1043
1044 #[test]
1045 fn tuple_of_enum_no_diagnostic() {
1046 check_diagnostics(
1047 r#"
1048enum Either { A(bool), B(bool, bool) }
1049enum Either2 { C, D }
1050
1051fn main() {
1052 match (Either::A(false), Either2::C) {
1053 (Either::A(true), _) | (Either::A(false), _) => (),
1054 (Either::B(true, _), Either2::C) => (),
1055 (Either::B(false, _), Either2::C) => (),
1056 (Either::B(_, _), Either2::D) => (),
1057 }
1058}
1059"#,
1060 );
1061 }
1062
1063 #[test]
1064 fn mismatched_types() {
1065 // Match expressions with arms whose patterns don't match the type
1066 // of the match expression do not fire this diagnostic.
1067 check_diagnostics(
1068 r#"
1069enum Either { A, B }
1070enum Either2 { C, D }
1071
1072fn main() {
1073 match Either::A {
1074 Either2::C => (),
1075 Either2::D => (),
1076 }
1077 match (true, false) {
1078 (true, false, true) => (),
1079 (true) => (),
1080 }
1081 match (0) { () => () }
1082 match Unresolved::Bar { Unresolved::Baz => () }
1083}
1084 "#,
1085 );
1086 }
1087
1088 #[test]
1089 fn malformed_match_arm_tuple_enum_missing_pattern() {
1090 // We are testing to be sure we don't panic here when the match
1091 // arm `Either::B` is missing its pattern.
1092 check_diagnostics(
1093 r#"
1094enum Either { A, B(u32) }
1095
1096fn main() {
1097 match Either::A {
1098 Either::A => (),
1099 Either::B() => (),
1100 }
1101}
1102"#,
1103 );
1104 }
1105
1106 #[test]
1107 fn expr_diverges() {
1108 check_diagnostics(
1109 r#"
1110enum Either { A, B }
1111
1112fn main() {
1113 match loop {} {
1114 Either::A => (),
1115 Either::B => (),
1116 }
1117 match loop {} {
1118 Either::A => (),
1119 }
1120 match loop { break Foo::A } {
1121 //^^^^^^^^^^^^^^^^^^^^^ Missing match arm
1122 Either::A => (),
1123 }
1124 match loop { break Foo::A } {
1125 Either::A => (),
1126 Either::B => (),
1127 }
1128}
1129"#,
1130 );
1131 }
1132
1133 #[test]
1134 fn expr_partially_diverges() {
1135 check_diagnostics(
1136 r#"
1137enum Either<T> { A(T), B }
1138
1139fn foo() -> Either<!> { Either::B }
1140fn main() -> u32 {
1141 match foo() {
1142 Either::A(val) => val,
1143 Either::B => 0,
1144 }
1145}
1146"#,
1147 );
1148 }
1149
1150 #[test]
1151 fn enum_record() {
1152 check_diagnostics(
1153 r#"
1154enum Either { A { foo: bool }, B }
1155
1156fn main() {
1157 let a = Either::A { foo: true };
1158 match a { }
1159 //^ Missing match arm
1160 match a { Either::A { foo: true } => () }
1161 //^ Missing match arm
1162 match a {
1163 Either::A { } => (),
1164 //^^^ Missing structure fields:
1165 // | - foo
1166 Either::B => (),
1167 }
1168 match a {
1169 //^ Missing match arm
1170 Either::A { } => (),
1171 } //^^^ Missing structure fields:
1172 // | - foo
1173
1174 match a {
1175 Either::A { foo: true } => (),
1176 Either::A { foo: false } => (),
1177 Either::B => (),
1178 }
1179 match a {
1180 Either::A { foo: _ } => (),
1181 Either::B => (),
1182 }
1183}
1184"#,
1185 );
1186 }
1187
1188 #[test]
1189 fn enum_record_fields_out_of_order() {
1190 check_diagnostics(
1191 r#"
1192enum Either {
1193 A { foo: bool, bar: () },
1194 B,
1195}
1196
1197fn main() {
1198 let a = Either::A { foo: true, bar: () };
1199 match a {
1200 //^ Missing match arm
1201 Either::A { bar: (), foo: false } => (),
1202 Either::A { foo: true, bar: () } => (),
1203 }
1204
1205 match a {
1206 Either::A { bar: (), foo: false } => (),
1207 Either::A { foo: true, bar: () } => (),
1208 Either::B => (),
1209 }
1210}
1211"#,
1212 );
1213 }
1214
1215 #[test]
1216 fn enum_record_ellipsis() {
1217 check_diagnostics(
1218 r#"
1219enum Either {
1220 A { foo: bool, bar: bool },
1221 B,
1222}
1223
1224fn main() {
1225 let a = Either::B;
1226 match a {
1227 //^ Missing match arm
1228 Either::A { foo: true, .. } => (),
1229 Either::B => (),
1230 }
1231 match a {
1232 //^ Missing match arm
1233 Either::A { .. } => (),
1234 }
1235
1236 match a {
1237 Either::A { foo: true, .. } => (),
1238 Either::A { foo: false, .. } => (),
1239 Either::B => (),
1240 }
1241
1242 match a {
1243 Either::A { .. } => (),
1244 Either::B => (),
1245 }
1246}
1247"#,
1248 );
1249 }
1250
1251 #[test]
1252 fn enum_tuple_partial_ellipsis() {
1253 check_diagnostics(
1254 r#"
1255enum Either {
1256 A(bool, bool, bool, bool),
1257 B,
1258}
1259
1260fn main() {
1261 match Either::B {
1262 //^^^^^^^^^ Missing match arm
1263 Either::A(true, .., true) => (),
1264 Either::A(true, .., false) => (),
1265 Either::A(false, .., false) => (),
1266 Either::B => (),
1267 }
1268 match Either::B {
1269 //^^^^^^^^^ Missing match arm
1270 Either::A(true, .., true) => (),
1271 Either::A(true, .., false) => (),
1272 Either::A(.., true) => (),
1273 Either::B => (),
1274 }
1275
1276 match Either::B {
1277 Either::A(true, .., true) => (),
1278 Either::A(true, .., false) => (),
1279 Either::A(false, .., true) => (),
1280 Either::A(false, .., false) => (),
1281 Either::B => (),
1282 }
1283 match Either::B {
1284 Either::A(true, .., true) => (),
1285 Either::A(true, .., false) => (),
1286 Either::A(.., true) => (),
1287 Either::A(.., false) => (),
1288 Either::B => (),
1289 }
1290}
1291"#,
1292 );
1293 }
1294
1295 #[test]
1296 fn never() {
1297 check_diagnostics(
1298 r#"
1299enum Never {}
1300
1301fn enum_(never: Never) {
1302 match never {}
1303}
1304fn enum_ref(never: &Never) {
1305 match never {}
1306}
1307fn bang(never: !) {
1308 match never {}
1309}
1310"#,
1311 );
1312 }
1313
1314 #[test]
1315 fn or_pattern_panic() {
1316 check_diagnostics(
1317 r#"
1318pub enum Category { Infinity, Zero }
1319
1320fn panic(a: Category, b: Category) {
1321 match (a, b) {
1322 (Category::Zero | Category::Infinity, _) => (),
1323 (_, Category::Zero | Category::Infinity) => (),
1324 }
1325
1326 // FIXME: This is a false positive, but the code used to cause a panic in the match checker,
1327 // so this acts as a regression test for that.
1328 match (a, b) {
1329 //^^^^^^ Missing match arm
1330 (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (),
1331 (Category::Infinity | Category::Zero, _) => (),
1332 }
1333}
1334"#,
1335 );
1336 }
1337
1338 mod false_negatives {
1339 //! The implementation of match checking here is a work in progress. As we roll this out, we
1340 //! prefer false negatives to false positives (ideally there would be no false positives). This
1341 //! test module should document known false negatives. Eventually we will have a complete
1342 //! implementation of match checking and this module will be empty.
1343 //!
1344 //! The reasons for documenting known false negatives:
1345 //!
1346 //! 1. It acts as a backlog of work that can be done to improve the behavior of the system.
1347 //! 2. It ensures the code doesn't panic when handling these cases.
1348 use super::*;
1349
1350 #[test]
1351 fn integers() {
1352 // We don't currently check integer exhaustiveness.
1353 check_diagnostics(
1354 r#"
1355fn main() {
1356 match 5 {
1357 10 => (),
1358 11..20 => (),
1359 }
1360}
1361"#,
1362 );
1363 }
1364
1365 #[test]
1366 fn internal_or() {
1367 // We do not currently handle patterns with internal `or`s.
1368 check_diagnostics(
1369 r#"
1370fn main() {
1371 enum Either { A(bool), B }
1372 match Either::B {
1373 Either::A(true | false) => (),
1374 }
1375}
1376"#,
1377 );
1378 }
1379
1380 #[test]
1381 fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
1382 // We don't currently handle tuple patterns with ellipsis.
1383 check_diagnostics(
1384 r#"
1385fn main() {
1386 match (false, true, false) {
1387 (false, ..) => (),
1388 }
1389}
1390"#,
1391 );
1392 }
1393
1394 #[test]
1395 fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() {
1396 // We don't currently handle tuple patterns with ellipsis.
1397 check_diagnostics(
1398 r#"
1399fn main() {
1400 match (false, true, false) {
1401 (.., false) => (),
1402 }
1403}
1404"#,
1405 );
1406 }
1407
1408 #[test]
1409 fn struct_missing_arm() {
1410 // We don't currently handle structs.
1411 check_diagnostics(
1412 r#"
1413struct Foo { a: bool }
1414fn main(f: Foo) {
1415 match f { Foo { a: true } => () }
1416}
1417"#,
1418 );
1419 }
1420 }
1421}
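
The file above implements the classic "usefulness" recursion (the `D` and `S` specializations referenced in its doc comments). As a point of reference only, here is a minimal, self-contained sketch of the same recursion restricted to boolean patterns; `Pat`, `Row`, `specialize`, and `is_useful` below are simplified stand-ins for illustration, not the crate's `PatStack`/`Matrix` API.

// A minimal sketch of the usefulness recursion over boolean patterns only.
// `Pat` and `Row` are simplified stand-ins, not the crate's real types.
#[derive(Clone, Copy)]
enum Pat {
    Wild,
    Bool(bool),
}

type Row = Vec<Pat>;

// S(b, row): drop the head if it matches the constructor `b`.
fn specialize(row: &Row, b: bool) -> Option<Row> {
    match row.first()? {
        Pat::Wild => Some(row[1..].to_vec()),
        Pat::Bool(v) if *v == b => Some(row[1..].to_vec()),
        Pat::Bool(_) => None,
    }
}

// `v` is useful w.r.t. `matrix` if it matches some value no earlier row matches.
fn is_useful(matrix: &[Row], v: &Row) -> bool {
    match v.first() {
        // No columns left: useful iff no earlier row already covers everything.
        None => matrix.is_empty(),
        Some(&Pat::Bool(b)) => {
            let m: Vec<Row> = matrix.iter().filter_map(|r| specialize(r, b)).collect();
            is_useful(&m, &specialize(v, b).unwrap())
        }
        // A wildcard head: since bool has exactly two constructors, try both.
        Some(&Pat::Wild) => [true, false].iter().any(|&b| {
            let m: Vec<Row> = matrix.iter().filter_map(|r| specialize(r, b)).collect();
            is_useful(&m, &specialize(v, b).unwrap())
        }),
    }
}

fn main() {
    // The rows `(true, _)` and `(_, true)` leave `(false, false)` uncovered,
    // so an all-wildcard row is still useful: the match is non-exhaustive.
    let matrix = vec![vec![Pat::Bool(true), Pat::Wild], vec![Pat::Wild, Pat::Bool(true)]];
    assert!(is_useful(&matrix, &vec![Pat::Wild, Pat::Wild]));
}

Tracing the example in `main`: specializing on `false` in both columns leaves an empty matrix while the wildcard row survives, which is exactly the condition the code above uses to report a missing match arm.
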
diff --git a/crates/ra_hir_ty/src/diagnostics/unsafe_check.rs b/crates/ra_hir_ty/src/diagnostics/unsafe_check.rs
deleted file mode 100644
index 5cc76bdce..000000000
--- a/crates/ra_hir_ty/src/diagnostics/unsafe_check.rs
+++ /dev/null
@@ -1,173 +0,0 @@
1//! Provides validations for unsafe code. Currently checks whether unsafe operations are used
2//! outside of unsafe blocks and unsafe functions.
3
4use std::sync::Arc;
5
6use hir_def::{
7 body::Body,
8 expr::{Expr, ExprId, UnaryOp},
9 DefWithBodyId,
10};
11use hir_expand::diagnostics::DiagnosticSink;
12
13use crate::{
14 db::HirDatabase, diagnostics::MissingUnsafe, lower::CallableDefId, ApplicationTy,
15 InferenceResult, Ty, TypeCtor,
16};
17
18pub(super) struct UnsafeValidator<'a, 'b: 'a> {
19 owner: DefWithBodyId,
20 infer: Arc<InferenceResult>,
21 sink: &'a mut DiagnosticSink<'b>,
22}
23
24impl<'a, 'b> UnsafeValidator<'a, 'b> {
25 pub(super) fn new(
26 owner: DefWithBodyId,
27 infer: Arc<InferenceResult>,
28 sink: &'a mut DiagnosticSink<'b>,
29 ) -> UnsafeValidator<'a, 'b> {
30 UnsafeValidator { owner, infer, sink }
31 }
32
33 pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
34 let def = self.owner.into();
35 let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def);
36 let is_unsafe = match self.owner {
37 DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe,
38 DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
39 };
40 if is_unsafe
41 || unsafe_expressions
42 .iter()
43 .filter(|unsafe_expr| !unsafe_expr.inside_unsafe_block)
44 .count()
45 == 0
46 {
47 return;
48 }
49
50 let (_, body_source) = db.body_with_source_map(def);
51 for unsafe_expr in unsafe_expressions {
52 if !unsafe_expr.inside_unsafe_block {
53 if let Ok(in_file) = body_source.as_ref().expr_syntax(unsafe_expr.expr) {
54 self.sink.push(MissingUnsafe { file: in_file.file_id, expr: in_file.value })
55 }
56 }
57 }
58 }
59}
60
61pub struct UnsafeExpr {
62 pub expr: ExprId,
63 pub inside_unsafe_block: bool,
64}
65
66pub fn unsafe_expressions(
67 db: &dyn HirDatabase,
68 infer: &InferenceResult,
69 def: DefWithBodyId,
70) -> Vec<UnsafeExpr> {
71 let mut unsafe_exprs = vec![];
72 let body = db.body(def);
73 walk_unsafe(&mut unsafe_exprs, db, infer, &body, body.body_expr, false);
74
75 unsafe_exprs
76}
77
78fn walk_unsafe(
79 unsafe_exprs: &mut Vec<UnsafeExpr>,
80 db: &dyn HirDatabase,
81 infer: &InferenceResult,
82 body: &Body,
83 current: ExprId,
84 inside_unsafe_block: bool,
85) {
86 let expr = &body.exprs[current];
87 match expr {
88 Expr::Call { callee, .. } => {
89 let ty = &infer[*callee];
90 if let &Ty::Apply(ApplicationTy {
91 ctor: TypeCtor::FnDef(CallableDefId::FunctionId(func)),
92 ..
93 }) = ty
94 {
95 if db.function_data(func).is_unsafe {
96 unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
97 }
98 }
99 }
100 Expr::MethodCall { .. } => {
101 if infer
102 .method_resolution(current)
103 .map(|func| db.function_data(func).is_unsafe)
104 .unwrap_or(false)
105 {
106 unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
107 }
108 }
109 Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
110 if let Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }) = &infer[*expr] {
111 unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
112 }
113 }
114 Expr::Unsafe { body: child } => {
115 return walk_unsafe(unsafe_exprs, db, infer, body, *child, true);
116 }
117 _ => {}
118 }
119
120 expr.walk_child_exprs(|child| {
121 walk_unsafe(unsafe_exprs, db, infer, body, child, inside_unsafe_block);
122 });
123}
124
125#[cfg(test)]
126mod tests {
127 use crate::diagnostics::tests::check_diagnostics;
128
129 #[test]
130 fn missing_unsafe_diagnostic_with_raw_ptr() {
131 check_diagnostics(
132 r#"
133fn main() {
134 let x = &5 as *const usize;
135 unsafe { let y = *x; }
136 let z = *x;
137} //^^ This operation is unsafe and requires an unsafe function or block
138"#,
139 )
140 }
141
142 #[test]
143 fn missing_unsafe_diagnostic_with_unsafe_call() {
144 check_diagnostics(
145 r#"
146struct HasUnsafe;
147
148impl HasUnsafe {
149 unsafe fn unsafe_fn(&self) {
150 let x = &5 as *const usize;
151 let y = *x;
152 }
153}
154
155unsafe fn unsafe_fn() {
156 let x = &5 as *const usize;
157 let y = *x;
158}
159
160fn main() {
161 unsafe_fn();
162 //^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
163 HasUnsafe.unsafe_fn();
164 //^^^^^^^^^^^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
165 unsafe {
166 unsafe_fn();
167 HasUnsafe.unsafe_fn();
168 }
169}
170"#,
171 );
172 }
173}
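
The core of `walk_unsafe` above is a traversal that threads an `inside_unsafe_block` flag through the expression tree, setting it to true for the subtree of an `Unsafe { .. }` expression. Below is a minimal sketch of that pattern over a toy expression tree, purely for illustration; the `Expr` enum and `walk` function are stand-ins, not the crate's HIR types.

// A toy expression tree and walker illustrating the `inside_unsafe_block` flag.
enum Expr {
    UnsafeOp(&'static str),
    Unsafe(Vec<Expr>),
    Block(Vec<Expr>),
}

fn walk(expr: &Expr, inside_unsafe_block: bool, missing: &mut Vec<&'static str>) {
    match expr {
        Expr::UnsafeOp(name) => {
            if !inside_unsafe_block {
                // This is where the real code would push a MissingUnsafe diagnostic.
                missing.push(*name);
            }
        }
        // Entering an `unsafe { .. }` block flips the flag for the whole subtree.
        Expr::Unsafe(children) => {
            for child in children {
                walk(child, true, missing);
            }
        }
        Expr::Block(children) => {
            for child in children {
                walk(child, inside_unsafe_block, missing);
            }
        }
    }
}

fn main() {
    let body = Expr::Block(vec![
        Expr::UnsafeOp("deref_raw_ptr"),
        Expr::Unsafe(vec![Expr::UnsafeOp("call_unsafe_fn")]),
    ]);
    let mut missing = Vec::new();
    walk(&body, false, &mut missing);
    // Only the operation outside the unsafe block is reported.
    assert_eq!(missing, vec!["deref_raw_ptr"]);
}
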
diff --git a/crates/ra_hir_ty/src/display.rs b/crates/ra_hir_ty/src/display.rs
deleted file mode 100644
index 19770e609..000000000
--- a/crates/ra_hir_ty/src/display.rs
+++ /dev/null
@@ -1,631 +0,0 @@
1//! FIXME: write short doc here
2
3use std::fmt;
4
5use crate::{
6 db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate,
7 Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
8};
9use hir_def::{
10 find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId,
11 Lookup, ModuleId,
12};
13use hir_expand::name::Name;
14
15pub struct HirFormatter<'a> {
16 pub db: &'a dyn HirDatabase,
17 fmt: &'a mut dyn fmt::Write,
18 buf: String,
19 curr_size: usize,
20 pub(crate) max_size: Option<usize>,
21 omit_verbose_types: bool,
22 display_target: DisplayTarget,
23}
24
25pub trait HirDisplay {
26 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>;
27
28 /// Returns a `Display`able type that is human-readable.
29 /// Use this for showing types to the user (e.g. diagnostics)
30 fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
31 where
32 Self: Sized,
33 {
34 HirDisplayWrapper {
35 db,
36 t: self,
37 max_size: None,
38 omit_verbose_types: false,
39 display_target: DisplayTarget::Diagnostics,
40 }
41 }
42
43 /// Returns a `Display`able type that is human-readable and tries to be succinct.
44 /// Use this for showing types to the user where space is constrained (e.g. doc popups)
45 fn display_truncated<'a>(
46 &'a self,
47 db: &'a dyn HirDatabase,
48 max_size: Option<usize>,
49 ) -> HirDisplayWrapper<'a, Self>
50 where
51 Self: Sized,
52 {
53 HirDisplayWrapper {
54 db,
55 t: self,
56 max_size,
57 omit_verbose_types: true,
58 display_target: DisplayTarget::Diagnostics,
59 }
60 }
61
62 /// Returns a String representation of `self` that can be inserted into the given module.
63 /// Use this when generating code (e.g. assists)
64 fn display_source_code<'a>(
65 &'a self,
66 db: &'a dyn HirDatabase,
67 module_id: ModuleId,
68 ) -> Result<String, DisplaySourceCodeError> {
69 let mut result = String::new();
70 match self.hir_fmt(&mut HirFormatter {
71 db,
72 fmt: &mut result,
73 buf: String::with_capacity(20),
74 curr_size: 0,
75 max_size: None,
76 omit_verbose_types: false,
77 display_target: DisplayTarget::SourceCode { module_id },
78 }) {
79 Ok(()) => {}
80 Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
81 Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
82 };
83 Ok(result)
84 }
85}
86
87impl<'a> HirFormatter<'a> {
88 pub fn write_joined<T: HirDisplay>(
89 &mut self,
90 iter: impl IntoIterator<Item = T>,
91 sep: &str,
92 ) -> Result<(), HirDisplayError> {
93 let mut first = true;
94 for e in iter {
95 if !first {
96 write!(self, "{}", sep)?;
97 }
98 first = false;
99 e.hir_fmt(self)?;
100 }
101 Ok(())
102 }
103
104 /// This allows using the `write!` macro directly with a `HirFormatter`.
105 pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> {
106 // We write to a buffer first to track output size
107 self.buf.clear();
108 fmt::write(&mut self.buf, args)?;
109 self.curr_size += self.buf.len();
110
111 // Then we write to the internal formatter from the buffer
112 self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
113 }
114
115 pub fn should_truncate(&self) -> bool {
116 if let Some(max_size) = self.max_size {
117 self.curr_size >= max_size
118 } else {
119 false
120 }
121 }
122
123 pub fn omit_verbose_types(&self) -> bool {
124 self.omit_verbose_types
125 }
126}
127
128#[derive(Clone, Copy)]
129enum DisplayTarget {
130 /// Display types for inlays, doc popups, autocompletion, etc...
131 /// Showing `{unknown}` or not qualifying paths is fine here.
132 /// There's no reason for this to fail.
133 Diagnostics,
134 /// Display types for inserting them in source files.
135 /// The generated code should compile, so paths need to be qualified.
136 SourceCode { module_id: ModuleId },
137}
138
139impl DisplayTarget {
140 fn is_source_code(&self) -> bool {
141 matches!(self, Self::SourceCode {..})
142 }
143}
144
145#[derive(Debug)]
146pub enum DisplaySourceCodeError {
147 PathNotFound,
148}
149
150pub enum HirDisplayError {
151 /// Errors that can occur when generating source code
152 DisplaySourceCodeError(DisplaySourceCodeError),
153 /// `FmtError` is required to be compatible with std::fmt::Display
154 FmtError,
155}
156impl From<fmt::Error> for HirDisplayError {
157 fn from(_: fmt::Error) -> Self {
158 Self::FmtError
159 }
160}
161
162pub struct HirDisplayWrapper<'a, T> {
163 db: &'a dyn HirDatabase,
164 t: &'a T,
165 max_size: Option<usize>,
166 omit_verbose_types: bool,
167 display_target: DisplayTarget,
168}
169
170impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
171where
172 T: HirDisplay,
173{
174 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
175 match self.t.hir_fmt(&mut HirFormatter {
176 db: self.db,
177 fmt: f,
178 buf: String::with_capacity(20),
179 curr_size: 0,
180 max_size: self.max_size,
181 omit_verbose_types: self.omit_verbose_types,
182 display_target: self.display_target,
183 }) {
184 Ok(()) => Ok(()),
185 Err(HirDisplayError::FmtError) => Err(fmt::Error),
186 Err(HirDisplayError::DisplaySourceCodeError(_)) => {
187 // This should never happen
188 panic!("HirDisplay failed when calling Display::fmt!")
189 }
190 }
191 }
192}
193
194const TYPE_HINT_TRUNCATION: &str = "…";
195
196impl HirDisplay for &Ty {
197 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
198 HirDisplay::hir_fmt(*self, f)
199 }
200}
201
202impl HirDisplay for ApplicationTy {
203 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
204 if f.should_truncate() {
205 return write!(f, "{}", TYPE_HINT_TRUNCATION);
206 }
207
208 match self.ctor {
209 TypeCtor::Bool => write!(f, "bool")?,
210 TypeCtor::Char => write!(f, "char")?,
211 TypeCtor::Int(t) => write!(f, "{}", t)?,
212 TypeCtor::Float(t) => write!(f, "{}", t)?,
213 TypeCtor::Str => write!(f, "str")?,
214 TypeCtor::Slice => {
215 let t = self.parameters.as_single();
216 write!(f, "[{}]", t.display(f.db))?;
217 }
218 TypeCtor::Array => {
219 let t = self.parameters.as_single();
220 write!(f, "[{}; _]", t.display(f.db))?;
221 }
222 TypeCtor::RawPtr(m) => {
223 let t = self.parameters.as_single();
224 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
225 }
226 TypeCtor::Ref(m) => {
227 let t = self.parameters.as_single();
228 let ty_display = if f.omit_verbose_types() {
229 t.display_truncated(f.db, f.max_size)
230 } else {
231 t.display(f.db)
232 };
233 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
234 }
235 TypeCtor::Never => write!(f, "!")?,
236 TypeCtor::Tuple { .. } => {
237 let ts = &self.parameters;
238 if ts.len() == 1 {
239 write!(f, "({},)", ts[0].display(f.db))?;
240 } else {
241 write!(f, "(")?;
242 f.write_joined(&*ts.0, ", ")?;
243 write!(f, ")")?;
244 }
245 }
246 TypeCtor::FnPtr { is_varargs, .. } => {
247 let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs);
248 write!(f, "fn(")?;
249 f.write_joined(sig.params(), ", ")?;
250 if is_varargs {
251 if sig.params().is_empty() {
252 write!(f, "...")?;
253 } else {
254 write!(f, ", ...")?;
255 }
256 }
257 write!(f, ")")?;
258 let ret = sig.ret();
259 if *ret != Ty::unit() {
260 let ret_display = if f.omit_verbose_types() {
261 ret.display_truncated(f.db, f.max_size)
262 } else {
263 ret.display(f.db)
264 };
265 write!(f, " -> {}", ret_display)?;
266 }
267 }
268 TypeCtor::FnDef(def) => {
269 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
270 match def {
271 CallableDefId::FunctionId(ff) => {
272 write!(f, "fn {}", f.db.function_data(ff).name)?
273 }
274 CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
275 CallableDefId::EnumVariantId(e) => {
276 write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
277 }
278 };
279 if self.parameters.len() > 0 {
280 let generics = generics(f.db.upcast(), def.into());
281 let (parent_params, self_param, type_params, _impl_trait_params) =
282 generics.provenance_split();
283 let total_len = parent_params + self_param + type_params;
284 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
285 if total_len > 0 {
286 write!(f, "<")?;
287 f.write_joined(&self.parameters.0[..total_len], ", ")?;
288 write!(f, ">")?;
289 }
290 }
291 write!(f, "(")?;
292 f.write_joined(sig.params(), ", ")?;
293 write!(f, ")")?;
294 let ret = sig.ret();
295 if *ret != Ty::unit() {
296 let ret_display = if f.omit_verbose_types() {
297 ret.display_truncated(f.db, f.max_size)
298 } else {
299 ret.display(f.db)
300 };
301 write!(f, " -> {}", ret_display)?;
302 }
303 }
304 TypeCtor::Adt(def_id) => {
305 match f.display_target {
306 DisplayTarget::Diagnostics => {
307 let name = match def_id {
308 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
309 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
310 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
311 };
312 write!(f, "{}", name)?;
313 }
314 DisplayTarget::SourceCode { module_id } => {
315 if let Some(path) = find_path::find_path(
316 f.db.upcast(),
317 ItemInNs::Types(def_id.into()),
318 module_id,
319 ) {
320 write!(f, "{}", path)?;
321 } else {
322 return Err(HirDisplayError::DisplaySourceCodeError(
323 DisplaySourceCodeError::PathNotFound,
324 ));
325 }
326 }
327 }
328
329 if self.parameters.len() > 0 {
330 let parameters_to_write =
331 if f.display_target.is_source_code() || f.omit_verbose_types() {
332 match self
333 .ctor
334 .as_generic_def()
335 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
336 .filter(|defaults| !defaults.is_empty())
337 {
338 None => self.parameters.0.as_ref(),
339 Some(default_parameters) => {
340 let mut default_from = 0;
341 for (i, parameter) in self.parameters.iter().enumerate() {
342 match (parameter, default_parameters.get(i)) {
343 (&Ty::Unknown, _) | (_, None) => {
344 default_from = i + 1;
345 }
346 (_, Some(default_parameter)) => {
347 let actual_default = default_parameter
348 .clone()
349 .subst(&self.parameters.prefix(i));
350 if parameter != &actual_default {
351 default_from = i + 1;
352 }
353 }
354 }
355 }
356 &self.parameters.0[0..default_from]
357 }
358 }
359 } else {
360 self.parameters.0.as_ref()
361 };
362 if !parameters_to_write.is_empty() {
363 write!(f, "<")?;
364 f.write_joined(parameters_to_write, ", ")?;
365 write!(f, ">")?;
366 }
367 }
368 }
369 TypeCtor::AssociatedType(type_alias) => {
370 let trait_ = match type_alias.lookup(f.db.upcast()).container {
371 AssocContainerId::TraitId(it) => it,
372 _ => panic!("not an associated type"),
373 };
374 let trait_ = f.db.trait_data(trait_);
375 let type_alias = f.db.type_alias_data(type_alias);
376 write!(f, "{}::{}", trait_.name, type_alias.name)?;
377 if self.parameters.len() > 0 {
378 write!(f, "<")?;
379 f.write_joined(&*self.parameters.0, ", ")?;
380 write!(f, ">")?;
381 }
382 }
383 TypeCtor::OpaqueType(opaque_ty_id) => {
384 let bounds = match opaque_ty_id {
385 OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
386 let datas =
387 f.db.return_type_impl_traits(func).expect("impl trait id without data");
388 let data = (*datas)
389 .as_ref()
390 .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
391 data.subst(&self.parameters)
392 }
393 };
394 write!(f, "impl ")?;
395 write_bounds_like_dyn_trait(&bounds.value, f)?;
396 // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
397 }
398 TypeCtor::Closure { .. } => {
399 let sig = self.parameters[0].callable_sig(f.db);
400 if let Some(sig) = sig {
401 if sig.params().is_empty() {
402 write!(f, "||")?;
403 } else if f.omit_verbose_types() {
404 write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
405 } else {
406 write!(f, "|")?;
407 f.write_joined(sig.params(), ", ")?;
408 write!(f, "|")?;
409 };
410
411 let ret_display = if f.omit_verbose_types() {
412 sig.ret().display_truncated(f.db, f.max_size)
413 } else {
414 sig.ret().display(f.db)
415 };
416 write!(f, " -> {}", ret_display)?;
417 } else {
418 write!(f, "{{closure}}")?;
419 }
420 }
421 }
422 Ok(())
423 }
424}
425
426impl HirDisplay for ProjectionTy {
427 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
428 if f.should_truncate() {
429 return write!(f, "{}", TYPE_HINT_TRUNCATION);
430 }
431
432 let trait_ = f.db.trait_data(self.trait_(f.db));
433 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?;
434 if self.parameters.len() > 1 {
435 write!(f, "<")?;
436 f.write_joined(&self.parameters[1..], ", ")?;
437 write!(f, ">")?;
438 }
439 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
440 Ok(())
441 }
442}
443
444impl HirDisplay for Ty {
445 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
446 if f.should_truncate() {
447 return write!(f, "{}", TYPE_HINT_TRUNCATION);
448 }
449
450 match self {
451 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
452 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
453 Ty::Placeholder(id) => {
454 let generics = generics(f.db.upcast(), id.parent);
455 let param_data = &generics.params.types[id.local_id];
456 match param_data.provenance {
457 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
458 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
459 }
460 TypeParamProvenance::ArgumentImplTrait => {
461 write!(f, "impl ")?;
462 let bounds = f.db.generic_predicates_for_param(*id);
463 let substs = Substs::type_params_for_generics(&generics);
464 write_bounds_like_dyn_trait(
465 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
466 f,
467 )?;
468 }
469 }
470 }
471 Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?,
472 Ty::Dyn(predicates) => {
473 write!(f, "dyn ")?;
474 write_bounds_like_dyn_trait(predicates, f)?;
475 }
476 Ty::Opaque(opaque_ty) => {
477 let bounds = match opaque_ty.opaque_ty_id {
478 OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
479 let datas =
480 f.db.return_type_impl_traits(func).expect("impl trait id without data");
481 let data = (*datas)
482 .as_ref()
483 .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
484 data.subst(&opaque_ty.parameters)
485 }
486 };
487 write!(f, "impl ")?;
488 write_bounds_like_dyn_trait(&bounds.value, f)?;
489 }
490 Ty::Unknown => write!(f, "{{unknown}}")?,
491 Ty::Infer(..) => write!(f, "_")?,
492 }
493 Ok(())
494 }
495}
496
497fn write_bounds_like_dyn_trait(
498 predicates: &[GenericPredicate],
499 f: &mut HirFormatter,
500) -> Result<(), HirDisplayError> {
501 // Note: This code is written to produce nice results (i.e.
502 // corresponding to surface Rust) for types that can occur in
503 // actual Rust. It will have weird results if the predicates
504 // aren't as expected (i.e. self types = $0, projection
505 // predicates for a certain trait come after the Implemented
506 // predicate for that trait).
507 let mut first = true;
508 let mut angle_open = false;
509 for p in predicates.iter() {
510 match p {
511 GenericPredicate::Implemented(trait_ref) => {
512 if angle_open {
513 write!(f, ">")?;
514 }
515 if !first {
516 write!(f, " + ")?;
517 }
518 // We assume that the self type is $0 (i.e. the
519 // existential) here, which is the only thing that's
520 // possible in actual Rust, and hence don't print it
521 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?;
522 if trait_ref.substs.len() > 1 {
523 write!(f, "<")?;
524 f.write_joined(&trait_ref.substs[1..], ", ")?;
525 // there might be assoc type bindings, so we leave the angle brackets open
526 angle_open = true;
527 }
528 }
529 GenericPredicate::Projection(projection_pred) => {
530 // in types in actual Rust, these will always come
531 // after the corresponding Implemented predicate
532 if angle_open {
533 write!(f, ", ")?;
534 } else {
535 write!(f, "<")?;
536 angle_open = true;
537 }
538 let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty);
539 write!(f, "{} = ", type_alias.name)?;
540 projection_pred.ty.hir_fmt(f)?;
541 }
542 GenericPredicate::Error => {
543 if angle_open {
544 // impl Trait<X, {error}>
545 write!(f, ", ")?;
546 } else if !first {
547 // impl Trait + {error}
548 write!(f, " + ")?;
549 }
550 p.hir_fmt(f)?;
551 }
552 }
553 first = false;
554 }
555 if angle_open {
556 write!(f, ">")?;
557 }
558 Ok(())
559}
560
561impl TraitRef {
562 fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> {
563 if f.should_truncate() {
564 return write!(f, "{}", TYPE_HINT_TRUNCATION);
565 }
566
567 self.substs[0].hir_fmt(f)?;
568 if use_as {
569 write!(f, " as ")?;
570 } else {
571 write!(f, ": ")?;
572 }
573 write!(f, "{}", f.db.trait_data(self.trait_).name)?;
574 if self.substs.len() > 1 {
575 write!(f, "<")?;
576 f.write_joined(&self.substs[1..], ", ")?;
577 write!(f, ">")?;
578 }
579 Ok(())
580 }
581}
582
583impl HirDisplay for TraitRef {
584 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
585 self.hir_fmt_ext(f, false)
586 }
587}
588
589impl HirDisplay for &GenericPredicate {
590 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
591 HirDisplay::hir_fmt(*self, f)
592 }
593}
594
595impl HirDisplay for GenericPredicate {
596 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
597 if f.should_truncate() {
598 return write!(f, "{}", TYPE_HINT_TRUNCATION);
599 }
600
601 match self {
602 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
603 GenericPredicate::Projection(projection_pred) => {
604 write!(f, "<")?;
605 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
606 write!(
607 f,
608 ">::{} = {}",
609 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
610 projection_pred.ty.display(f.db)
611 )?;
612 }
613 GenericPredicate::Error => write!(f, "{{error}}")?,
614 }
615 Ok(())
616 }
617}
618
619impl HirDisplay for Obligation {
620 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
621 Ok(match self {
622 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?,
623 Obligation::Projection(proj) => write!(
624 f,
625 "Normalize({} => {})",
626 proj.projection_ty.display(f.db),
627 proj.ty.display(f.db)
628 )?,
629 })
630 }
631}
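
One mechanism in `HirFormatter` above that is easy to miss: `write_fmt` routes all output through an internal buffer so `curr_size` can track how much text has been produced, and `should_truncate` lets each `hir_fmt` implementation bail out with `…` once `max_size` is reached. Below is a minimal, self-contained sketch of that idea; `BudgetFormatter` and its field names are illustrative, not the crate's API.

use std::fmt::{self, Write};

// A sketch of the buffer-and-budget idea: count what has been written and
// switch to the truncation marker once the budget is exhausted.
struct BudgetFormatter<'a> {
    out: &'a mut dyn Write,
    written: usize,
    max: Option<usize>,
}

impl<'a> BudgetFormatter<'a> {
    fn should_truncate(&self) -> bool {
        self.max.map_or(false, |m| self.written >= m)
    }

    fn write(&mut self, s: &str) -> fmt::Result {
        if self.should_truncate() {
            return self.out.write_str("…");
        }
        self.written += s.len();
        self.out.write_str(s)
    }
}

fn main() {
    let mut buf = String::new();
    let mut f = BudgetFormatter { out: &mut buf, written: 0, max: Some(10) };
    for part in ["Vec<", "HashMap<String, Vec<u8>>", ">"] {
        f.write(part).unwrap();
    }
    // Prints "Vec<HashMap<String, Vec<u8>>…": the final ">" falls outside the
    // budget and is replaced by the truncation marker.
    println!("{}", buf);
}

As in the real formatter, the check happens before each chunk is written, so truncation occurs at chunk boundaries rather than mid-piece.
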
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
deleted file mode 100644
index 28f32a0a4..000000000
--- a/crates/ra_hir_ty/src/infer.rs
+++ /dev/null
@@ -1,800 +0,0 @@
1//! Type inference, i.e. the process of walking through the code and determining
2//! the type of each expression and pattern.
3//!
4//! For type inference, compare the implementations in rustc (the various
5//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
6//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
7//! inference here is the `infer` function, which infers the types of all
8//! expressions in a given function.
9//!
10//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
11//! which represent currently unknown types; as we walk through the expressions,
12//! we might determine that certain variables need to be equal to each other, or
13//! to certain types. To record this, we use the union-find implementation from
14//! the `ena` crate, which is extracted from rustc.
15
16use std::borrow::Cow;
17use std::mem;
18use std::ops::Index;
19use std::sync::Arc;
20
21use hir_def::{
22 body::Body,
23 data::{ConstData, FunctionData, StaticData},
24 expr::{BindingAnnotation, ExprId, PatId},
25 lang_item::LangItemTarget,
26 path::{path, Path},
27 resolver::{HasResolver, Resolver, TypeNs},
28 type_ref::{Mutability, TypeRef},
29 AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId,
30 TypeAliasId, VariantId,
31};
32use hir_expand::{diagnostics::DiagnosticSink, name::name};
33use ra_arena::map::ArenaMap;
34use ra_prof::profile;
35use ra_syntax::SmolStr;
36use rustc_hash::FxHashMap;
37use stdx::impl_from;
38
39use super::{
40 primitive::{FloatTy, IntTy},
41 traits::{Guidance, Obligation, ProjectionPredicate, Solution},
42 InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
43};
44use crate::{
45 db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode,
46};
47
48pub(crate) use unify::unify;
49
50macro_rules! ty_app {
51 ($ctor:pat, $param:pat) => {
52 crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
53 };
54 ($ctor:pat) => {
55 ty_app!($ctor, _)
56 };
57}
58
59mod unify;
60mod path;
61mod expr;
62mod pat;
63mod coerce;
64
65/// The entry point of type inference.
66pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
67 let _p = profile("infer_query");
68 let resolver = def.resolver(db.upcast());
69 let mut ctx = InferenceContext::new(db, def, resolver);
70
71 match def {
72 DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
73 DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)),
74 DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
75 }
76
77 ctx.infer_body();
78
79 Arc::new(ctx.resolve_all())
80}
81
82#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
83enum ExprOrPatId {
84 ExprId(ExprId),
85 PatId(PatId),
86}
87impl_from!(ExprId, PatId for ExprOrPatId);
88
89/// Binding modes inferred for patterns.
90/// https://doc.rust-lang.org/reference/patterns.html#binding-modes
91#[derive(Copy, Clone, Debug, Eq, PartialEq)]
92enum BindingMode {
93 Move,
94 Ref(Mutability),
95}
96
97impl BindingMode {
98 pub fn convert(annotation: BindingAnnotation) -> BindingMode {
99 match annotation {
100 BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
101 BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared),
102 BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
103 }
104 }
105}
106
107impl Default for BindingMode {
108 fn default() -> Self {
109 BindingMode::Move
110 }
111}
112
113/// A mismatch between an expected and an inferred type.
114#[derive(Clone, PartialEq, Eq, Debug, Hash)]
115pub struct TypeMismatch {
116 pub expected: Ty,
117 pub actual: Ty,
118}
119
120/// The result of type inference: A mapping from expressions and patterns to types.
121#[derive(Clone, PartialEq, Eq, Debug, Default)]
122pub struct InferenceResult {
123 /// For each method call expr, records the function it resolves to.
124 method_resolutions: FxHashMap<ExprId, FunctionId>,
125 /// For each field access expr, records the field it resolves to.
126 field_resolutions: FxHashMap<ExprId, FieldId>,
127 /// For each field in record literal, records the field it resolves to.
128 record_field_resolutions: FxHashMap<ExprId, FieldId>,
129 record_field_pat_resolutions: FxHashMap<PatId, FieldId>,
130 /// For each struct literal, records the variant it resolves to.
131 variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
132 /// For each associated item record what it resolves to
133 assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
134 diagnostics: Vec<InferenceDiagnostic>,
135 pub type_of_expr: ArenaMap<ExprId, Ty>,
136 pub type_of_pat: ArenaMap<PatId, Ty>,
137 pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>,
138}
139
140impl InferenceResult {
141 pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> {
142 self.method_resolutions.get(&expr).copied()
143 }
144 pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
145 self.field_resolutions.get(&expr).copied()
146 }
147 pub fn record_field_resolution(&self, expr: ExprId) -> Option<FieldId> {
148 self.record_field_resolutions.get(&expr).copied()
149 }
150 pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<FieldId> {
151 self.record_field_pat_resolutions.get(&pat).copied()
152 }
153 pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
154 self.variant_resolutions.get(&id.into()).copied()
155 }
156 pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
157 self.variant_resolutions.get(&id.into()).copied()
158 }
159 pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
160 self.assoc_resolutions.get(&id.into()).copied()
161 }
162 pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
163 self.assoc_resolutions.get(&id.into()).copied()
164 }
165 pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
166 self.type_mismatches.get(expr)
167 }
168 pub fn add_diagnostics(
169 &self,
170 db: &dyn HirDatabase,
171 owner: DefWithBodyId,
172 sink: &mut DiagnosticSink,
173 ) {
174 self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
175 }
176}
177
178impl Index<ExprId> for InferenceResult {
179 type Output = Ty;
180
181 fn index(&self, expr: ExprId) -> &Ty {
182 self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown)
183 }
184}
185
186impl Index<PatId> for InferenceResult {
187 type Output = Ty;
188
189 fn index(&self, pat: PatId) -> &Ty {
190 self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown)
191 }
192}
193
194/// The inference context contains all information needed during type inference.
195#[derive(Clone, Debug)]
196struct InferenceContext<'a> {
197 db: &'a dyn HirDatabase,
198 owner: DefWithBodyId,
199 body: Arc<Body>,
200 resolver: Resolver,
201 table: unify::InferenceTable,
202 trait_env: Arc<TraitEnvironment>,
203 obligations: Vec<Obligation>,
204 result: InferenceResult,
205 /// The return type of the function being inferred, or that of the closure if we're
206 /// currently within one.
207 ///
208 /// We might consider using a nested inference context for checking
209 /// closures, but currently this is the only field that will change there,
210 /// so it doesn't make sense.
211 return_ty: Ty,
212 diverges: Diverges,
213 breakables: Vec<BreakableContext>,
214}
215
216#[derive(Clone, Debug)]
217struct BreakableContext {
218 pub may_break: bool,
219 pub break_ty: Ty,
220 pub label: Option<name::Name>,
221}
222
223fn find_breakable<'c>(
224 ctxs: &'c mut [BreakableContext],
225 label: Option<&name::Name>,
226) -> Option<&'c mut BreakableContext> {
227 match label {
228 Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label),
229 None => ctxs.last_mut(),
230 }
231}
232
233impl<'a> InferenceContext<'a> {
234 fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self {
235 InferenceContext {
236 result: InferenceResult::default(),
237 table: unify::InferenceTable::new(),
238 obligations: Vec::default(),
239 return_ty: Ty::Unknown, // set in collect_fn_signature
240 trait_env: TraitEnvironment::lower(db, &resolver),
241 db,
242 owner,
243 body: db.body(owner),
244 resolver,
245 diverges: Diverges::Maybe,
246 breakables: Vec::new(),
247 }
248 }
249
250 fn resolve_all(mut self) -> InferenceResult {
251 // FIXME resolve obligations as well (use Guidance if necessary)
252 let mut result = std::mem::take(&mut self.result);
253 for ty in result.type_of_expr.values_mut() {
254 let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
255 *ty = resolved;
256 }
257 for ty in result.type_of_pat.values_mut() {
258 let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
259 *ty = resolved;
260 }
261 result
262 }
263
264 fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
265 self.result.type_of_expr.insert(expr, ty);
266 }
267
268 fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) {
269 self.result.method_resolutions.insert(expr, func);
270 }
271
272 fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) {
273 self.result.field_resolutions.insert(expr, field);
274 }
275
276 fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
277 self.result.variant_resolutions.insert(id, variant);
278 }
279
280 fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
281 self.result.assoc_resolutions.insert(id, item);
282 }
283
284 fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
285 self.result.type_of_pat.insert(pat, ty);
286 }
287
288 fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
289 self.result.diagnostics.push(diagnostic);
290 }
291
292 fn make_ty_with_mode(
293 &mut self,
294 type_ref: &TypeRef,
295 impl_trait_mode: ImplTraitLoweringMode,
296 ) -> Ty {
297 // FIXME use right resolver for block
298 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
299 .with_impl_trait_mode(impl_trait_mode);
300 let ty = Ty::from_hir(&ctx, type_ref);
301 let ty = self.insert_type_vars(ty);
302 self.normalize_associated_types_in(ty)
303 }
304
305 fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
306 self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
307 }
308
309 /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
310 fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
311 match ty {
312 Ty::Unknown => self.table.new_type_var(),
313 _ => ty,
314 }
315 }
316
317 fn insert_type_vars(&mut self, ty: Ty) -> Ty {
318 ty.fold(&mut |ty| self.insert_type_vars_shallow(ty))
319 }
320
321 fn resolve_obligations_as_possible(&mut self) {
322 let obligations = mem::replace(&mut self.obligations, Vec::new());
323 for obligation in obligations {
324 let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone());
325 let canonicalized = self.canonicalizer().canonicalize_obligation(in_env);
326 let solution =
327 self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone());
328
329 match solution {
330 Some(Solution::Unique(substs)) => {
331 canonicalized.apply_solution(self, substs.0);
332 }
333 Some(Solution::Ambig(Guidance::Definite(substs))) => {
334 canonicalized.apply_solution(self, substs.0);
335 self.obligations.push(obligation);
336 }
337 Some(_) => {
338 // FIXME use this when trying to resolve everything at the end
339 self.obligations.push(obligation);
340 }
341 None => {
342 // FIXME obligation cannot be fulfilled => diagnostic
343 }
344 };
345 }
346 }
347
348 fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
349 self.table.unify(ty1, ty2)
350 }
351
352 /// Resolves the type as far as currently possible, replacing type variables
353 /// by their known types. All types returned by the infer_* functions should
354 /// be resolved as far as possible, i.e. contain no type variables with
355 /// known type.
356 fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
357 self.resolve_obligations_as_possible();
358
359 self.table.resolve_ty_as_possible(ty)
360 }
361
362 fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
363 self.table.resolve_ty_shallow(ty)
364 }
365
366 fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
367 self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
368 }
369
370 fn resolve_associated_type_with_params(
371 &mut self,
372 inner_ty: Ty,
373 assoc_ty: Option<TypeAliasId>,
374 params: &[Ty],
375 ) -> Ty {
376 match assoc_ty {
377 Some(res_assoc_ty) => {
378 let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
379 hir_def::AssocContainerId::TraitId(trait_) => trait_,
380 _ => panic!("resolve_associated_type called with non-associated type"),
381 };
382 let ty = self.table.new_type_var();
383 let substs = Substs::build_for_def(self.db, res_assoc_ty)
384 .push(inner_ty)
385 .fill(params.iter().cloned())
386 .build();
387 let trait_ref = TraitRef { trait_, substs: substs.clone() };
388 let projection = ProjectionPredicate {
389 ty: ty.clone(),
390 projection_ty: ProjectionTy { associated_ty: res_assoc_ty, parameters: substs },
391 };
392 self.obligations.push(Obligation::Trait(trait_ref));
393 self.obligations.push(Obligation::Projection(projection));
394 self.resolve_ty_as_possible(ty)
395 }
396 None => Ty::Unknown,
397 }
398 }
399
400 /// Recurses through the given type, normalizing associated types mentioned
401 /// in it by replacing them by type variables and registering obligations to
402 /// resolve later. This should be done once for every type we get from some
403 /// type annotation (e.g. from a let type annotation, field type or function
404 /// call). `make_ty` handles this already, but e.g. for field types we need
405 /// to do it as well.
406 fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
407 let ty = self.resolve_ty_as_possible(ty);
408 ty.fold(&mut |ty| match ty {
409 Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty),
410 _ => ty,
411 })
412 }
413
414 fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
415 let var = self.table.new_type_var();
416 let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
417 let obligation = Obligation::Projection(predicate);
418 self.obligations.push(obligation);
419 var
420 }
421
422 fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
423 let path = match path {
424 Some(path) => path,
425 None => return (Ty::Unknown, None),
426 };
427 let resolver = &self.resolver;
428 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
429 // FIXME: this should resolve assoc items as well, see this example:
430 // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
431 let (resolution, unresolved) =
432 match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
433 Some(it) => it,
434 None => return (Ty::Unknown, None),
435 };
436 return match resolution {
437 TypeNs::AdtId(AdtId::StructId(strukt)) => {
438 let substs = Ty::substs_from_path(&ctx, path, strukt.into(), true);
439 let ty = self.db.ty(strukt.into());
440 let ty = self.insert_type_vars(ty.subst(&substs));
441 forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
442 }
443 TypeNs::EnumVariantId(var) => {
444 let substs = Ty::substs_from_path(&ctx, path, var.into(), true);
445 let ty = self.db.ty(var.parent.into());
446 let ty = self.insert_type_vars(ty.subst(&substs));
447 forbid_unresolved_segments((ty, Some(var.into())), unresolved)
448 }
449 TypeNs::SelfType(impl_id) => {
450 let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
451 let substs = Substs::type_params_for_generics(&generics);
452 let ty = self.db.impl_self_ty(impl_id).subst(&substs);
453 match unresolved {
454 None => {
455 let variant = ty_variant(&ty);
456 (ty, variant)
457 }
458 Some(1) => {
459 let segment = path.mod_path().segments.last().unwrap();
460 // this could be an enum variant or associated type
461 if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
462 let enum_data = self.db.enum_data(enum_id);
463 if let Some(local_id) = enum_data.variant(segment) {
464 let variant = EnumVariantId { parent: enum_id, local_id };
465 return (ty, Some(variant.into()));
466 }
467 }
468 // FIXME potentially resolve assoc type
469 (Ty::Unknown, None)
470 }
471 Some(_) => {
472 // FIXME diagnostic
473 (Ty::Unknown, None)
474 }
475 }
476 }
477 TypeNs::TypeAliasId(it) => {
478 let substs = Substs::build_for_def(self.db, it)
479 .fill(std::iter::repeat_with(|| self.table.new_type_var()))
480 .build();
481 let ty = self.db.ty(it.into()).subst(&substs);
482 let variant = ty_variant(&ty);
483 forbid_unresolved_segments((ty, variant), unresolved)
484 }
485 TypeNs::AdtSelfType(_) => {
486 // FIXME this could happen in array size expressions, once we're checking them
487 (Ty::Unknown, None)
488 }
489 TypeNs::GenericParam(_) => {
490 // FIXME potentially resolve assoc type
491 (Ty::Unknown, None)
492 }
493 TypeNs::AdtId(AdtId::EnumId(_))
494 | TypeNs::AdtId(AdtId::UnionId(_))
495 | TypeNs::BuiltinType(_)
496 | TypeNs::TraitId(_) => {
497 // FIXME diagnostic
498 (Ty::Unknown, None)
499 }
500 };
501
502 fn forbid_unresolved_segments(
503 result: (Ty, Option<VariantId>),
504 unresolved: Option<usize>,
505 ) -> (Ty, Option<VariantId>) {
506 if unresolved.is_none() {
507 result
508 } else {
509 // FIXME diagnostic
510 (Ty::Unknown, None)
511 }
512 }
513
514 fn ty_variant(ty: &Ty) -> Option<VariantId> {
515 ty.as_adt().and_then(|(adt_id, _)| match adt_id {
516 AdtId::StructId(s) => Some(VariantId::StructId(s)),
517 AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
518 AdtId::EnumId(_) => {
519 // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
520 None
521 }
522 })
523 }
524 }
525
526 fn collect_const(&mut self, data: &ConstData) {
527 self.return_ty = self.make_ty(&data.type_ref);
528 }
529
530 fn collect_static(&mut self, data: &StaticData) {
531 self.return_ty = self.make_ty(&data.type_ref);
532 }
533
534 fn collect_fn(&mut self, data: &FunctionData) {
535 let body = Arc::clone(&self.body); // avoid borrow checker problem
536 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
537 .with_impl_trait_mode(ImplTraitLoweringMode::Param);
538 let param_tys =
539 data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::<Vec<_>>();
540 for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) {
541 let ty = self.insert_type_vars(ty);
542 let ty = self.normalize_associated_types_in(ty);
543
544 self.infer_pat(*pat, &ty, BindingMode::default());
545 }
546 let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT
547 self.return_ty = return_ty;
548 }
549
550 fn infer_body(&mut self) {
551 self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
552 }
553
554 fn resolve_lang_item(&self, name: &str) -> Option<LangItemTarget> {
555 let krate = self.resolver.krate()?;
556 let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes());
557 self.db.lang_item(krate, name)
558 }
559
560 fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
561 let path = path![core::iter::IntoIterator];
562 let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
563 self.db.trait_data(trait_).associated_type_by_name(&name![Item])
564 }
565
566 fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
567 let path = path![core::ops::Try];
568 let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
569 self.db.trait_data(trait_).associated_type_by_name(&name![Ok])
570 }
571
572 fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
573 let trait_ = self.resolve_lang_item("neg")?.as_trait()?;
574 self.db.trait_data(trait_).associated_type_by_name(&name![Output])
575 }
576
577 fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
578 let trait_ = self.resolve_lang_item("not")?.as_trait()?;
579 self.db.trait_data(trait_).associated_type_by_name(&name![Output])
580 }
581
582 fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
583 let trait_ = self.resolve_lang_item("future_trait")?.as_trait()?;
584 self.db.trait_data(trait_).associated_type_by_name(&name![Output])
585 }
586
587 fn resolve_boxed_box(&self) -> Option<AdtId> {
588 let struct_ = self.resolve_lang_item("owned_box")?.as_struct()?;
589 Some(struct_.into())
590 }
591
592 fn resolve_range_full(&self) -> Option<AdtId> {
593 let path = path![core::ops::RangeFull];
594 let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
595 Some(struct_.into())
596 }
597
598 fn resolve_range(&self) -> Option<AdtId> {
599 let path = path![core::ops::Range];
600 let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
601 Some(struct_.into())
602 }
603
604 fn resolve_range_inclusive(&self) -> Option<AdtId> {
605 let path = path![core::ops::RangeInclusive];
606 let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
607 Some(struct_.into())
608 }
609
610 fn resolve_range_from(&self) -> Option<AdtId> {
611 let path = path![core::ops::RangeFrom];
612 let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
613 Some(struct_.into())
614 }
615
616 fn resolve_range_to(&self) -> Option<AdtId> {
617 let path = path![core::ops::RangeTo];
618 let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
619 Some(struct_.into())
620 }
621
622 fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
623 let path = path![core::ops::RangeToInclusive];
624 let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
625 Some(struct_.into())
626 }
627
628 fn resolve_ops_index(&self) -> Option<TraitId> {
629 self.resolve_lang_item("index")?.as_trait()
630 }
631
632 fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
633 let trait_ = self.resolve_ops_index()?;
634 self.db.trait_data(trait_).associated_type_by_name(&name![Output])
635 }
636}
637
638/// The kinds of placeholders we need during type inference. There are separate
639/// values for general types, and for integer and float variables. The latter
640/// two are used for inference of literal values (e.g. `100` could be one of
641/// several integer types).
642#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
643pub enum InferTy {
644 TypeVar(unify::TypeVarId),
645 IntVar(unify::TypeVarId),
646 FloatVar(unify::TypeVarId),
647 MaybeNeverTypeVar(unify::TypeVarId),
648}
649
650impl InferTy {
651 fn to_inner(self) -> unify::TypeVarId {
652 match self {
653 InferTy::TypeVar(ty)
654 | InferTy::IntVar(ty)
655 | InferTy::FloatVar(ty)
656 | InferTy::MaybeNeverTypeVar(ty) => ty,
657 }
658 }
659
660 fn fallback_value(self) -> Ty {
661 match self {
662 InferTy::TypeVar(..) => Ty::Unknown,
663 InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(IntTy::i32())),
664 InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(FloatTy::f64())),
665 InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never),
666 }
667 }
668}
669
670/// When inferring an expression, we propagate downward whatever type hint we
671/// can, in the form of an `Expectation`.
672#[derive(Clone, PartialEq, Eq, Debug)]
673struct Expectation {
674 ty: Ty,
675 /// See the `rvalue_hint` method.
676 rvalue_hint: bool,
677}
678
679impl Expectation {
680 /// The expectation that the type of the expression needs to equal the given
681 /// type.
682 fn has_type(ty: Ty) -> Self {
683 Expectation { ty, rvalue_hint: false }
684 }
685
686 /// The following explanation is copied straight from rustc:
687 /// Provides an expectation for an rvalue expression given an *optional*
688 /// hint, which is not required for type safety (the resulting type might
689 /// be checked higher up, as is the case with `&expr` and `box expr`), but
690 /// is useful in determining the concrete type.
691 ///
692 /// The primary use case is where the expected type is a fat pointer,
693 /// like `&[isize]`. For example, consider the following statement:
694 ///
695 /// let x: &[isize] = &[1, 2, 3];
696 ///
697 /// In this case, the expected type for the `&[1, 2, 3]` expression is
698 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
699 /// expectation `ExpectHasType([isize])`, that would be too strong --
700 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
701 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
702 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
703 /// which still is useful, because it informs integer literals and the like.
704 /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
705 /// for examples of where this comes up.
706 fn rvalue_hint(ty: Ty) -> Self {
707 Expectation { ty, rvalue_hint: true }
708 }
709
710 /// This expresses no expectation on the type.
711 fn none() -> Self {
712 Expectation { ty: Ty::Unknown, rvalue_hint: false }
713 }
714
715 fn coercion_target(&self) -> &Ty {
716 if self.rvalue_hint {
717 &Ty::Unknown
718 } else {
719 &self.ty
720 }
721 }
722}
723
724#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
725enum Diverges {
726 Maybe,
727 Always,
728}
729
730impl Diverges {
731 fn is_always(self) -> bool {
732 self == Diverges::Always
733 }
734}
735
736impl std::ops::BitAnd for Diverges {
737 type Output = Self;
738 fn bitand(self, other: Self) -> Self {
739 std::cmp::min(self, other)
740 }
741}
742
743impl std::ops::BitOr for Diverges {
744 type Output = Self;
745 fn bitor(self, other: Self) -> Self {
746 std::cmp::max(self, other)
747 }
748}
749
750impl std::ops::BitAndAssign for Diverges {
751 fn bitand_assign(&mut self, other: Self) {
752 *self = *self & other;
753 }
754}
755
756impl std::ops::BitOrAssign for Diverges {
757 fn bitor_assign(&mut self, other: Self) {
758 *self = *self | other;
759 }
760}
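// Illustrative aside, not part of the original file: `Diverges` forms a tiny
// lattice with `Maybe < Always`. `&` takes the minimum, so a combined value
// only counts as diverging when *both* operands diverge (e.g. both arms of an
// `if`), while `|` takes the maximum, so a sequence diverges as soon as
// *either* part does (e.g. a diverging condition). With the impls above:
//
//     assert_eq!(Diverges::Maybe & Diverges::Always, Diverges::Maybe);
//     assert_eq!(Diverges::Maybe | Diverges::Always, Diverges::Always);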
761
762mod diagnostics {
763 use hir_def::{expr::ExprId, DefWithBodyId};
764 use hir_expand::diagnostics::DiagnosticSink;
765
766 use crate::{
767 db::HirDatabase,
768 diagnostics::{BreakOutsideOfLoop, NoSuchField},
769 };
770
771 #[derive(Debug, PartialEq, Eq, Clone)]
772 pub(super) enum InferenceDiagnostic {
773 NoSuchField { expr: ExprId, field: usize },
774 BreakOutsideOfLoop { expr: ExprId },
775 }
776
777 impl InferenceDiagnostic {
778 pub(super) fn add_to(
779 &self,
780 db: &dyn HirDatabase,
781 owner: DefWithBodyId,
782 sink: &mut DiagnosticSink,
783 ) {
784 match self {
785 InferenceDiagnostic::NoSuchField { expr, field } => {
786 let (_, source_map) = db.body_with_source_map(owner);
787 let field = source_map.field_syntax(*expr, *field);
788 sink.push(NoSuchField { file: field.file_id, field: field.value })
789 }
790 InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
791 let (_, source_map) = db.body_with_source_map(owner);
792 let ptr = source_map
793 .expr_syntax(*expr)
794 .expect("break outside of loop in synthetic syntax");
795 sink.push(BreakOutsideOfLoop { file: ptr.file_id, expr: ptr.value })
796 }
797 }
798 }
799 }
800}
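As a minimal, standalone sketch of the literal fallback described by the `InferTy` comment above (not part of the deleted file; names are made up for illustration): an unconstrained integer literal gets an integer variable that falls back to `i32`, an unconstrained float literal falls back to `f64`, and an expectation from the context pins a different type instead.

fn main() {
    let a = 100;      // no other constraint: the IntVar falls back to i32
    let b: u8 = 100;  // the annotation supplies an expectation, so the literal is u8
    let c = 1.5;      // unconstrained FloatVar: falls back to f64
    println!("{} {} {}", a, b, c);
}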
diff --git a/crates/ra_hir_ty/src/infer/coerce.rs b/crates/ra_hir_ty/src/infer/coerce.rs
deleted file mode 100644
index 32c7c57cd..000000000
--- a/crates/ra_hir_ty/src/infer/coerce.rs
+++ /dev/null
@@ -1,197 +0,0 @@
1//! Coercion logic. Coercions are certain type conversions that can implicitly
2//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
3//! like going from `&Vec<T>` to `&[T]`.
4//!
5//! See: https://doc.rust-lang.org/nomicon/coercions.html
6
7use hir_def::{lang_item::LangItemTarget, type_ref::Mutability};
8use test_utils::mark;
9
10use crate::{autoderef, traits::Solution, Obligation, Substs, TraitRef, Ty, TypeCtor};
11
12use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext};
13
14impl<'a> InferenceContext<'a> {
15 /// Unify two types, but may coerce the first one to the second one
16 /// using "implicit coercion rules" if needed.
17 pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
18 let from_ty = self.resolve_ty_shallow(from_ty).into_owned();
19 let to_ty = self.resolve_ty_shallow(to_ty);
20 self.coerce_inner(from_ty, &to_ty)
21 }
22
23 /// Merge two types from different branches, with possible coercion.
24 ///
25 /// Mostly this means trying to coerce one to the other, but
26 /// - if we have two function types for different functions, we need to
27 /// coerce both to function pointers;
28 /// - if we were concerned with lifetime subtyping, we'd need to look for a
29 /// least upper bound.
30 pub(super) fn coerce_merge_branch(&mut self, ty1: &Ty, ty2: &Ty) -> Ty {
31 if self.coerce(ty1, ty2) {
32 ty2.clone()
33 } else if self.coerce(ty2, ty1) {
34 ty1.clone()
35 } else {
36 if let (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnDef(_))) = (ty1, ty2) {
37 mark::hit!(coerce_fn_reification);
38 // Special case: two function types. Try to coerce both to
39 // pointers to have a chance at getting a match. See
40 // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
41 let sig1 = ty1.callable_sig(self.db).expect("FnDef without callable sig");
42 let sig2 = ty2.callable_sig(self.db).expect("FnDef without callable sig");
43 let ptr_ty1 = Ty::fn_ptr(sig1);
44 let ptr_ty2 = Ty::fn_ptr(sig2);
45 self.coerce_merge_branch(&ptr_ty1, &ptr_ty2)
46 } else {
47 mark::hit!(coerce_merge_fail_fallback);
48 ty1.clone()
49 }
50 }
51 }
52
53 fn coerce_inner(&mut self, mut from_ty: Ty, to_ty: &Ty) -> bool {
54 match (&from_ty, to_ty) {
55 // The never type makes the type variable fall back to the never type instead of Unknown.
56 (ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => {
57 let var = self.table.new_maybe_never_type_var();
58 self.table.var_unification_table.union_value(*tv, TypeVarValue::Known(var));
59 return true;
60 }
61 (ty_app!(TypeCtor::Never), _) => return true,
62
63 // Trivial cases; this should go after the `never` check to
64 // avoid inferring the result type as never
65 _ => {
66 if self.table.unify_inner_trivial(&from_ty, &to_ty, 0) {
67 return true;
68 }
69 }
70 }
71
72 // Pointer weakening and function to pointer
73 match (&mut from_ty, to_ty) {
74 // `*mut T`, `&mut T`, `&T` -> `*const T`
75 // `&mut T` -> `&T`
76 // `&mut T` -> `*mut T`
77 (ty_app!(c1@TypeCtor::RawPtr(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
78 | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
79 | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::Ref(Mutability::Shared)))
80 | (ty_app!(c1@TypeCtor::Ref(Mutability::Mut)), ty_app!(c2@TypeCtor::RawPtr(_))) => {
81 *c1 = *c2;
82 }
83
84 // Illegal mutability conversion
85 (
86 ty_app!(TypeCtor::RawPtr(Mutability::Shared)),
87 ty_app!(TypeCtor::RawPtr(Mutability::Mut)),
88 )
89 | (
90 ty_app!(TypeCtor::Ref(Mutability::Shared)),
91 ty_app!(TypeCtor::Ref(Mutability::Mut)),
92 ) => return false,
93
94 // `{function_type}` -> `fn()`
95 (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnPtr { .. })) => {
96 match from_ty.callable_sig(self.db) {
97 None => return false,
98 Some(sig) => {
99 from_ty = Ty::fn_ptr(sig);
100 }
101 }
102 }
103
104 (ty_app!(TypeCtor::Closure { .. }, params), ty_app!(TypeCtor::FnPtr { .. })) => {
105 from_ty = params[0].clone();
106 }
107
108 _ => {}
109 }
110
111 if let Some(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
112 return ret;
113 }
114
115 // Auto-deref if we cannot coerce
116 match (&from_ty, to_ty) {
117 // FIXME: DerefMut
118 (ty_app!(TypeCtor::Ref(_), st1), ty_app!(TypeCtor::Ref(_), st2)) => {
119 self.unify_autoderef_behind_ref(&st1[0], &st2[0])
120 }
121
122 // Otherwise, normal unify
123 _ => self.unify(&from_ty, to_ty),
124 }
125 }
126
127 /// Coerce a type using `from_ty: CoerceUnsized<to_ty>`
128 ///
129 /// See: https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html
130 fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> Option<bool> {
131 let krate = self.resolver.krate().unwrap();
132 let coerce_unsized_trait = match self.db.lang_item(krate, "coerce_unsized".into()) {
133 Some(LangItemTarget::TraitId(trait_)) => trait_,
134 _ => return None,
135 };
136
137 let generic_params = crate::utils::generics(self.db.upcast(), coerce_unsized_trait.into());
138 if generic_params.len() != 2 {
139 // The CoerceUnsized trait should have two generic params: Self and T.
140 return None;
141 }
142
143 let substs = Substs::build_for_generics(&generic_params)
144 .push(from_ty.clone())
145 .push(to_ty.clone())
146 .build();
147 let trait_ref = TraitRef { trait_: coerce_unsized_trait, substs };
148 let goal = InEnvironment::new(self.trait_env.clone(), Obligation::Trait(trait_ref));
149
150 let canonicalizer = self.canonicalizer();
151 let canonicalized = canonicalizer.canonicalize_obligation(goal);
152
153 let solution = self.db.trait_solve(krate, canonicalized.value.clone())?;
154
155 match solution {
156 Solution::Unique(v) => {
157 canonicalized.apply_solution(self, v.0);
158 }
159 _ => return None,
160 };
161
162 Some(true)
163 }
164
165 /// Unify `from_ty` to `to_ty` with optional auto Deref
166 ///
167 /// Note that the parameters have already been stripped of the outer reference.
168 fn unify_autoderef_behind_ref(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
169 let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone());
170 let to_ty = self.resolve_ty_shallow(&to_ty);
171 // FIXME: Auto DerefMut
172 for derefed_ty in autoderef::autoderef(
173 self.db,
174 self.resolver.krate(),
175 InEnvironment {
176 value: canonicalized.value.clone(),
177 environment: self.trait_env.clone(),
178 },
179 ) {
180 let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value);
181 match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) {
182 // Stop when constructor matches.
183 (ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => {
184 // It will not recurse to `coerce`.
185 return self.table.unify_substs(st1, st2, 0);
186 }
187 _ => {
188 if self.table.unify_inner_trivial(&derefed_ty, &to_ty, 0) {
189 return true;
190 }
191 }
192 }
193 }
194
195 false
196 }
197}
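As a rough, self-contained Rust sketch of the coercions this module handles (not part of the deleted file; the helper functions are made up for illustration): weakening `&mut T` to `&T`, deref coercion from `&Vec<T>` to `&[T]`, and reifying two distinct function items to a common `fn` pointer, the `coerce_fn_reification` case handled by `coerce_merge_branch` above.

fn takes_shared(_: &i32) {}
fn takes_slice(_: &[i32]) {}

fn one() -> i32 { 1 }
fn two() -> i32 { 2 }

fn main() {
    let mut x = 1;
    let r = &mut x;
    takes_shared(r); // `&mut i32` weakened to `&i32`

    let v = vec![1, 2, 3];
    takes_slice(&v); // deref coercion: `&Vec<i32>` -> `&[i32]`

    // Two distinct fn item types only unify after both are coerced to `fn() -> i32`,
    // which is what merging the two branches requires here.
    let f = if v.len() > 2 { one } else { two };
    println!("{}", f());
}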
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs
deleted file mode 100644
index 731b062c2..000000000
--- a/crates/ra_hir_ty/src/infer/expr.rs
+++ /dev/null
@@ -1,873 +0,0 @@
1//! Type inference for expressions.
2
3use std::iter::{repeat, repeat_with};
4use std::{mem, sync::Arc};
5
6use hir_def::{
7 builtin_type::Signedness,
8 expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
9 path::{GenericArg, GenericArgs},
10 resolver::resolver_for_expr,
11 AdtId, AssocContainerId, FieldId, Lookup,
12};
13use hir_expand::name::{name, Name};
14use ra_syntax::ast::RangeOp;
15
16use crate::{
17 autoderef, method_resolution, op,
18 traits::{FnTrait, InEnvironment},
19 utils::{generics, variant_data, Generics},
20 ApplicationTy, Binders, CallableDefId, InferTy, IntTy, Mutability, Obligation, Rawness, Substs,
21 TraitRef, Ty, TypeCtor,
22};
23
24use super::{
25 find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext,
26 InferenceDiagnostic, TypeMismatch,
27};
28
29impl<'a> InferenceContext<'a> {
30 pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
31 let ty = self.infer_expr_inner(tgt_expr, expected);
32 if ty.is_never() {
33 // Any expression that produces a value of type `!` must have diverged
34 self.diverges = Diverges::Always;
35 }
36 let could_unify = self.unify(&ty, &expected.ty);
37 if !could_unify {
38 self.result.type_mismatches.insert(
39 tgt_expr,
40 TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() },
41 );
42 }
43 self.resolve_ty_as_possible(ty)
44 }
45
46 /// Infer the type of an expression, with a possible implicit coercion to the expected type.
47 /// Returns the type after possible coercion.
48 pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
49 let ty = self.infer_expr_inner(expr, &expected);
50 let ty = if !self.coerce(&ty, &expected.coercion_target()) {
51 self.result
52 .type_mismatches
53 .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() });
54 // Return the actual type on mismatch;
55 // this is needed for diagnostics when the return type mismatches.
56 ty
57 } else if expected.coercion_target() == &Ty::Unknown {
58 ty
59 } else {
60 expected.ty.clone()
61 };
62
63 self.resolve_ty_as_possible(ty)
64 }
65
66 fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
67 let krate = self.resolver.krate()?;
68 let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
69 let output_assoc_type =
70 self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
71 let generic_params = generics(self.db.upcast(), fn_once_trait.into());
72 if generic_params.len() != 2 {
73 return None;
74 }
75
76 let mut param_builder = Substs::builder(num_args);
77 let mut arg_tys = vec![];
78 for _ in 0..num_args {
79 let arg = self.table.new_type_var();
80 param_builder = param_builder.push(arg.clone());
81 arg_tys.push(arg);
82 }
83 let parameters = param_builder.build();
84 let arg_ty = Ty::Apply(ApplicationTy {
85 ctor: TypeCtor::Tuple { cardinality: num_args as u16 },
86 parameters,
87 });
88 let substs =
89 Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build();
90
91 let trait_env = Arc::clone(&self.trait_env);
92 let implements_fn_trait =
93 Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() });
94 let goal = self.canonicalizer().canonicalize_obligation(InEnvironment {
95 value: implements_fn_trait.clone(),
96 environment: trait_env,
97 });
98 if self.db.trait_solve(krate, goal.value).is_some() {
99 self.obligations.push(implements_fn_trait);
100 let output_proj_ty =
101 crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs };
102 let return_ty = self.normalize_projection_ty(output_proj_ty);
103 Some((arg_tys, return_ty))
104 } else {
105 None
106 }
107 }
108
109 pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
110 match ty.callable_sig(self.db) {
111 Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
112 None => self.callable_sig_from_fn_trait(ty, num_args),
113 }
114 }
115
116 fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
117 let body = Arc::clone(&self.body); // avoid borrow checker problem
118 let ty = match &body[tgt_expr] {
119 Expr::Missing => Ty::Unknown,
120 Expr::If { condition, then_branch, else_branch } => {
121 // if let is desugared to match, so this is always simple if
122 self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
123
124 let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
125 let mut both_arms_diverge = Diverges::Always;
126
127 let then_ty = self.infer_expr_inner(*then_branch, &expected);
128 both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
129 let else_ty = match else_branch {
130 Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
131 None => Ty::unit(),
132 };
133 both_arms_diverge &= self.diverges;
134
135 self.diverges = condition_diverges | both_arms_diverge;
136
137 self.coerce_merge_branch(&then_ty, &else_ty)
138 }
139 Expr::Block { statements, tail, .. } => {
140 // FIXME: Breakable block inference
141 self.infer_block(statements, *tail, expected)
142 }
143 Expr::Unsafe { body } => self.infer_expr(*body, expected),
144 Expr::TryBlock { body } => {
145 let _inner = self.infer_expr(*body, expected);
146 // FIXME should be std::result::Result<{inner}, _>
147 Ty::Unknown
148 }
149 Expr::Loop { body, label } => {
150 self.breakables.push(BreakableContext {
151 may_break: false,
152 break_ty: self.table.new_type_var(),
153 label: label.clone(),
154 });
155 self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
156
157 let ctxt = self.breakables.pop().expect("breakable stack broken");
158 if ctxt.may_break {
159 self.diverges = Diverges::Maybe;
160 }
161
162 if ctxt.may_break {
163 ctxt.break_ty
164 } else {
165 Ty::simple(TypeCtor::Never)
166 }
167 }
168 Expr::While { condition, body, label } => {
169 self.breakables.push(BreakableContext {
170 may_break: false,
171 break_ty: Ty::Unknown,
172 label: label.clone(),
173 });
174 // while let is desugared to a match loop, so this is always simple while
175 self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
176 self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
177 let _ctxt = self.breakables.pop().expect("breakable stack broken");
178 // the body may not run, so it diverging doesn't mean we diverge
179 self.diverges = Diverges::Maybe;
180 Ty::unit()
181 }
182 Expr::For { iterable, body, pat, label } => {
183 let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
184
185 self.breakables.push(BreakableContext {
186 may_break: false,
187 break_ty: Ty::Unknown,
188 label: label.clone(),
189 });
190 let pat_ty =
191 self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
192
193 self.infer_pat(*pat, &pat_ty, BindingMode::default());
194
195 self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
196 let _ctxt = self.breakables.pop().expect("breakable stack broken");
197 // the body may not run, so it diverging doesn't mean we diverge
198 self.diverges = Diverges::Maybe;
199 Ty::unit()
200 }
201 Expr::Lambda { body, args, ret_type, arg_types } => {
202 assert_eq!(args.len(), arg_types.len());
203
204 let mut sig_tys = Vec::new();
205
206 // collect explicitly written argument types
207 for arg_type in arg_types.iter() {
208 let arg_ty = if let Some(type_ref) = arg_type {
209 self.make_ty(type_ref)
210 } else {
211 self.table.new_type_var()
212 };
213 sig_tys.push(arg_ty);
214 }
215
216 // add return type
217 let ret_ty = match ret_type {
218 Some(type_ref) => self.make_ty(type_ref),
219 None => self.table.new_type_var(),
220 };
221 sig_tys.push(ret_ty.clone());
222 let sig_ty = Ty::apply(
223 TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1, is_varargs: false },
224 Substs(sig_tys.clone().into()),
225 );
226 let closure_ty =
227 Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);
228
229 // Eagerly try to relate the closure type with the expected
230 // type, otherwise we often won't have enough information to
231 // infer the body.
232 self.coerce(&closure_ty, &expected.ty);
233
234 // Now go through the argument patterns
235 for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
236 let resolved = self.resolve_ty_as_possible(arg_ty);
237 self.infer_pat(*arg_pat, &resolved, BindingMode::default());
238 }
239
240 let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
241 let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
242
243 self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
244
245 self.diverges = prev_diverges;
246 self.return_ty = prev_ret_ty;
247
248 closure_ty
249 }
250 Expr::Call { callee, args } => {
251 let callee_ty = self.infer_expr(*callee, &Expectation::none());
252 let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone());
253 let mut derefs = autoderef(
254 self.db,
255 self.resolver.krate(),
256 InEnvironment {
257 value: canonicalized.value.clone(),
258 environment: self.trait_env.clone(),
259 },
260 );
261 let (param_tys, ret_ty): (Vec<Ty>, Ty) = derefs
262 .find_map(|callee_deref_ty| {
263 self.callable_sig(
264 &canonicalized.decanonicalize_ty(callee_deref_ty.value),
265 args.len(),
266 )
267 })
268 .unwrap_or((Vec::new(), Ty::Unknown));
269 self.register_obligations_for_call(&callee_ty);
270 self.check_call_arguments(args, &param_tys);
271 self.normalize_associated_types_in(ret_ty)
272 }
273 Expr::MethodCall { receiver, args, method_name, generic_args } => self
274 .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()),
275 Expr::Match { expr, arms } => {
276 let input_ty = self.infer_expr(*expr, &Expectation::none());
277
278 let mut result_ty = if arms.is_empty() {
279 Ty::simple(TypeCtor::Never)
280 } else {
281 self.table.new_type_var()
282 };
283
284 let matchee_diverges = self.diverges;
285 let mut all_arms_diverge = Diverges::Always;
286
287 for arm in arms {
288 self.diverges = Diverges::Maybe;
289 let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
290 if let Some(guard_expr) = arm.guard {
291 self.infer_expr(
292 guard_expr,
293 &Expectation::has_type(Ty::simple(TypeCtor::Bool)),
294 );
295 }
296
297 let arm_ty = self.infer_expr_inner(arm.expr, &expected);
298 all_arms_diverge &= self.diverges;
299 result_ty = self.coerce_merge_branch(&result_ty, &arm_ty);
300 }
301
302 self.diverges = matchee_diverges | all_arms_diverge;
303
304 result_ty
305 }
306 Expr::Path(p) => {
307 // FIXME this could be more efficient...
308 let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
309 self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
310 }
311 Expr::Continue { .. } => Ty::simple(TypeCtor::Never),
312 Expr::Break { expr, label } => {
313 let val_ty = if let Some(expr) = expr {
314 self.infer_expr(*expr, &Expectation::none())
315 } else {
316 Ty::unit()
317 };
318
319 let last_ty =
320 if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
321 ctxt.break_ty.clone()
322 } else {
323 Ty::Unknown
324 };
325
326 let merged_type = self.coerce_merge_branch(&last_ty, &val_ty);
327
328 if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
329 ctxt.break_ty = merged_type;
330 ctxt.may_break = true;
331 } else {
332 self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
333 expr: tgt_expr,
334 });
335 }
336
337 Ty::simple(TypeCtor::Never)
338 }
339 Expr::Return { expr } => {
340 if let Some(expr) = expr {
341 self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
342 } else {
343 let unit = Ty::unit();
344 self.coerce(&unit, &self.return_ty.clone());
345 }
346 Ty::simple(TypeCtor::Never)
347 }
348 Expr::RecordLit { path, fields, spread } => {
349 let (ty, def_id) = self.resolve_variant(path.as_ref());
350 if let Some(variant) = def_id {
351 self.write_variant_resolution(tgt_expr.into(), variant);
352 }
353
354 self.unify(&ty, &expected.ty);
355
356 let substs = ty.substs().unwrap_or_else(Substs::empty);
357 let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
358 let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it));
359 for (field_idx, field) in fields.iter().enumerate() {
360 let field_def =
361 variant_data.as_ref().and_then(|it| match it.field(&field.name) {
362 Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
363 None => {
364 self.push_diagnostic(InferenceDiagnostic::NoSuchField {
365 expr: tgt_expr,
366 field: field_idx,
367 });
368 None
369 }
370 });
371 if let Some(field_def) = field_def {
372 self.result.record_field_resolutions.insert(field.expr, field_def);
373 }
374 let field_ty = field_def
375 .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs));
376 self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
377 }
378 if let Some(expr) = spread {
379 self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
380 }
381 ty
382 }
383 Expr::Field { expr, name } => {
384 let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
385 let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
386 let ty = autoderef::autoderef(
387 self.db,
388 self.resolver.krate(),
389 InEnvironment {
390 value: canonicalized.value.clone(),
391 environment: self.trait_env.clone(),
392 },
393 )
394 .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
395 Ty::Apply(a_ty) => match a_ty.ctor {
396 TypeCtor::Tuple { .. } => name
397 .as_tuple_index()
398 .and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
399 TypeCtor::Adt(AdtId::StructId(s)) => {
400 self.db.struct_data(s).variant_data.field(name).map(|local_id| {
401 let field = FieldId { parent: s.into(), local_id };
402 self.write_field_resolution(tgt_expr, field);
403 self.db.field_types(s.into())[field.local_id]
404 .clone()
405 .subst(&a_ty.parameters)
406 })
407 }
408 TypeCtor::Adt(AdtId::UnionId(u)) => {
409 self.db.union_data(u).variant_data.field(name).map(|local_id| {
410 let field = FieldId { parent: u.into(), local_id };
411 self.write_field_resolution(tgt_expr, field);
412 self.db.field_types(u.into())[field.local_id]
413 .clone()
414 .subst(&a_ty.parameters)
415 })
416 }
417 _ => None,
418 },
419 _ => None,
420 })
421 .unwrap_or(Ty::Unknown);
422 let ty = self.insert_type_vars(ty);
423 self.normalize_associated_types_in(ty)
424 }
425 Expr::Await { expr } => {
426 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
427 self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
428 }
429 Expr::Try { expr } => {
430 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
431 self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
432 }
433 Expr::Cast { expr, type_ref } => {
434 let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
435 let cast_ty = self.make_ty(type_ref);
436 // FIXME check the cast...
437 cast_ty
438 }
439 Expr::Ref { expr, rawness, mutability } => {
440 let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) =
441 &expected.ty.as_reference_or_ptr()
442 {
443 if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared {
444 // FIXME: throw type error - expected mut reference but found shared ref,
445 // which cannot be coerced
446 }
447 if *exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
448 // FIXME: throw type error - expected reference but found ptr,
449 // which cannot be coerced
450 }
451 Expectation::rvalue_hint(Ty::clone(exp_inner))
452 } else {
453 Expectation::none()
454 };
455 let inner_ty = self.infer_expr_inner(*expr, &expectation);
456 let ty = match rawness {
457 Rawness::RawPtr => TypeCtor::RawPtr(*mutability),
458 Rawness::Ref => TypeCtor::Ref(*mutability),
459 };
460 Ty::apply_one(ty, inner_ty)
461 }
462 Expr::Box { expr } => {
463 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
464 if let Some(box_) = self.resolve_boxed_box() {
465 Ty::apply_one(TypeCtor::Adt(box_), inner_ty)
466 } else {
467 Ty::Unknown
468 }
469 }
470 Expr::UnaryOp { expr, op } => {
471 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
472 match op {
473 UnaryOp::Deref => match self.resolver.krate() {
474 Some(krate) => {
475 let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
476 match autoderef::deref(
477 self.db,
478 krate,
479 InEnvironment {
480 value: &canonicalized.value,
481 environment: self.trait_env.clone(),
482 },
483 ) {
484 Some(derefed_ty) => {
485 canonicalized.decanonicalize_ty(derefed_ty.value)
486 }
487 None => Ty::Unknown,
488 }
489 }
490 None => Ty::Unknown,
491 },
492 UnaryOp::Neg => {
493 match &inner_ty {
494 // Fast path for builtins
495 Ty::Apply(ApplicationTy {
496 ctor: TypeCtor::Int(IntTy { signedness: Signedness::Signed, .. }),
497 ..
498 })
499 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. })
500 | Ty::Infer(InferTy::IntVar(..))
501 | Ty::Infer(InferTy::FloatVar(..)) => inner_ty,
502 // Otherwise we resolve via the std::ops::Neg trait
503 _ => self
504 .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
505 }
506 }
507 UnaryOp::Not => {
508 match &inner_ty {
509 // Fast path for builtins
510 Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })
511 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. })
512 | Ty::Infer(InferTy::IntVar(..)) => inner_ty,
513 // Otherwise we resolve via the std::ops::Not trait
514 _ => self
515 .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
516 }
517 }
518 }
519 }
520 Expr::BinaryOp { lhs, rhs, op } => match op {
521 Some(op) => {
522 let lhs_expectation = match op {
523 BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)),
524 _ => Expectation::none(),
525 };
526 let lhs_ty = self.infer_expr(*lhs, &lhs_expectation);
527 // FIXME: find implementation of trait corresponding to operation
528 // symbol and resolve associated `Output` type
529 let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty.clone());
530 let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation));
531
532 // FIXME: similar as above, return ty is often associated trait type
533 op::binary_op_return_ty(*op, lhs_ty, rhs_ty)
534 }
535 _ => Ty::Unknown,
536 },
537 Expr::Range { lhs, rhs, range_type } => {
538 let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
539 let rhs_expect = lhs_ty
540 .as_ref()
541 .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
542 let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
543 match (range_type, lhs_ty, rhs_ty) {
544 (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
545 Some(adt) => Ty::simple(TypeCtor::Adt(adt)),
546 None => Ty::Unknown,
547 },
548 (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
549 Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
550 None => Ty::Unknown,
551 },
552 (RangeOp::Inclusive, None, Some(ty)) => {
553 match self.resolve_range_to_inclusive() {
554 Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
555 None => Ty::Unknown,
556 }
557 }
558 (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
559 Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
560 None => Ty::Unknown,
561 },
562 (RangeOp::Inclusive, Some(_), Some(ty)) => {
563 match self.resolve_range_inclusive() {
564 Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
565 None => Ty::Unknown,
566 }
567 }
568 (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
569 Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
570 None => Ty::Unknown,
571 },
572 (RangeOp::Inclusive, _, None) => Ty::Unknown,
573 }
574 }
575 Expr::Index { base, index } => {
576 let base_ty = self.infer_expr_inner(*base, &Expectation::none());
577 let index_ty = self.infer_expr(*index, &Expectation::none());
578
579 if let (Some(index_trait), Some(krate)) =
580 (self.resolve_ops_index(), self.resolver.krate())
581 {
582 let canonicalized = self.canonicalizer().canonicalize_ty(base_ty);
583 let self_ty = method_resolution::resolve_indexing_op(
584 self.db,
585 &canonicalized.value,
586 self.trait_env.clone(),
587 krate,
588 index_trait,
589 );
590 let self_ty =
591 self_ty.map_or(Ty::Unknown, |t| canonicalized.decanonicalize_ty(t.value));
592 self.resolve_associated_type_with_params(
593 self_ty,
594 self.resolve_ops_index_output(),
595 &[index_ty],
596 )
597 } else {
598 Ty::Unknown
599 }
600 }
601 Expr::Tuple { exprs } => {
602 let mut tys = match &expected.ty {
603 ty_app!(TypeCtor::Tuple { .. }, st) => st
604 .iter()
605 .cloned()
606 .chain(repeat_with(|| self.table.new_type_var()))
607 .take(exprs.len())
608 .collect::<Vec<_>>(),
609 _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
610 };
611
612 for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
613 self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
614 }
615
616 Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into()))
617 }
618 Expr::Array(array) => {
619 let elem_ty = match &expected.ty {
620 ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => {
621 st.as_single().clone()
622 }
623 _ => self.table.new_type_var(),
624 };
625
626 match array {
627 Array::ElementList(items) => {
628 for expr in items.iter() {
629 self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone()));
630 }
631 }
632 Array::Repeat { initializer, repeat } => {
633 self.infer_expr_coerce(
634 *initializer,
635 &Expectation::has_type(elem_ty.clone()),
636 );
637 self.infer_expr(
638 *repeat,
639 &Expectation::has_type(Ty::simple(TypeCtor::Int(IntTy::usize()))),
640 );
641 }
642 }
643
644 Ty::apply_one(TypeCtor::Array, elem_ty)
645 }
646 Expr::Literal(lit) => match lit {
647 Literal::Bool(..) => Ty::simple(TypeCtor::Bool),
648 Literal::String(..) => {
649 Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str))
650 }
651 Literal::ByteString(..) => {
652 let byte_type = Ty::simple(TypeCtor::Int(IntTy::u8()));
653 let array_type = Ty::apply_one(TypeCtor::Array, byte_type);
654 Ty::apply_one(TypeCtor::Ref(Mutability::Shared), array_type)
655 }
656 Literal::Char(..) => Ty::simple(TypeCtor::Char),
657 Literal::Int(_v, ty) => match ty {
658 Some(int_ty) => Ty::simple(TypeCtor::Int((*int_ty).into())),
659 None => self.table.new_integer_var(),
660 },
661 Literal::Float(_v, ty) => match ty {
662 Some(float_ty) => Ty::simple(TypeCtor::Float((*float_ty).into())),
663 None => self.table.new_float_var(),
664 },
665 },
666 };
667 // use a new type variable if we got Ty::Unknown here
668 let ty = self.insert_type_vars_shallow(ty);
669 let ty = self.resolve_ty_as_possible(ty);
670 self.write_expr_ty(tgt_expr, ty.clone());
671 ty
672 }
673
674 fn infer_block(
675 &mut self,
676 statements: &[Statement],
677 tail: Option<ExprId>,
678 expected: &Expectation,
679 ) -> Ty {
680 for stmt in statements {
681 match stmt {
682 Statement::Let { pat, type_ref, initializer } => {
683 let decl_ty =
684 type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown);
685
686 // Always use the declared type when specified
687 let mut ty = decl_ty.clone();
688
689 if let Some(expr) = initializer {
690 let actual_ty =
691 self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
692 if decl_ty == Ty::Unknown {
693 ty = actual_ty;
694 }
695 }
696
697 let ty = self.resolve_ty_as_possible(ty);
698 self.infer_pat(*pat, &ty, BindingMode::default());
699 }
700 Statement::Expr(expr) => {
701 self.infer_expr(*expr, &Expectation::none());
702 }
703 }
704 }
705
706 let ty = if let Some(expr) = tail {
707 self.infer_expr_coerce(expr, expected)
708 } else {
709 // Citing rustc: if there is no explicit tail expression,
710 // that is typically equivalent to a tail expression
711 // of `()` -- except if the block diverges. In that
712 // case, there is no value supplied from the tail
713 // expression (assuming there are no other breaks,
714 // this implies that the type of the block will be
715 // `!`).
716 if self.diverges.is_always() {
717 // we don't even make an attempt at coercion
718 self.table.new_maybe_never_type_var()
719 } else {
720 self.coerce(&Ty::unit(), expected.coercion_target());
721 Ty::unit()
722 }
723 };
724 ty
725 }
726
727 fn infer_method_call(
728 &mut self,
729 tgt_expr: ExprId,
730 receiver: ExprId,
731 args: &[ExprId],
732 method_name: &Name,
733 generic_args: Option<&GenericArgs>,
734 ) -> Ty {
735 let receiver_ty = self.infer_expr(receiver, &Expectation::none());
736 let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone());
737
738 let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
739
740 let resolved = self.resolver.krate().and_then(|krate| {
741 method_resolution::lookup_method(
742 &canonicalized_receiver.value,
743 self.db,
744 self.trait_env.clone(),
745 krate,
746 &traits_in_scope,
747 method_name,
748 )
749 });
750 let (derefed_receiver_ty, method_ty, def_generics) = match resolved {
751 Some((ty, func)) => {
752 let ty = canonicalized_receiver.decanonicalize_ty(ty);
753 self.write_method_resolution(tgt_expr, func);
754 (ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into())))
755 }
756 None => (receiver_ty, Binders::new(0, Ty::Unknown), None),
757 };
758 let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty);
759 let method_ty = method_ty.subst(&substs);
760 let method_ty = self.insert_type_vars(method_ty);
761 self.register_obligations_for_call(&method_ty);
762 let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) {
763 Some(sig) => {
764 if !sig.params().is_empty() {
765 (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone())
766 } else {
767 (Ty::Unknown, Vec::new(), sig.ret().clone())
768 }
769 }
770 None => (Ty::Unknown, Vec::new(), Ty::Unknown),
771 };
772 // Apply autoref so the below unification works correctly
773 // FIXME: return correct autorefs from lookup_method
774 let actual_receiver_ty = match expected_receiver_ty.as_reference() {
775 Some((_, mutability)) => Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty),
776 _ => derefed_receiver_ty,
777 };
778 self.unify(&expected_receiver_ty, &actual_receiver_ty);
779
780 self.check_call_arguments(args, &param_tys);
781 self.normalize_associated_types_in(ret_ty)
782 }
783
784 fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) {
785 // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
786 // We do this in a pretty awful way: first we type-check any arguments
787 // that are not closures, then we type-check the closures. This is so
788 // that we have more information about the types of arguments when we
789 // type-check the functions. This isn't really the right way to do this.
790 for &check_closures in &[false, true] {
791 let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown));
792 for (&arg, param_ty) in args.iter().zip(param_iter) {
793 let is_closure = matches!(&self.body[arg], Expr::Lambda { .. });
794 if is_closure != check_closures {
795 continue;
796 }
797
798 let param_ty = self.normalize_associated_types_in(param_ty);
799 self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone()));
800 }
801 }
802 }
803
804 fn substs_for_method_call(
805 &mut self,
806 def_generics: Option<Generics>,
807 generic_args: Option<&GenericArgs>,
808 receiver_ty: &Ty,
809 ) -> Substs {
810 let (parent_params, self_params, type_params, impl_trait_params) =
811 def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split());
812 assert_eq!(self_params, 0); // method shouldn't have another Self param
813 let total_len = parent_params + type_params + impl_trait_params;
814 let mut substs = Vec::with_capacity(total_len);
815 // Parent arguments are unknown, except for the receiver type
816 if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) {
817 for (_id, param) in parent_generics {
818 if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf {
819 substs.push(receiver_ty.clone());
820 } else {
821 substs.push(Ty::Unknown);
822 }
823 }
824 }
825 // handle provided type arguments
826 if let Some(generic_args) = generic_args {
827 // if args are provided, it should be all of them, but we can't rely on that
828 for arg in generic_args.args.iter().take(type_params) {
829 match arg {
830 GenericArg::Type(type_ref) => {
831 let ty = self.make_ty(type_ref);
832 substs.push(ty);
833 }
834 }
835 }
836 };
837 let supplied_params = substs.len();
838 for _ in supplied_params..total_len {
839 substs.push(Ty::Unknown);
840 }
841 assert_eq!(substs.len(), total_len);
842 Substs(substs.into())
843 }
844
845 fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
846 if let Ty::Apply(a_ty) = callable_ty {
847 if let TypeCtor::FnDef(def) = a_ty.ctor {
848 let generic_predicates = self.db.generic_predicates(def.into());
849 for predicate in generic_predicates.iter() {
850 let predicate = predicate.clone().subst(&a_ty.parameters);
851 if let Some(obligation) = Obligation::from_predicate(predicate) {
852 self.obligations.push(obligation);
853 }
854 }
855 // add obligation for trait implementation, if this is a trait method
856 match def {
857 CallableDefId::FunctionId(f) => {
858 if let AssocContainerId::TraitId(trait_) =
859 f.lookup(self.db.upcast()).container
860 {
861 // construct a TraitDef
862 let substs = a_ty
863 .parameters
864 .prefix(generics(self.db.upcast(), trait_.into()).len());
865 self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
866 }
867 }
868 CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
869 }
870 }
871 }
872 }
873}
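A small standalone sketch (not part of the deleted file) of the `loop`/`break` inference above: every `break` value is merged into the enclosing `BreakableContext`'s `break_ty`, and the merged type becomes the type of the whole `loop` expression.

fn main() {
    let mut n = 0;
    let found = loop {
        n += 1;
        if n > 3 {
            break n * 10; // the break value determines the loop's type (i32 here)
        }
    };
    println!("{}", found);
}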
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs
deleted file mode 100644
index 4dd4f9802..000000000
--- a/crates/ra_hir_ty/src/infer/pat.rs
+++ /dev/null
@@ -1,241 +0,0 @@
1//! Type inference for patterns.
2
3use std::iter::repeat;
4use std::sync::Arc;
5
6use hir_def::{
7 expr::{BindingAnnotation, Expr, Literal, Pat, PatId, RecordFieldPat},
8 path::Path,
9 type_ref::Mutability,
10 FieldId,
11};
12use hir_expand::name::Name;
13use test_utils::mark;
14
15use super::{BindingMode, Expectation, InferenceContext};
16use crate::{utils::variant_data, Substs, Ty, TypeCtor};
17
18impl<'a> InferenceContext<'a> {
19 fn infer_tuple_struct_pat(
20 &mut self,
21 path: Option<&Path>,
22 subpats: &[PatId],
23 expected: &Ty,
24 default_bm: BindingMode,
25 id: PatId,
26 ) -> Ty {
27 let (ty, def) = self.resolve_variant(path);
28 let var_data = def.map(|it| variant_data(self.db.upcast(), it));
29 if let Some(variant) = def {
30 self.write_variant_resolution(id.into(), variant);
31 }
32 self.unify(&ty, expected);
33
34 let substs = ty.substs().unwrap_or_else(Substs::empty);
35
36 let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
37
38 for (i, &subpat) in subpats.iter().enumerate() {
39 let expected_ty = var_data
40 .as_ref()
41 .and_then(|d| d.field(&Name::new_tuple_field(i)))
42 .map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
43 let expected_ty = self.normalize_associated_types_in(expected_ty);
44 self.infer_pat(subpat, &expected_ty, default_bm);
45 }
46
47 ty
48 }
49
50 fn infer_record_pat(
51 &mut self,
52 path: Option<&Path>,
53 subpats: &[RecordFieldPat],
54 expected: &Ty,
55 default_bm: BindingMode,
56 id: PatId,
57 ) -> Ty {
58 let (ty, def) = self.resolve_variant(path);
59 let var_data = def.map(|it| variant_data(self.db.upcast(), it));
60 if let Some(variant) = def {
61 self.write_variant_resolution(id.into(), variant);
62 }
63
64 self.unify(&ty, expected);
65
66 let substs = ty.substs().unwrap_or_else(Substs::empty);
67
68 let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
69 for subpat in subpats {
70 let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
71 if let Some(local_id) = matching_field {
72 let field_def = FieldId { parent: def.unwrap(), local_id };
73 self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
74 }
75
76 let expected_ty =
77 matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
78 let expected_ty = self.normalize_associated_types_in(expected_ty);
79 self.infer_pat(subpat.pat, &expected_ty, default_bm);
80 }
81
82 ty
83 }
84
85 pub(super) fn infer_pat(
86 &mut self,
87 pat: PatId,
88 mut expected: &Ty,
89 mut default_bm: BindingMode,
90 ) -> Ty {
91 let body = Arc::clone(&self.body); // avoid borrow checker problem
92
93 if is_non_ref_pat(&body, pat) {
94 while let Some((inner, mutability)) = expected.as_reference() {
95 expected = inner;
96 default_bm = match default_bm {
97 BindingMode::Move => BindingMode::Ref(mutability),
98 BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared),
99 BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
100 }
101 }
102 } else if let Pat::Ref { .. } = &body[pat] {
103 mark::hit!(match_ergonomics_ref);
104 // When you encounter a `&pat` pattern, reset to Move.
105 // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
106 default_bm = BindingMode::Move;
107 }
108
109 // Lose mutability.
110 let default_bm = default_bm;
111 let expected = expected;
112
113 let ty = match &body[pat] {
114 Pat::Tuple { ref args, .. } => {
115 let expectations = match expected.as_tuple() {
116 Some(parameters) => &*parameters.0,
117 _ => &[],
118 };
119 let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown));
120
121 let inner_tys = args
122 .iter()
123 .zip(expectations_iter)
124 .map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm))
125 .collect();
126
127 Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys))
128 }
129 Pat::Or(ref pats) => {
130 if let Some((first_pat, rest)) = pats.split_first() {
131 let ty = self.infer_pat(*first_pat, expected, default_bm);
132 for pat in rest {
133 self.infer_pat(*pat, expected, default_bm);
134 }
135 ty
136 } else {
137 Ty::Unknown
138 }
139 }
140 Pat::Ref { pat, mutability } => {
141 let expectation = match expected.as_reference() {
142 Some((inner_ty, exp_mut)) => {
143 if *mutability != exp_mut {
144 // FIXME: emit type error?
145 }
146 inner_ty
147 }
148 _ => &Ty::Unknown,
149 };
150 let subty = self.infer_pat(*pat, expectation, default_bm);
151 Ty::apply_one(TypeCtor::Ref(*mutability), subty)
152 }
153 Pat::TupleStruct { path: p, args: subpats, .. } => {
154 self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
155 }
156 Pat::Record { path: p, args: fields, ellipsis: _ } => {
157 self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat)
158 }
159 Pat::Path(path) => {
160 // FIXME use correct resolver for the surrounding expression
161 let resolver = self.resolver.clone();
162 self.infer_path(&resolver, &path, pat.into()).unwrap_or(Ty::Unknown)
163 }
164 Pat::Bind { mode, name: _, subpat } => {
165 let mode = if mode == &BindingAnnotation::Unannotated {
166 default_bm
167 } else {
168 BindingMode::convert(*mode)
169 };
170 let inner_ty = if let Some(subpat) = subpat {
171 self.infer_pat(*subpat, expected, default_bm)
172 } else {
173 expected.clone()
174 };
175 let inner_ty = self.insert_type_vars_shallow(inner_ty);
176
177 let bound_ty = match mode {
178 BindingMode::Ref(mutability) => {
179 Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone())
180 }
181 BindingMode::Move => inner_ty.clone(),
182 };
183 let bound_ty = self.resolve_ty_as_possible(bound_ty);
184 self.write_pat_ty(pat, bound_ty);
185 return inner_ty;
186 }
187 Pat::Slice { prefix, slice, suffix } => {
188 let (container_ty, elem_ty) = match &expected {
189 ty_app!(TypeCtor::Array, st) => (TypeCtor::Array, st.as_single().clone()),
190 ty_app!(TypeCtor::Slice, st) => (TypeCtor::Slice, st.as_single().clone()),
191 _ => (TypeCtor::Slice, Ty::Unknown),
192 };
193
194 for pat_id in prefix.iter().chain(suffix) {
195 self.infer_pat(*pat_id, &elem_ty, default_bm);
196 }
197
198 let pat_ty = Ty::apply_one(container_ty, elem_ty);
199 if let Some(slice_pat_id) = slice {
200 self.infer_pat(*slice_pat_id, &pat_ty, default_bm);
201 }
202
203 pat_ty
204 }
205 Pat::Wild => expected.clone(),
206 Pat::Range { start, end } => {
207 let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
208 let end_ty = self.infer_expr(*end, &Expectation::has_type(start_ty));
209 end_ty
210 }
211 Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
212 Pat::Missing => Ty::Unknown,
213 };
214 // use a new type variable if we got Ty::Unknown here
215 let ty = self.insert_type_vars_shallow(ty);
216 if !self.unify(&ty, expected) {
217 // FIXME record mismatch, we need to change the type of self.type_mismatches for that
218 }
219 let ty = self.resolve_ty_as_possible(ty);
220 self.write_pat_ty(pat, ty.clone());
221 ty
222 }
223}
224
225fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
226 match &body[pat] {
227 Pat::Tuple { .. }
228 | Pat::TupleStruct { .. }
229 | Pat::Record { .. }
230 | Pat::Range { .. }
231 | Pat::Slice { .. } => true,
232 Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
233 // FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented.
234 Pat::Path(..) => true,
235 Pat::Lit(expr) => match body[*expr] {
236 Expr::Literal(Literal::String(..)) => false,
237 _ => true,
238 },
239 Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Missing => false,
240 }
241}
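For context on the default_bm handling in infer_pat above: it models match ergonomics, where matching a non-reference pattern against a reference flips the default binding mode to by-reference, and an explicit `&` pattern flips it back to by-value. A small self-contained illustration (the bindings here are hypothetical, not taken from this crate):

// Hypothetical standalone example (not ra_hir_ty code): match ergonomics and
// default binding modes, the behaviour infer_pat reproduces.
fn main() {
    // Scrutinee type: &(i32, &i32).
    let pair = &(1, &2);

    // A non-reference pattern matched against a reference switches the
    // default binding mode to by-reference, so `x` is bound as an &i32.
    let (x, _) = pair;
    let _: &i32 = x;

    // An explicit `&` pattern resets the binding mode to by-value, which is
    // exactly the `let (_, &w) = &(1, &2);` case cited in the comment above:
    // `w` ends up as a plain i32.
    let (_, &w) = pair;
    let _: i32 = w;
}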
diff --git a/crates/ra_hir_ty/src/infer/path.rs b/crates/ra_hir_ty/src/infer/path.rs
deleted file mode 100644
index 80d7ed10e..000000000
--- a/crates/ra_hir_ty/src/infer/path.rs
+++ /dev/null
@@ -1,287 +0,0 @@
1//! Path expression resolution.
2
3use std::iter;
4
5use hir_def::{
6 path::{Path, PathSegment},
7 resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
8 AdtId, AssocContainerId, AssocItemId, EnumVariantId, Lookup,
9};
10use hir_expand::name::Name;
11
12use crate::{method_resolution, Substs, Ty, ValueTyDefId};
13
14use super::{ExprOrPatId, InferenceContext, TraitRef};
15
16impl<'a> InferenceContext<'a> {
17 pub(super) fn infer_path(
18 &mut self,
19 resolver: &Resolver,
20 path: &Path,
21 id: ExprOrPatId,
22 ) -> Option<Ty> {
23 let ty = self.resolve_value_path(resolver, path, id)?;
24 let ty = self.insert_type_vars(ty);
25 let ty = self.normalize_associated_types_in(ty);
26 Some(ty)
27 }
28
29 fn resolve_value_path(
30 &mut self,
31 resolver: &Resolver,
32 path: &Path,
33 id: ExprOrPatId,
34 ) -> Option<Ty> {
35 let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
36 if path.segments().is_empty() {
37 // This can't actually happen syntax-wise
38 return None;
39 }
40 let ty = self.make_ty(type_ref);
41 let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
42 let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
43 let (ty, _) = Ty::from_type_relative_path(&ctx, ty, None, remaining_segments_for_ty);
44 self.resolve_ty_assoc_item(
45 ty,
46 &path.segments().last().expect("path had at least one segment").name,
47 id,
48 )?
49 } else {
50 let value_or_partial =
51 resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
52
53 match value_or_partial {
54 ResolveValueResult::ValueNs(it) => (it, None),
55 ResolveValueResult::Partial(def, remaining_index) => {
56 self.resolve_assoc_item(def, path, remaining_index, id)?
57 }
58 }
59 };
60
61 let typable: ValueTyDefId = match value {
62 ValueNs::LocalBinding(pat) => {
63 let ty = self.result.type_of_pat.get(pat)?.clone();
64 let ty = self.resolve_ty_as_possible(ty);
65 return Some(ty);
66 }
67 ValueNs::FunctionId(it) => it.into(),
68 ValueNs::ConstId(it) => it.into(),
69 ValueNs::StaticId(it) => it.into(),
70 ValueNs::StructId(it) => {
71 self.write_variant_resolution(id, it.into());
72
73 it.into()
74 }
75 ValueNs::EnumVariantId(it) => {
76 self.write_variant_resolution(id, it.into());
77
78 it.into()
79 }
80 ValueNs::ImplSelf(impl_id) => {
81 let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
82 let substs = Substs::type_params_for_generics(&generics);
83 let ty = self.db.impl_self_ty(impl_id).subst(&substs);
84 if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
85 let ty = self.db.value_ty(struct_id.into()).subst(&substs);
86 return Some(ty);
87 } else {
88 // FIXME: diagnostic, invalid Self reference
89 return None;
90 }
91 }
92 };
93
94 let ty = self.db.value_ty(typable);
95 // self_subst is just for the parent
96 let parent_substs = self_subst.unwrap_or_else(Substs::empty);
97 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
98 let substs = Ty::substs_from_path(&ctx, path, typable, true);
99 let full_substs = Substs::builder(substs.len())
100 .use_parent_substs(&parent_substs)
101 .fill(substs.0[parent_substs.len()..].iter().cloned())
102 .build();
103 let ty = ty.subst(&full_substs);
104 Some(ty)
105 }
106
107 fn resolve_assoc_item(
108 &mut self,
109 def: TypeNs,
110 path: &Path,
111 remaining_index: usize,
112 id: ExprOrPatId,
113 ) -> Option<(ValueNs, Option<Substs>)> {
114 assert!(remaining_index < path.segments().len());
115 // there may be more intermediate segments between the resolved one and
116 // the end. Only the last segment needs to be resolved to a value; from
117 // the segments before that, we need to get either a type or a trait ref.
118
119 let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
120 let remaining_segments = path.segments().skip(remaining_index);
121 let is_before_last = remaining_segments.len() == 1;
122
123 match (def, is_before_last) {
124 (TypeNs::TraitId(trait_), true) => {
125 let segment =
126 remaining_segments.last().expect("there should be at least one segment here");
127 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
128 let trait_ref = TraitRef::from_resolved_path(&ctx, trait_, resolved_segment, None);
129 self.resolve_trait_assoc_item(trait_ref, segment, id)
130 }
131 (def, _) => {
132 // Either we already have a type (e.g. `Vec::new`), or we have a
133 // trait but it's not the last segment, so the next segment
134 // should resolve to an associated type of that trait (e.g. `<T
135 // as Iterator>::Item::default`)
136 let remaining_segments_for_ty =
137 remaining_segments.take(remaining_segments.len() - 1);
138 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
139 let (ty, _) = Ty::from_partly_resolved_hir_path(
140 &ctx,
141 def,
142 resolved_segment,
143 remaining_segments_for_ty,
144 true,
145 );
146 if let Ty::Unknown = ty {
147 return None;
148 }
149
150 let ty = self.insert_type_vars(ty);
151 let ty = self.normalize_associated_types_in(ty);
152
153 let segment =
154 remaining_segments.last().expect("there should be at least one segment here");
155
156 self.resolve_ty_assoc_item(ty, &segment.name, id)
157 }
158 }
159 }
160
161 fn resolve_trait_assoc_item(
162 &mut self,
163 trait_ref: TraitRef,
164 segment: PathSegment<'_>,
165 id: ExprOrPatId,
166 ) -> Option<(ValueNs, Option<Substs>)> {
167 let trait_ = trait_ref.trait_;
168 let item =
169 self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
170 match item {
171 AssocItemId::FunctionId(func) => {
172 if segment.name == &self.db.function_data(func).name {
173 Some(AssocItemId::FunctionId(func))
174 } else {
175 None
176 }
177 }
178
179 AssocItemId::ConstId(konst) => {
180 if self
181 .db
182 .const_data(konst)
183 .name
184 .as_ref()
185 .map_or(false, |n| n == segment.name)
186 {
187 Some(AssocItemId::ConstId(konst))
188 } else {
189 None
190 }
191 }
192 AssocItemId::TypeAliasId(_) => None,
193 }
194 })?;
195 let def = match item {
196 AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
197 AssocItemId::ConstId(c) => ValueNs::ConstId(c),
198 AssocItemId::TypeAliasId(_) => unreachable!(),
199 };
200
201 self.write_assoc_resolution(id, item);
202 Some((def, Some(trait_ref.substs)))
203 }
204
205 fn resolve_ty_assoc_item(
206 &mut self,
207 ty: Ty,
208 name: &Name,
209 id: ExprOrPatId,
210 ) -> Option<(ValueNs, Option<Substs>)> {
211 if let Ty::Unknown = ty {
212 return None;
213 }
214
215 if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
216 return Some(result);
217 }
218
219 let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone());
220 let krate = self.resolver.krate()?;
221 let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
222
223 method_resolution::iterate_method_candidates(
224 &canonical_ty.value,
225 self.db,
226 self.trait_env.clone(),
227 krate,
228 &traits_in_scope,
229 Some(name),
230 method_resolution::LookupMode::Path,
231 move |_ty, item| {
232 let (def, container) = match item {
233 AssocItemId::FunctionId(f) => {
234 (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
235 }
236 AssocItemId::ConstId(c) => {
237 (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
238 }
239 AssocItemId::TypeAliasId(_) => unreachable!(),
240 };
241 let substs = match container {
242 AssocContainerId::ImplId(impl_id) => {
243 let impl_substs = Substs::build_for_def(self.db, impl_id)
244 .fill(iter::repeat_with(|| self.table.new_type_var()))
245 .build();
246 let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs);
247 self.unify(&impl_self_ty, &ty);
248 Some(impl_substs)
249 }
250 AssocContainerId::TraitId(trait_) => {
251 // we're picking this method
252 let trait_substs = Substs::build_for_def(self.db, trait_)
253 .push(ty.clone())
254 .fill(std::iter::repeat_with(|| self.table.new_type_var()))
255 .build();
256 self.obligations.push(super::Obligation::Trait(TraitRef {
257 trait_,
258 substs: trait_substs.clone(),
259 }));
260 Some(trait_substs)
261 }
262 AssocContainerId::ContainerId(_) => None,
263 };
264
265 self.write_assoc_resolution(id, item);
266 Some((def, substs))
267 },
268 )
269 }
270
271 fn resolve_enum_variant_on_ty(
272 &mut self,
273 ty: &Ty,
274 name: &Name,
275 id: ExprOrPatId,
276 ) -> Option<(ValueNs, Option<Substs>)> {
277 let (enum_id, subst) = match ty.as_adt() {
278 Some((AdtId::EnumId(e), subst)) => (e, subst),
279 _ => return None,
280 };
281 let enum_data = self.db.enum_data(enum_id);
282 let local_id = enum_data.variant(name)?;
283 let variant = EnumVariantId { parent: enum_id, local_id };
284 self.write_variant_resolution(id, variant.into());
285 Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
286 }
287}
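For context on resolve_value_path and resolve_assoc_item above: the same code path has to cover local bindings, plain constructors, associated functions reached through a type, trait items (where the trait ref supplies the Self substitution and a trait obligation is registered), and type-anchored `<T>::item` paths. A compact, self-contained sketch of those path shapes (the types and trait here are hypothetical):

// Hypothetical standalone example (not ra_hir_ty code): the value-path shapes
// that infer/path.rs resolves.
#[derive(Debug)]
enum Shape {
    Circle,
    Square,
}

trait HasShape {
    const DEFAULT: Shape;
    fn shape() -> Shape;
}

struct Canvas;

impl HasShape for Canvas {
    const DEFAULT: Shape = Shape::Circle;
    fn shape() -> Shape {
        Shape::Square
    }
}

fn main() {
    // Enum variant path: resolved via ValueNs::EnumVariantId.
    let a = Shape::Circle;
    // Associated function reached through a type, `Vec::new`-style.
    let b: Vec<i32> = Vec::new();
    // Fully qualified trait items: the trait ref fixes Self = Canvas.
    let c = <Canvas as HasShape>::shape();
    let d = <Canvas as HasShape>::DEFAULT;
    // Type-anchored path, the `type_anchor()` branch above.
    let e = <Canvas>::shape();
    println!("{:?} {:?} {:?} {:?} {:?}", a, b, c, d, e);
}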
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
deleted file mode 100644
index 2e895d911..000000000
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ /dev/null
@@ -1,474 +0,0 @@
1//! Unification and canonicalization logic.
2
3use std::borrow::Cow;
4
5use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
6
7use test_utils::mark;
8
9use super::{InferenceContext, Obligation};
10use crate::{
11 BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty,
12 TyKind, TypeCtor, TypeWalk,
13};
14
15impl<'a> InferenceContext<'a> {
16 pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b>
17 where
18 'a: 'b,
19 {
20 Canonicalizer { ctx: self, free_vars: Vec::new(), var_stack: Vec::new() }
21 }
22}
23
24pub(super) struct Canonicalizer<'a, 'b>
25where
26 'a: 'b,
27{
28 ctx: &'b mut InferenceContext<'a>,
29 free_vars: Vec<InferTy>,
30 /// A stack of type variables that is used to detect recursive types (which
31 /// are an error, but we need to protect against them to avoid stack
32 /// overflows).
33 var_stack: Vec<TypeVarId>,
34}
35
36#[derive(Debug)]
37pub(super) struct Canonicalized<T> {
38 pub value: Canonical<T>,
39 free_vars: Vec<InferTy>,
40}
41
42impl<'a, 'b> Canonicalizer<'a, 'b>
43where
44 'a: 'b,
45{
46 fn add(&mut self, free_var: InferTy) -> usize {
47 self.free_vars.iter().position(|&v| v == free_var).unwrap_or_else(|| {
48 let next_index = self.free_vars.len();
49 self.free_vars.push(free_var);
50 next_index
51 })
52 }
53
54 fn do_canonicalize<T: TypeWalk>(&mut self, t: T, binders: DebruijnIndex) -> T {
55 t.fold_binders(
56 &mut |ty, binders| match ty {
57 Ty::Infer(tv) => {
58 let inner = tv.to_inner();
59 if self.var_stack.contains(&inner) {
60 // recursive type
61 return tv.fallback_value();
62 }
63 if let Some(known_ty) =
64 self.ctx.table.var_unification_table.inlined_probe_value(inner).known()
65 {
66 self.var_stack.push(inner);
67 let result = self.do_canonicalize(known_ty.clone(), binders);
68 self.var_stack.pop();
69 result
70 } else {
71 let root = self.ctx.table.var_unification_table.find(inner);
72 let free_var = match tv {
73 InferTy::TypeVar(_) => InferTy::TypeVar(root),
74 InferTy::IntVar(_) => InferTy::IntVar(root),
75 InferTy::FloatVar(_) => InferTy::FloatVar(root),
76 InferTy::MaybeNeverTypeVar(_) => InferTy::MaybeNeverTypeVar(root),
77 };
78 let position = self.add(free_var);
79 Ty::Bound(BoundVar::new(binders, position))
80 }
81 }
82 _ => ty,
83 },
84 binders,
85 )
86 }
87
88 fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> {
89 let kinds = self
90 .free_vars
91 .iter()
92 .map(|v| match v {
93 // mapping MaybeNeverTypeVar to the same kind as general ones
94 // should be fine, because as opposed to int or float type vars,
95 // they don't restrict what kind of type can go into them, they
96 // just affect fallback.
97 InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General,
98 InferTy::IntVar(_) => TyKind::Integer,
99 InferTy::FloatVar(_) => TyKind::Float,
100 })
101 .collect();
102 Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars }
103 }
104
105 pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> {
106 let result = self.do_canonicalize(ty, DebruijnIndex::INNERMOST);
107 self.into_canonicalized(result)
108 }
109
110 pub(crate) fn canonicalize_obligation(
111 mut self,
112 obligation: InEnvironment<Obligation>,
113 ) -> Canonicalized<InEnvironment<Obligation>> {
114 let result = match obligation.value {
115 Obligation::Trait(tr) => {
116 Obligation::Trait(self.do_canonicalize(tr, DebruijnIndex::INNERMOST))
117 }
118 Obligation::Projection(pr) => {
119 Obligation::Projection(self.do_canonicalize(pr, DebruijnIndex::INNERMOST))
120 }
121 };
122 self.into_canonicalized(InEnvironment {
123 value: result,
124 environment: obligation.environment,
125 })
126 }
127}
128
129impl<T> Canonicalized<T> {
130 pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
131 ty.walk_mut_binders(
132 &mut |ty, binders| {
133 if let &mut Ty::Bound(bound) = ty {
134 if bound.debruijn >= binders {
135 *ty = Ty::Infer(self.free_vars[bound.index]);
136 }
137 }
138 },
139 DebruijnIndex::INNERMOST,
140 );
141 ty
142 }
143
144 pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) {
145 // the solution may contain new variables, which we need to convert to new inference vars
146 let new_vars = Substs(
147 solution
148 .kinds
149 .iter()
150 .map(|k| match k {
151 TyKind::General => ctx.table.new_type_var(),
152 TyKind::Integer => ctx.table.new_integer_var(),
153 TyKind::Float => ctx.table.new_float_var(),
154 })
155 .collect(),
156 );
157 for (i, ty) in solution.value.into_iter().enumerate() {
158 let var = self.free_vars[i];
159 // eagerly replace projections in the type; we may be getting types
160 // e.g. from where clauses where this hasn't happened yet
161 let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars));
162 ctx.table.unify(&Ty::Infer(var), &ty);
163 }
164 }
165}
166
167pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
168 let mut table = InferenceTable::new();
169 let vars = Substs(
170 tys.kinds
171 .iter()
172 // we always use type vars here because we want everything to
173 // fall back to Unknown in the end (kind of hacky, as below)
174 .map(|_| table.new_type_var())
175 .collect(),
176 );
177 let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars);
178 let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars);
179 if !table.unify(&ty1_with_vars, &ty2_with_vars) {
180 return None;
181 }
182 // default any type vars that weren't unified back to their original bound vars
183 // (kind of hacky)
184 for (i, var) in vars.iter().enumerate() {
185 if &*table.resolve_ty_shallow(var) == var {
186 table.unify(var, &Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i)));
187 }
188 }
189 Some(
190 Substs::builder(tys.kinds.len())
191 .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone())))
192 .build(),
193 )
194}
195
196#[derive(Clone, Debug)]
197pub(crate) struct InferenceTable {
198 pub(super) var_unification_table: InPlaceUnificationTable<TypeVarId>,
199}
200
201impl InferenceTable {
202 pub fn new() -> Self {
203 InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
204 }
205
206 pub fn new_type_var(&mut self) -> Ty {
207 Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
208 }
209
210 pub fn new_integer_var(&mut self) -> Ty {
211 Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
212 }
213
214 pub fn new_float_var(&mut self) -> Ty {
215 Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
216 }
217
218 pub fn new_maybe_never_type_var(&mut self) -> Ty {
219 Ty::Infer(InferTy::MaybeNeverTypeVar(
220 self.var_unification_table.new_key(TypeVarValue::Unknown),
221 ))
222 }
223
224 pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
225 self.resolve_ty_completely_inner(&mut Vec::new(), ty)
226 }
227
228 pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
229 self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
230 }
231
232 pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
233 self.unify_inner(ty1, ty2, 0)
234 }
235
236 pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
237 substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
238 }
239
240