author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2019-11-24 21:45:26 +0000
committer GitHub <[email protected]>    2019-11-24 21:45:26 +0000
commit    f7f9757b6b144385ab8ce57b15764473b1f57331 (patch)
tree      dfcb584a03bd4e049291a05912508690cd8f9247 /crates/ra_hir/src/ty/infer
parent    6a5dea778b16a35d765a3a98f2ac303c5cc3df25 (diff)
parent    d06904e90cdc1603ffcb714e70dab83905221f72 (diff)
Merge #2396
2396: Switch to variant-granularity field type inference r=flodiebold a=matklad

r? @flodiebold

Previously, we had a `ty` query for each field. This PR switches to a query per struct, which returns an `ArenaMap` with `Ty`s.

I don't know which approach is better. What bugs me about the original approach is that, if we do all queries on the "leaf" defs, in practice we get a ton of queries that repeatedly reach into the parent definition to compute the module, resolver, etc. This *seems* wasteful (but I don't think this is really what causes any perf problems for us). At the same time, I've been looking at Kotlin, and it seems to use the general pattern of analyzing the *parent* definition and storing info about the children in a `BindingContext`.

I don't really know which way is preferable. I think I want to try this approach, where query granularity generally mirrors the data granularity. The primary motivation for me here is probably just the hope that we can avoid adding a ton of helpers to `StructField`, and maybe in general avoid the need to switch to a global `StructField`, using `LocalStructFieldId` most of the time internally. For the external API (i.e., for `ra_ide_api`), I think we should continue with the fine-grained `StructField::ty` approach, which internally fetches the table for the whole struct and indexes into it.

In terms of actual memory savings, the results are as follows:

```
This PR:
    142kb FieldTypesQuery (deps)
     38kb FieldTypesQuery

Status Quo:
    208kb TypeForFieldQuery (deps)
     18kb TypeForFieldQuery
```

Note how the table itself occupies more than twice as much space! I don't have an explanation for this: a plausible hypothesis is that single-field structs are very common, and for them the table is a pessimisation.

There's a noticeable wallclock time difference.

Co-authored-by: Aleksey Kladov <[email protected]>
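Conceptually, the change trades many per-field queries for one per-variant query that returns a table of field types. Below is a minimal, self-contained sketch of the two query shapes, not the real rust-analyzer code: `Ty`, `LocalFieldId`, and the plain `HashMap` here are simplified stand-ins (the actual `field_types` query returns an `ArenaMap<LocalStructFieldId, Ty>` and is computed through salsa).

```rust
use std::collections::HashMap;

// Simplified stand-in for hir's `Ty`.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Unknown,
    Named(String),
}

// Index of a field *within* its parent struct/variant (cf. `LocalStructFieldId`).
type LocalFieldId = usize;

// Fine-grained shape: conceptually one query per field, each of which would
// re-resolve the parent's module, resolver, etc. on every call.
fn type_for_field(struct_fields: &[(String, String)], field: LocalFieldId) -> Ty {
    struct_fields
        .get(field)
        .map(|(_name, ty)| Ty::Named(ty.clone()))
        .unwrap_or(Ty::Unknown)
}

// Variant-granularity shape: one query per struct/variant, returning a table
// of all field types keyed by the local field id.
fn field_types(struct_fields: &[(String, String)]) -> HashMap<LocalFieldId, Ty> {
    struct_fields
        .iter()
        .enumerate()
        .map(|(id, (_name, ty))| (id, Ty::Named(ty.clone())))
        .collect()
}

fn main() {
    let point = vec![
        ("x".to_string(), "f64".to_string()),
        ("y".to_string(), "f64".to_string()),
    ];

    // Old shape: one lookup per field.
    assert_eq!(type_for_field(&point, 1), Ty::Named("f64".to_string()));

    // New shape: fetch the whole table once, then index into it.
    let tys = field_types(&point);
    assert_eq!(tys[&0], Ty::Named("f64".to_string()));
}
```

A fine-grained external API in the style of `StructField::ty` can keep its shape on top of this: it fetches the whole table for the parent and indexes into it, as the last two lines of `main` illustrate.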
Diffstat (limited to 'crates/ra_hir/src/ty/infer')
-rw-r--r--  crates/ra_hir/src/ty/infer/coerce.rs  13
-rw-r--r--  crates/ra_hir/src/ty/infer/expr.rs    11
-rw-r--r--  crates/ra_hir/src/ty/infer/pat.rs      9
3 files changed, 22 insertions, 11 deletions
diff --git a/crates/ra_hir/src/ty/infer/coerce.rs b/crates/ra_hir/src/ty/infer/coerce.rs
index 4ea038d99..54765da35 100644
--- a/crates/ra_hir/src/ty/infer/coerce.rs
+++ b/crates/ra_hir/src/ty/infer/coerce.rs
@@ -245,14 +245,17 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 ty_app!(TypeCtor::Adt(Adt::Struct(struct1)), st1),
                 ty_app!(TypeCtor::Adt(Adt::Struct(struct2)), st2),
             ) if struct1 == struct2 => {
-                let fields = struct1.fields(self.db);
-                let (last_field, prev_fields) = fields.split_last()?;
+                let field_tys = self.db.field_types(struct1.id.into());
+                let struct_data = self.db.struct_data(struct1.id.0);
+
+                let mut fields = struct_data.variant_data.fields().iter();
+                let (last_field_id, _data) = fields.next_back()?;
 
                 // Get the generic parameter involved in the last field.
                 let unsize_generic_index = {
                     let mut index = None;
                     let mut multiple_param = false;
-                    last_field.ty(self.db).walk(&mut |ty| match ty {
+                    field_tys[last_field_id].walk(&mut |ty| match ty {
                         &Ty::Param { idx, .. } => {
                             if index.is_none() {
                                 index = Some(idx);
@@ -271,8 +274,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
 
                 // Check other fields do not involve it.
                 let mut multiple_used = false;
-                prev_fields.iter().for_each(|field| {
-                    field.ty(self.db).walk(&mut |ty| match ty {
+                fields.for_each(|(field_id, _data)| {
+                    field_tys[field_id].walk(&mut |ty| match ty {
                         &Ty::Param { idx, .. } if idx == unsize_generic_index => {
                             multiple_used = true
                         }
diff --git a/crates/ra_hir/src/ty/infer/expr.rs b/crates/ra_hir/src/ty/infer/expr.rs
index 2996920c6..663ff9435 100644
--- a/crates/ra_hir/src/ty/infer/expr.rs
+++ b/crates/ra_hir/src/ty/infer/expr.rs
@@ -214,6 +214,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 self.unify(&ty, &expected.ty);
 
                 let substs = ty.substs().unwrap_or_else(Substs::empty);
+                let field_types =
+                    def_id.map(|it| self.db.field_types(it.into())).unwrap_or_default();
                 for (field_idx, field) in fields.iter().enumerate() {
                     let field_def = def_id.and_then(|it| match it.field(self.db, &field.name) {
                         Some(field) => Some(field),
@@ -228,8 +230,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                     if let Some(field_def) = field_def {
                         self.result.record_field_resolutions.insert(field.expr, field_def);
                     }
-                    let field_ty =
-                        field_def.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs);
+                    let field_ty = field_def
+                        .map_or(Ty::Unknown, |it| field_types[it.id].clone())
+                        .subst(&substs);
                     self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
                 }
                 if let Some(expr) = spread {
@@ -252,7 +255,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                             .and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
                         TypeCtor::Adt(Adt::Struct(s)) => s.field(self.db, name).map(|field| {
                             self.write_field_resolution(tgt_expr, field);
-                            field.ty(self.db).subst(&a_ty.parameters)
+                            self.db.field_types(s.id.into())[field.id]
+                                .clone()
+                                .subst(&a_ty.parameters)
                         }),
                         _ => None,
                     },
diff --git a/crates/ra_hir/src/ty/infer/pat.rs b/crates/ra_hir/src/ty/infer/pat.rs
index c125ddfbc..641d61e87 100644
--- a/crates/ra_hir/src/ty/infer/pat.rs
+++ b/crates/ra_hir/src/ty/infer/pat.rs
@@ -27,10 +27,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
 
         let substs = ty.substs().unwrap_or_else(Substs::empty);
 
+        let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
         for (i, &subpat) in subpats.iter().enumerate() {
             let expected_ty = def
                 .and_then(|d| d.field(self.db, &Name::new_tuple_field(i)))
-                .map_or(Ty::Unknown, |field| field.ty(self.db))
+                .map_or(Ty::Unknown, |field| field_tys[field.id].clone())
                 .subst(&substs);
             let expected_ty = self.normalize_associated_types_in(expected_ty);
             self.infer_pat(subpat, &expected_ty, default_bm);
@@ -56,10 +57,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
 
         let substs = ty.substs().unwrap_or_else(Substs::empty);
 
+        let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
         for subpat in subpats {
             let matching_field = def.and_then(|it| it.field(self.db, &subpat.name));
-            let expected_ty =
-                matching_field.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs);
+            let expected_ty = matching_field
+                .map_or(Ty::Unknown, |field| field_tys[field.id].clone())
+                .subst(&substs);
             let expected_ty = self.normalize_associated_types_in(expected_ty);
             self.infer_pat(subpat.pat, &expected_ty, default_bm);
         }