Diffstat (limited to 'crates')
 -rw-r--r--  crates/ra_hir/src/code_model.rs                   |  10
 -rw-r--r--  crates/ra_hir_ty/src/autoderef.rs                 |  15
 -rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs               |  55
 -rw-r--r--  crates/ra_hir_ty/src/lib.rs                       |  22
 -rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs         |  32
 -rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs              |  18
 -rw-r--r--  crates/ra_hir_ty/src/traits.rs                    |  16
 -rw-r--r--  crates/ra_hir_ty/src/traits/chalk/mapping.rs      |  43
 -rw-r--r--  crates/ra_ide/src/completion.rs                   |   8
 -rw-r--r--  crates/ra_ide/src/completion/complete_keyword.rs  |   6
 -rw-r--r--  crates/ra_ide/src/completion/presentation.rs      |  50
 -rw-r--r--  crates/ra_ide/src/ssr.rs                          |  12
 -rw-r--r--  crates/ra_ssr/Cargo.toml                          |   1
 -rw-r--r--  crates/ra_ssr/src/matching.rs                     |  39
 -rw-r--r--  crates/ra_ssr/src/parsing.rs                      | 108
 -rw-r--r--  crates/ra_ssr/src/tests.rs                        |  17
16 files changed, 362 insertions, 90 deletions
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index cc72964ff..1b3525011 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -1189,7 +1189,7 @@ impl Type { | |||
1189 | None => return false, | 1189 | None => return false, |
1190 | }; | 1190 | }; |
1191 | 1191 | ||
1192 | let canonical_ty = Canonical { value: self.ty.value.clone(), num_vars: 0 }; | 1192 | let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; |
1193 | method_resolution::implements_trait( | 1193 | method_resolution::implements_trait( |
1194 | &canonical_ty, | 1194 | &canonical_ty, |
1195 | db, | 1195 | db, |
@@ -1213,7 +1213,7 @@ impl Type { | |||
1213 | self.ty.environment.clone(), | 1213 | self.ty.environment.clone(), |
1214 | hir_ty::Obligation::Trait(trait_ref), | 1214 | hir_ty::Obligation::Trait(trait_ref), |
1215 | ), | 1215 | ), |
1216 | num_vars: 0, | 1216 | kinds: Arc::new([]), |
1217 | }; | 1217 | }; |
1218 | 1218 | ||
1219 | db.trait_solve(self.krate, goal).is_some() | 1219 | db.trait_solve(self.krate, goal).is_some() |
@@ -1288,7 +1288,7 @@ impl Type { | |||
1288 | pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { | 1288 | pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { |
1289 | // There should be no inference vars in types passed here | 1289 | // There should be no inference vars in types passed here |
1290 | // FIXME check that? | 1290 | // FIXME check that? |
1291 | let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; | 1291 | let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; |
1292 | let environment = self.ty.environment.clone(); | 1292 | let environment = self.ty.environment.clone(); |
1293 | let ty = InEnvironment { value: canonical, environment }; | 1293 | let ty = InEnvironment { value: canonical, environment }; |
1294 | autoderef(db, Some(self.krate), ty) | 1294 | autoderef(db, Some(self.krate), ty) |
@@ -1329,7 +1329,7 @@ impl Type { | |||
1329 | // There should be no inference vars in types passed here | 1329 | // There should be no inference vars in types passed here |
1330 | // FIXME check that? | 1330 | // FIXME check that? |
1331 | // FIXME replace Unknown by bound vars here | 1331 | // FIXME replace Unknown by bound vars here |
1332 | let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; | 1332 | let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; |
1333 | 1333 | ||
1334 | let env = self.ty.environment.clone(); | 1334 | let env = self.ty.environment.clone(); |
1335 | let krate = krate.id; | 1335 | let krate = krate.id; |
@@ -1360,7 +1360,7 @@ impl Type { | |||
1360 | // There should be no inference vars in types passed here | 1360 | // There should be no inference vars in types passed here |
1361 | // FIXME check that? | 1361 | // FIXME check that? |
1362 | // FIXME replace Unknown by bound vars here | 1362 | // FIXME replace Unknown by bound vars here |
1363 | let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; | 1363 | let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; |
1364 | 1364 | ||
1365 | let env = self.ty.environment.clone(); | 1365 | let env = self.ty.environment.clone(); |
1366 | let krate = krate.id; | 1366 | let krate = krate.id; |
diff --git a/crates/ra_hir_ty/src/autoderef.rs b/crates/ra_hir_ty/src/autoderef.rs
index 1b0f84c5c..c727012c6 100644
--- a/crates/ra_hir_ty/src/autoderef.rs
+++ b/crates/ra_hir_ty/src/autoderef.rs
@@ -37,7 +37,7 @@ pub(crate) fn deref( | |||
37 | ty: InEnvironment<&Canonical<Ty>>, | 37 | ty: InEnvironment<&Canonical<Ty>>, |
38 | ) -> Option<Canonical<Ty>> { | 38 | ) -> Option<Canonical<Ty>> { |
39 | if let Some(derefed) = ty.value.value.builtin_deref() { | 39 | if let Some(derefed) = ty.value.value.builtin_deref() { |
40 | Some(Canonical { value: derefed, num_vars: ty.value.num_vars }) | 40 | Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() }) |
41 | } else { | 41 | } else { |
42 | deref_by_trait(db, krate, ty) | 42 | deref_by_trait(db, krate, ty) |
43 | } | 43 | } |
@@ -68,8 +68,8 @@ fn deref_by_trait( | |||
68 | 68 | ||
69 | // Check that the type implements Deref at all | 69 | // Check that the type implements Deref at all |
70 | let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; | 70 | let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; |
71 | let implements_goal = super::Canonical { | 71 | let implements_goal = Canonical { |
72 | num_vars: ty.value.num_vars, | 72 | kinds: ty.value.kinds.clone(), |
73 | value: InEnvironment { | 73 | value: InEnvironment { |
74 | value: Obligation::Trait(trait_ref), | 74 | value: Obligation::Trait(trait_ref), |
75 | environment: ty.environment.clone(), | 75 | environment: ty.environment.clone(), |
@@ -81,7 +81,7 @@ fn deref_by_trait( | |||
81 | 81 | ||
82 | // Now do the assoc type projection | 82 | // Now do the assoc type projection |
83 | let projection = super::traits::ProjectionPredicate { | 83 | let projection = super::traits::ProjectionPredicate { |
84 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)), | 84 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())), |
85 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, | 85 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, |
86 | }; | 86 | }; |
87 | 87 | ||
@@ -89,7 +89,8 @@ fn deref_by_trait( | |||
89 | 89 | ||
90 | let in_env = InEnvironment { value: obligation, environment: ty.environment }; | 90 | let in_env = InEnvironment { value: obligation, environment: ty.environment }; |
91 | 91 | ||
92 | let canonical = super::Canonical { num_vars: 1 + ty.value.num_vars, value: in_env }; | 92 | let canonical = |
93 | Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General))); | ||
93 | 94 | ||
94 | let solution = db.trait_solve(krate, canonical)?; | 95 | let solution = db.trait_solve(krate, canonical)?; |
95 | 96 | ||
@@ -110,7 +111,7 @@ fn deref_by_trait( | |||
110 | // assumptions will be broken. We would need to properly introduce | 111 | // assumptions will be broken. We would need to properly introduce |
111 | // new variables in that case | 112 | // new variables in that case |
112 | 113 | ||
113 | for i in 1..vars.0.num_vars { | 114 | for i in 1..vars.0.kinds.len() { |
114 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) | 115 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) |
115 | { | 116 | { |
116 | warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); | 117 | warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); |
@@ -119,7 +120,7 @@ fn deref_by_trait( | |||
119 | } | 120 | } |
120 | Some(Canonical { | 121 | Some(Canonical { |
121 | value: vars.0.value[vars.0.value.len() - 1].clone(), | 122 | value: vars.0.value[vars.0.value.len() - 1].clone(), |
122 | num_vars: vars.0.num_vars, | 123 | kinds: vars.0.kinds.clone(), |
123 | }) | 124 | }) |
124 | } | 125 | } |
125 | Solution::Ambig(_) => { | 126 | Solution::Ambig(_) => { |
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
index 269495ca0..2e895d911 100644
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ b/crates/ra_hir_ty/src/infer/unify.rs
@@ -9,7 +9,7 @@ use test_utils::mark; | |||
9 | use super::{InferenceContext, Obligation}; | 9 | use super::{InferenceContext, Obligation}; |
10 | use crate::{ | 10 | use crate::{ |
11 | BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty, | 11 | BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty, |
12 | TypeCtor, TypeWalk, | 12 | TyKind, TypeCtor, TypeWalk, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | impl<'a> InferenceContext<'a> { | 15 | impl<'a> InferenceContext<'a> { |
@@ -86,10 +86,20 @@ where | |||
86 | } | 86 | } |
87 | 87 | ||
88 | fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { | 88 | fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { |
89 | Canonicalized { | 89 | let kinds = self |
90 | value: Canonical { value: result, num_vars: self.free_vars.len() }, | 90 | .free_vars |
91 | free_vars: self.free_vars, | 91 | .iter() |
92 | } | 92 | .map(|v| match v { |
93 | // mapping MaybeNeverTypeVar to the same kind as general ones | ||
94 | // should be fine, because as opposed to int or float type vars, | ||
95 | // they don't restrict what kind of type can go into them, they | ||
96 | // just affect fallback. | ||
97 | InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General, | ||
98 | InferTy::IntVar(_) => TyKind::Integer, | ||
99 | InferTy::FloatVar(_) => TyKind::Float, | ||
100 | }) | ||
101 | .collect(); | ||
102 | Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars } | ||
93 | } | 103 | } |
94 | 104 | ||
95 | pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> { | 105 | pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> { |
@@ -131,26 +141,41 @@ impl<T> Canonicalized<T> { | |||
131 | ty | 141 | ty |
132 | } | 142 | } |
133 | 143 | ||
134 | pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Vec<Ty>>) { | 144 | pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) { |
135 | // the solution may contain new variables, which we need to convert to new inference vars | 145 | // the solution may contain new variables, which we need to convert to new inference vars |
136 | let new_vars = Substs((0..solution.num_vars).map(|_| ctx.table.new_type_var()).collect()); | 146 | let new_vars = Substs( |
147 | solution | ||
148 | .kinds | ||
149 | .iter() | ||
150 | .map(|k| match k { | ||
151 | TyKind::General => ctx.table.new_type_var(), | ||
152 | TyKind::Integer => ctx.table.new_integer_var(), | ||
153 | TyKind::Float => ctx.table.new_float_var(), | ||
154 | }) | ||
155 | .collect(), | ||
156 | ); | ||
137 | for (i, ty) in solution.value.into_iter().enumerate() { | 157 | for (i, ty) in solution.value.into_iter().enumerate() { |
138 | let var = self.free_vars[i]; | 158 | let var = self.free_vars[i]; |
139 | // eagerly replace projections in the type; we may be getting types | 159 | // eagerly replace projections in the type; we may be getting types |
140 | // e.g. from where clauses where this hasn't happened yet | 160 | // e.g. from where clauses where this hasn't happened yet |
141 | let ty = ctx.normalize_associated_types_in(ty.subst_bound_vars(&new_vars)); | 161 | let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars)); |
142 | ctx.table.unify(&Ty::Infer(var), &ty); | 162 | ctx.table.unify(&Ty::Infer(var), &ty); |
143 | } | 163 | } |
144 | } | 164 | } |
145 | } | 165 | } |
146 | 166 | ||
147 | pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> { | 167 | pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> { |
148 | let mut table = InferenceTable::new(); | 168 | let mut table = InferenceTable::new(); |
149 | let num_vars = ty1.num_vars.max(ty2.num_vars); | 169 | let vars = Substs( |
150 | let vars = | 170 | tys.kinds |
151 | Substs::builder(num_vars).fill(std::iter::repeat_with(|| table.new_type_var())).build(); | 171 | .iter() |
152 | let ty1_with_vars = ty1.value.clone().subst_bound_vars(&vars); | 172 | // we always use type vars here because we want everything to |
153 | let ty2_with_vars = ty2.value.clone().subst_bound_vars(&vars); | 173 | // fallback to Unknown in the end (kind of hacky, as below) |
174 | .map(|_| table.new_type_var()) | ||
175 | .collect(), | ||
176 | ); | ||
177 | let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars); | ||
178 | let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars); | ||
154 | if !table.unify(&ty1_with_vars, &ty2_with_vars) { | 179 | if !table.unify(&ty1_with_vars, &ty2_with_vars) { |
155 | return None; | 180 | return None; |
156 | } | 181 | } |
@@ -162,7 +187,7 @@ pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> { | |||
162 | } | 187 | } |
163 | } | 188 | } |
164 | Some( | 189 | Some( |
165 | Substs::builder(ty1.num_vars) | 190 | Substs::builder(tys.kinds.len()) |
166 | .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone()))) | 191 | .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone()))) |
167 | .build(), | 192 | .build(), |
168 | ) | 193 | ) |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index c9513b752..7f3f5e771 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -662,13 +662,27 @@ impl TypeWalk for GenericPredicate { | |||
662 | 662 | ||
663 | /// Basically a claim (currently not validated / checked) that the contained | 663 | /// Basically a claim (currently not validated / checked) that the contained |
664 | /// type / trait ref contains no inference variables; any inference variables it | 664 | /// type / trait ref contains no inference variables; any inference variables it |
665 | /// contained have been replaced by bound variables, and `num_vars` tells us how | 665 | /// contained have been replaced by bound variables, and `kinds` tells us how |
666 | /// many there are. This is used to erase irrelevant differences between types | 666 | /// many there are and whether they were normal or float/int variables. This is |
667 | /// before using them in queries. | 667 | /// used to erase irrelevant differences between types before using them in |
668 | /// queries. | ||
668 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 669 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
669 | pub struct Canonical<T> { | 670 | pub struct Canonical<T> { |
670 | pub value: T, | 671 | pub value: T, |
671 | pub num_vars: usize, | 672 | pub kinds: Arc<[TyKind]>, |
673 | } | ||
674 | |||
675 | impl<T> Canonical<T> { | ||
676 | pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self { | ||
677 | Self { value, kinds: kinds.into_iter().collect() } | ||
678 | } | ||
679 | } | ||
680 | |||
681 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] | ||
682 | pub enum TyKind { | ||
683 | General, | ||
684 | Integer, | ||
685 | Float, | ||
672 | } | 686 | } |
673 | 687 | ||
674 | /// A function signature as seen by type inference: Several parameter types and | 688 | /// A function signature as seen by type inference: Several parameter types and |
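To make the shape of the new API concrete, here is a small self-contained sketch of the `Canonical`/`TyKind` pair introduced in this hunk. It is illustrative only; the real definitions live in `ra_hir_ty` and are used with actual `Ty` values rather than the string stand-ins below.

```rust
use std::sync::Arc;

// Standalone model of the types added in this hunk, for illustration only.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Canonical<T> {
    pub value: T,
    pub kinds: Arc<[TyKind]>,
}

impl<T> Canonical<T> {
    pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self {
        Self { value, kinds: kinds.into_iter().collect() }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum TyKind {
    General,
    Integer,
    Float,
}

fn main() {
    // A type with no canonical variables: what callers used to write as
    // `num_vars: 0` is now an empty kind list.
    let no_vars: Canonical<&str> = Canonical { value: "u32", kinds: Arc::new([]) };

    // Appending one fresh general variable, as `deref_by_trait` now does for
    // the projected `Deref::Target` type.
    let one_more =
        Canonical::new("T::Target", no_vars.kinds.iter().copied().chain(Some(TyKind::General)));

    assert_eq!(no_vars.kinds.len(), 0);
    assert_eq!(one_more.kinds.len(), 1);
}
```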
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs
index 5dbabd12b..a45febbf7 100644
--- a/crates/ra_hir_ty/src/method_resolution.rs
+++ b/crates/ra_hir_ty/src/method_resolution.rs
@@ -2,7 +2,7 @@ | |||
2 | //! For details about how this works in rustc, see the method lookup page in the | 2 | //! For details about how this works in rustc, see the method lookup page in the |
3 | //! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) | 3 | //! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) |
4 | //! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. | 4 | //! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. |
5 | use std::sync::Arc; | 5 | use std::{iter, sync::Arc}; |
6 | 6 | ||
7 | use arrayvec::ArrayVec; | 7 | use arrayvec::ArrayVec; |
8 | use hir_def::{ | 8 | use hir_def::{ |
@@ -17,7 +17,8 @@ use rustc_hash::{FxHashMap, FxHashSet}; | |||
17 | use super::Substs; | 17 | use super::Substs; |
18 | use crate::{ | 18 | use crate::{ |
19 | autoderef, db::HirDatabase, primitive::FloatBitness, utils::all_super_traits, ApplicationTy, | 19 | autoderef, db::HirDatabase, primitive::FloatBitness, utils::all_super_traits, ApplicationTy, |
20 | Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, | 20 | Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor, |
21 | TypeWalk, | ||
21 | }; | 22 | }; |
22 | 23 | ||
23 | /// This is used as a key for indexing impls. | 24 | /// This is used as a key for indexing impls. |
@@ -372,7 +373,7 @@ fn iterate_method_candidates_with_autoref( | |||
372 | return true; | 373 | return true; |
373 | } | 374 | } |
374 | let refed = Canonical { | 375 | let refed = Canonical { |
375 | num_vars: deref_chain[0].num_vars, | 376 | kinds: deref_chain[0].kinds.clone(), |
376 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), | 377 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), |
377 | }; | 378 | }; |
378 | if iterate_method_candidates_by_receiver( | 379 | if iterate_method_candidates_by_receiver( |
@@ -388,7 +389,7 @@ fn iterate_method_candidates_with_autoref( | |||
388 | return true; | 389 | return true; |
389 | } | 390 | } |
390 | let ref_muted = Canonical { | 391 | let ref_muted = Canonical { |
391 | num_vars: deref_chain[0].num_vars, | 392 | kinds: deref_chain[0].kinds.clone(), |
392 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), | 393 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), |
393 | }; | 394 | }; |
394 | if iterate_method_candidates_by_receiver( | 395 | if iterate_method_candidates_by_receiver( |
@@ -607,18 +608,19 @@ pub(crate) fn inherent_impl_substs( | |||
607 | // we create a var for each type parameter of the impl; we need to keep in | 608 | // we create a var for each type parameter of the impl; we need to keep in |
608 | // mind here that `self_ty` might have vars of its own | 609 | // mind here that `self_ty` might have vars of its own |
609 | let vars = Substs::build_for_def(db, impl_id) | 610 | let vars = Substs::build_for_def(db, impl_id) |
610 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.num_vars) | 611 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len()) |
611 | .build(); | 612 | .build(); |
612 | let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); | 613 | let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); |
613 | let self_ty_with_vars = | 614 | let mut kinds = self_ty.kinds.to_vec(); |
614 | Canonical { num_vars: vars.len() + self_ty.num_vars, value: self_ty_with_vars }; | 615 | kinds.extend(iter::repeat(TyKind::General).take(vars.len())); |
615 | let substs = super::infer::unify(&self_ty_with_vars, self_ty); | 616 | let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) }; |
617 | let substs = super::infer::unify(&tys); | ||
616 | // We only want the substs for the vars we added, not the ones from self_ty. | 618 | // We only want the substs for the vars we added, not the ones from self_ty. |
617 | // Also, if any of the vars we added are still in there, we replace them by | 619 | // Also, if any of the vars we added are still in there, we replace them by |
618 | // Unknown. I think this can only really happen if self_ty contained | 620 | // Unknown. I think this can only really happen if self_ty contained |
619 | // Unknown, and in that case we want the result to contain Unknown in those | 621 | // Unknown, and in that case we want the result to contain Unknown in those |
620 | // places again. | 622 | // places again. |
621 | substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.num_vars)) | 623 | substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len())) |
622 | } | 624 | } |
623 | 625 | ||
624 | /// This replaces any 'free' Bound vars in `s` (i.e. those with indices past | 626 | /// This replaces any 'free' Bound vars in `s` (i.e. those with indices past |
@@ -678,15 +680,15 @@ fn generic_implements_goal( | |||
678 | trait_: TraitId, | 680 | trait_: TraitId, |
679 | self_ty: Canonical<Ty>, | 681 | self_ty: Canonical<Ty>, |
680 | ) -> Canonical<InEnvironment<super::Obligation>> { | 682 | ) -> Canonical<InEnvironment<super::Obligation>> { |
681 | let num_vars = self_ty.num_vars; | 683 | let mut kinds = self_ty.kinds.to_vec(); |
682 | let substs = super::Substs::build_for_def(db, trait_) | 684 | let substs = super::Substs::build_for_def(db, trait_) |
683 | .push(self_ty.value) | 685 | .push(self_ty.value) |
684 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, num_vars) | 686 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) |
685 | .build(); | 687 | .build(); |
686 | let num_vars = substs.len() - 1 + self_ty.num_vars; | 688 | kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1)); |
687 | let trait_ref = TraitRef { trait_, substs }; | 689 | let trait_ref = TraitRef { trait_, substs }; |
688 | let obligation = super::Obligation::Trait(trait_ref); | 690 | let obligation = super::Obligation::Trait(trait_ref); |
689 | Canonical { num_vars, value: InEnvironment::new(env, obligation) } | 691 | Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) } |
690 | } | 692 | } |
691 | 693 | ||
692 | fn autoderef_method_receiver( | 694 | fn autoderef_method_receiver( |
@@ -699,9 +701,9 @@ fn autoderef_method_receiver( | |||
699 | if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = | 701 | if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = |
700 | deref_chain.last().map(|ty| &ty.value) | 702 | deref_chain.last().map(|ty| &ty.value) |
701 | { | 703 | { |
702 | let num_vars = deref_chain.last().unwrap().num_vars; | 704 | let kinds = deref_chain.last().unwrap().kinds.clone(); |
703 | let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); | 705 | let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); |
704 | deref_chain.push(Canonical { value: unsized_ty, num_vars }) | 706 | deref_chain.push(Canonical { value: unsized_ty, kinds }) |
705 | } | 707 | } |
706 | deref_chain | 708 | deref_chain |
707 | } | 709 | } |
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 01c919a7e..766790576 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -3029,3 +3029,21 @@ fn infer_dyn_fn_output() { | |||
3029 | "### | 3029 | "### |
3030 | ); | 3030 | ); |
3031 | } | 3031 | } |
3032 | |||
3033 | #[test] | ||
3034 | fn variable_kinds() { | ||
3035 | check_types( | ||
3036 | r#" | ||
3037 | trait Trait<T> { fn get(self, t: T) -> T; } | ||
3038 | struct S; | ||
3039 | impl Trait<u128> for S {} | ||
3040 | impl Trait<f32> for S {} | ||
3041 | fn test() { | ||
3042 | S.get(1); | ||
3043 | //^^^^^^^^ u128 | ||
3044 | S.get(1.); | ||
3045 | //^^^^^^^^ f32 | ||
3046 | } | ||
3047 | "#, | ||
3048 | ); | ||
3049 | } | ||
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 6f43c3a22..2a6d7faef 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -1,5 +1,5 @@ | |||
1 | //! Trait solving using Chalk. | 1 | //! Trait solving using Chalk. |
2 | use std::{panic, sync::Arc}; | 2 | use std::sync::Arc; |
3 | 3 | ||
4 | use chalk_ir::cast::Cast; | 4 | use chalk_ir::cast::Cast; |
5 | use hir_def::{ | 5 | use hir_def::{ |
@@ -8,7 +8,7 @@ use hir_def::{ | |||
8 | use ra_db::{impl_intern_key, salsa, CrateId}; | 8 | use ra_db::{impl_intern_key, salsa, CrateId}; |
9 | use ra_prof::profile; | 9 | use ra_prof::profile; |
10 | 10 | ||
11 | use crate::{db::HirDatabase, DebruijnIndex}; | 11 | use crate::{db::HirDatabase, DebruijnIndex, Substs}; |
12 | 12 | ||
13 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; | 13 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; |
14 | 14 | ||
@@ -190,15 +190,7 @@ fn solution_from_chalk( | |||
190 | solution: chalk_solve::Solution<Interner>, | 190 | solution: chalk_solve::Solution<Interner>, |
191 | ) -> Solution { | 191 | ) -> Solution { |
192 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| { | 192 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| { |
193 | let value = subst | 193 | let result = from_chalk(db, subst); |
194 | .value | ||
195 | .iter(&Interner) | ||
196 | .map(|p| match p.ty(&Interner) { | ||
197 | Some(ty) => from_chalk(db, ty.clone()), | ||
198 | None => unimplemented!(), | ||
199 | }) | ||
200 | .collect(); | ||
201 | let result = Canonical { value, num_vars: subst.binders.len(&Interner) }; | ||
202 | SolutionVariables(result) | 194 | SolutionVariables(result) |
203 | }; | 195 | }; |
204 | match solution { | 196 | match solution { |
@@ -222,7 +214,7 @@ fn solution_from_chalk( | |||
222 | } | 214 | } |
223 | 215 | ||
224 | #[derive(Clone, Debug, PartialEq, Eq)] | 216 | #[derive(Clone, Debug, PartialEq, Eq)] |
225 | pub struct SolutionVariables(pub Canonical<Vec<Ty>>); | 217 | pub struct SolutionVariables(pub Canonical<Substs>); |
226 | 218 | ||
227 | #[derive(Clone, Debug, PartialEq, Eq)] | 219 | #[derive(Clone, Debug, PartialEq, Eq)] |
228 | /// A (possible) solution for a proposed goal. | 220 | /// A (possible) solution for a proposed goal. |
diff --git a/crates/ra_hir_ty/src/traits/chalk/mapping.rs b/crates/ra_hir_ty/src/traits/chalk/mapping.rs
index ac82ea831..433d6aa03 100644
--- a/crates/ra_hir_ty/src/traits/chalk/mapping.rs
+++ b/crates/ra_hir_ty/src/traits/chalk/mapping.rs
@@ -17,7 +17,7 @@ use crate::{ | |||
17 | primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, | 17 | primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, |
18 | traits::{builtin, AssocTyValue, Canonical, Impl, Obligation}, | 18 | traits::{builtin, AssocTyValue, Canonical, Impl, Obligation}, |
19 | ApplicationTy, CallableDef, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, | 19 | ApplicationTy, CallableDef, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, |
20 | ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, | 20 | ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor, |
21 | }; | 21 | }; |
22 | 22 | ||
23 | use super::interner::*; | 23 | use super::interner::*; |
@@ -555,22 +555,39 @@ where | |||
555 | type Chalk = chalk_ir::Canonical<T::Chalk>; | 555 | type Chalk = chalk_ir::Canonical<T::Chalk>; |
556 | 556 | ||
557 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> { | 557 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> { |
558 | let parameter = chalk_ir::CanonicalVarKind::new( | 558 | let kinds = self |
559 | chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General), | 559 | .kinds |
560 | chalk_ir::UniverseIndex::ROOT, | 560 | .iter() |
561 | ); | 561 | .map(|k| match k { |
562 | TyKind::General => chalk_ir::TyKind::General, | ||
563 | TyKind::Integer => chalk_ir::TyKind::Integer, | ||
564 | TyKind::Float => chalk_ir::TyKind::Float, | ||
565 | }) | ||
566 | .map(|tk| { | ||
567 | chalk_ir::CanonicalVarKind::new( | ||
568 | chalk_ir::VariableKind::Ty(tk), | ||
569 | chalk_ir::UniverseIndex::ROOT, | ||
570 | ) | ||
571 | }); | ||
562 | let value = self.value.to_chalk(db); | 572 | let value = self.value.to_chalk(db); |
563 | chalk_ir::Canonical { | 573 | chalk_ir::Canonical { value, binders: chalk_ir::CanonicalVarKinds::from(&Interner, kinds) } |
564 | value, | ||
565 | binders: chalk_ir::CanonicalVarKinds::from(&Interner, vec![parameter; self.num_vars]), | ||
566 | } | ||
567 | } | 574 | } |
568 | 575 | ||
569 | fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { | 576 | fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { |
570 | Canonical { | 577 | let kinds = canonical |
571 | num_vars: canonical.binders.len(&Interner), | 578 | .binders |
572 | value: from_chalk(db, canonical.value), | 579 | .iter(&Interner) |
573 | } | 580 | .map(|k| match k.kind { |
581 | chalk_ir::VariableKind::Ty(tk) => match tk { | ||
582 | chalk_ir::TyKind::General => TyKind::General, | ||
583 | chalk_ir::TyKind::Integer => TyKind::Integer, | ||
584 | chalk_ir::TyKind::Float => TyKind::Float, | ||
585 | }, | ||
586 | chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"), | ||
587 | chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"), | ||
588 | }) | ||
589 | .collect(); | ||
590 | Canonical { kinds, value: from_chalk(db, canonical.value) } | ||
574 | } | 591 | } |
575 | } | 592 | } |
576 | 593 | ||
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index 69ea754b3..9ebb8ebb7 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -63,11 +63,11 @@ pub use crate::completion::{ | |||
63 | // There also snippet completions: | 63 | // There also snippet completions: |
64 | // | 64 | // |
65 | // .Expressions | 65 | // .Expressions |
66 | // - `pd` -> `println!("{:?}")` | 66 | // - `pd` -> `eprintln!(" = {:?}", );")` |
67 | // - `ppd` -> `println!("{:#?}")` | 67 | // - `ppd` -> `eprintln!(" = {:#?}", );` |
68 | // | 68 | // |
69 | // .Items | 69 | // .Items |
70 | // - `tfn` -> `#[test] fn f(){}` | 70 | // - `tfn` -> `#[test] fn feature(){}` |
71 | // - `tmod` -> | 71 | // - `tmod` -> |
72 | // ```rust | 72 | // ```rust |
73 | // #[cfg(test)] | 73 | // #[cfg(test)] |
@@ -75,7 +75,7 @@ pub use crate::completion::{ | |||
75 | // use super::*; | 75 | // use super::*; |
76 | // | 76 | // |
77 | // #[test] | 77 | // #[test] |
78 | // fn test_fn() {} | 78 | // fn test_name() {} |
79 | // } | 79 | // } |
80 | // ``` | 80 | // ``` |
81 | 81 | ||
diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs
index 3b174f916..e599cc3d1 100644
--- a/crates/ra_ide/src/completion/complete_keyword.rs
+++ b/crates/ra_ide/src/completion/complete_keyword.rs
@@ -1,6 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_syntax::ast; | 3 | use ra_syntax::{ast, SyntaxKind}; |
4 | 4 | ||
5 | use crate::completion::{ | 5 | use crate::completion::{ |
6 | CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, | 6 | CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, |
@@ -37,6 +37,10 @@ pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC | |||
37 | } | 37 | } |
38 | 38 | ||
39 | pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { | 39 | pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { |
40 | if ctx.token.kind() == SyntaxKind::COMMENT { | ||
41 | return; | ||
42 | } | ||
43 | |||
40 | let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; | 44 | let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; |
41 | if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { | 45 | if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { |
42 | add_keyword(ctx, acc, "where", "where "); | 46 | add_keyword(ctx, acc, "where", "where "); |
diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs
index 4fdc2f0bb..b18279746 100644
--- a/crates/ra_ide/src/completion/presentation.rs
+++ b/crates/ra_ide/src/completion/presentation.rs
@@ -1516,4 +1516,54 @@ mod tests { | |||
1516 | "### | 1516 | "### |
1517 | ); | 1517 | ); |
1518 | } | 1518 | } |
1519 | |||
1520 | #[test] | ||
1521 | fn no_keyword_autocompletion_on_line_comments() { | ||
1522 | assert_debug_snapshot!( | ||
1523 | do_completion( | ||
1524 | r" | ||
1525 | fn test() { | ||
1526 | let x = 2; // A comment<|> | ||
1527 | } | ||
1528 | ", | ||
1529 | CompletionKind::Keyword | ||
1530 | ), | ||
1531 | @r###" | ||
1532 | [] | ||
1533 | "### | ||
1534 | ); | ||
1535 | } | ||
1536 | |||
1537 | #[test] | ||
1538 | fn no_keyword_autocompletion_on_multi_line_comments() { | ||
1539 | assert_debug_snapshot!( | ||
1540 | do_completion( | ||
1541 | r" | ||
1542 | /* | ||
1543 | Some multi-line comment<|> | ||
1544 | */ | ||
1545 | ", | ||
1546 | CompletionKind::Keyword | ||
1547 | ), | ||
1548 | @r###" | ||
1549 | [] | ||
1550 | "### | ||
1551 | ); | ||
1552 | } | ||
1553 | |||
1554 | #[test] | ||
1555 | fn no_keyword_autocompletion_on_doc_comments() { | ||
1556 | assert_debug_snapshot!( | ||
1557 | do_completion( | ||
1558 | r" | ||
1559 | /// Some doc comment | ||
1560 | /// let test<|> = 1 | ||
1561 | ", | ||
1562 | CompletionKind::Keyword | ||
1563 | ), | ||
1564 | @r###" | ||
1565 | [] | ||
1566 | "### | ||
1567 | ); | ||
1568 | } | ||
1519 | } | 1569 | } |
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
index 9f8e540c0..b3e9e5dfe 100644
--- a/crates/ra_ide/src/ssr.rs
+++ b/crates/ra_ide/src/ssr.rs
@@ -10,6 +10,18 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule}; | |||
10 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. | 10 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. |
11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. | 11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. |
12 | // Within a macro call, a placeholder will match up until whatever token follows the placeholder. | 12 | // Within a macro call, a placeholder will match up until whatever token follows the placeholder. |
13 | // | ||
14 | // Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`. | ||
15 | // | ||
16 | // Supported constraints: | ||
17 | // | ||
18 | // |=== | ||
19 | // | Constraint | Restricts placeholder | ||
20 | // | ||
21 | // | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) | ||
22 | // | not(a) | Negates the constraint `a` | ||
23 | // |=== | ||
24 | // | ||
13 | // Available via the command `rust-analyzer.ssr`. | 25 | // Available via the command `rust-analyzer.ssr`. |
14 | // | 26 | // |
15 | // ```rust | 27 | // ```rust |
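As an illustration of the constraint syntax documented in this hunk (a hypothetical rule, consistent with the tests added in crates/ra_ssr/src/tests.rs further down): the rule `foo(${x:kind(literal)}) ==>> bar($x)` would rewrite `foo(42)` to `bar(42)` but leave `foo(y)` untouched, since `y` is not a literal, while `${x:not(kind(literal))}` in the search pattern would match exactly the opposite set of arguments.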
diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml
index 3c2f15a83..fe098aaee 100644
--- a/crates/ra_ssr/Cargo.toml
+++ b/crates/ra_ssr/Cargo.toml
@@ -17,3 +17,4 @@ ra_db = { path = "../ra_db" } | |||
17 | ra_ide_db = { path = "../ra_ide_db" } | 17 | ra_ide_db = { path = "../ra_ide_db" } |
18 | hir = { path = "../ra_hir", package = "ra_hir" } | 18 | hir = { path = "../ra_hir", package = "ra_hir" } |
19 | rustc-hash = "1.1.0" | 19 | rustc-hash = "1.1.0" |
20 | test_utils = { path = "../test_utils" } | ||
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index 53d802e77..ce53d46d2 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -2,7 +2,7 @@ | |||
2 | //! process of matching, placeholder values are recorded. | 2 | //! process of matching, placeholder values are recorded. |
3 | 3 | ||
4 | use crate::{ | 4 | use crate::{ |
5 | parsing::{Placeholder, SsrTemplate}, | 5 | parsing::{Constraint, NodeKind, Placeholder, SsrTemplate}, |
6 | SsrMatches, SsrPattern, SsrRule, | 6 | SsrMatches, SsrPattern, SsrRule, |
7 | }; | 7 | }; |
8 | use hir::Semantics; | 8 | use hir::Semantics; |
@@ -11,6 +11,7 @@ use ra_syntax::ast::{AstNode, AstToken}; | |||
11 | use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; | 11 | use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; |
12 | use rustc_hash::FxHashMap; | 12 | use rustc_hash::FxHashMap; |
13 | use std::{cell::Cell, iter::Peekable}; | 13 | use std::{cell::Cell, iter::Peekable}; |
14 | use test_utils::mark; | ||
14 | 15 | ||
15 | // Creates a match error. If we're currently attempting to match some code that we thought we were | 16 | // Creates a match error. If we're currently attempting to match some code that we thought we were |
16 | // going to match, as indicated by the --debug-snippet flag, then populate the reason field. | 17 | // going to match, as indicated by the --debug-snippet flag, then populate the reason field. |
@@ -169,6 +170,9 @@ impl<'db, 'sema> MatchState<'db, 'sema> { | |||
169 | if let Some(placeholder) = | 170 | if let Some(placeholder) = |
170 | match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone())) | 171 | match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone())) |
171 | { | 172 | { |
173 | for constraint in &placeholder.constraints { | ||
174 | self.check_constraint(constraint, code)?; | ||
175 | } | ||
172 | if self.match_out.is_none() { | 176 | if self.match_out.is_none() { |
173 | return Ok(()); | 177 | return Ok(()); |
174 | } | 178 | } |
@@ -292,6 +296,24 @@ impl<'db, 'sema> MatchState<'db, 'sema> { | |||
292 | Ok(()) | 296 | Ok(()) |
293 | } | 297 | } |
294 | 298 | ||
299 | fn check_constraint( | ||
300 | &self, | ||
301 | constraint: &Constraint, | ||
302 | code: &SyntaxNode, | ||
303 | ) -> Result<(), MatchFailed> { | ||
304 | match constraint { | ||
305 | Constraint::Kind(kind) => { | ||
306 | kind.matches(code)?; | ||
307 | } | ||
308 | Constraint::Not(sub) => { | ||
309 | if self.check_constraint(&*sub, code).is_ok() { | ||
310 | fail_match!("Constraint {:?} failed for '{}'", constraint, code.text()); | ||
311 | } | ||
312 | } | ||
313 | } | ||
314 | Ok(()) | ||
315 | } | ||
316 | |||
295 | /// We want to allow the records to match in any order, so we have special matching logic for | 317 | /// We want to allow the records to match in any order, so we have special matching logic for |
296 | /// them. | 318 | /// them. |
297 | fn attempt_match_record_field_list( | 319 | fn attempt_match_record_field_list( |
@@ -515,6 +537,21 @@ impl SsrPattern { | |||
515 | } | 537 | } |
516 | } | 538 | } |
517 | 539 | ||
540 | impl NodeKind { | ||
541 | fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> { | ||
542 | let ok = match self { | ||
543 | Self::Literal => { | ||
544 | mark::hit!(literal_constraint); | ||
545 | ast::Literal::can_cast(node.kind()) | ||
546 | } | ||
547 | }; | ||
548 | if !ok { | ||
549 | fail_match!("Code '{}' isn't of kind {:?}", node.text(), self); | ||
550 | } | ||
551 | Ok(()) | ||
552 | } | ||
553 | } | ||
554 | |||
518 | // If `node` contains nothing but an ident then return it, otherwise return None. | 555 | // If `node` contains nothing but an ident then return it, otherwise return None. |
519 | fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> { | 556 | fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> { |
520 | match element { | 557 | match element { |
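Note how the two constraint forms compose in `check_constraint` above: `Constraint::Not` succeeds exactly when its inner constraint fails, so for a code node like `40 + 2` the `kind(literal)` check fails (a binary expression is not a literal) and `not(kind(literal))` therefore matches, which is what the `literal_constraint` test added in src/tests.rs below exercises.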
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index 04d46bd32..5ea125616 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -6,7 +6,7 @@ | |||
6 | //! e.g. expressions, type references etc. | 6 | //! e.g. expressions, type references etc. |
7 | 7 | ||
8 | use crate::{SsrError, SsrPattern, SsrRule}; | 8 | use crate::{SsrError, SsrPattern, SsrRule}; |
9 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind}; | 9 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T}; |
10 | use rustc_hash::{FxHashMap, FxHashSet}; | 10 | use rustc_hash::{FxHashMap, FxHashSet}; |
11 | use std::str::FromStr; | 11 | use std::str::FromStr; |
12 | 12 | ||
@@ -39,6 +39,18 @@ pub(crate) struct Placeholder { | |||
39 | pub(crate) ident: SmolStr, | 39 | pub(crate) ident: SmolStr, |
40 | /// A unique name used in place of this placeholder when we parse the pattern as Rust code. | 40 | /// A unique name used in place of this placeholder when we parse the pattern as Rust code. |
41 | stand_in_name: String, | 41 | stand_in_name: String, |
42 | pub(crate) constraints: Vec<Constraint>, | ||
43 | } | ||
44 | |||
45 | #[derive(Clone, Debug, PartialEq, Eq)] | ||
46 | pub(crate) enum Constraint { | ||
47 | Kind(NodeKind), | ||
48 | Not(Box<Constraint>), | ||
49 | } | ||
50 | |||
51 | #[derive(Clone, Debug, PartialEq, Eq)] | ||
52 | pub(crate) enum NodeKind { | ||
53 | Literal, | ||
42 | } | 54 | } |
43 | 55 | ||
44 | #[derive(Debug, Clone, PartialEq, Eq)] | 56 | #[derive(Debug, Clone, PartialEq, Eq)] |
@@ -149,7 +161,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> { | |||
149 | let mut placeholder_names = FxHashSet::default(); | 161 | let mut placeholder_names = FxHashSet::default(); |
150 | let mut tokens = tokenize(pattern_str)?.into_iter(); | 162 | let mut tokens = tokenize(pattern_str)?.into_iter(); |
151 | while let Some(token) = tokens.next() { | 163 | while let Some(token) = tokens.next() { |
152 | if token.kind == SyntaxKind::DOLLAR { | 164 | if token.kind == T![$] { |
153 | let placeholder = parse_placeholder(&mut tokens)?; | 165 | let placeholder = parse_placeholder(&mut tokens)?; |
154 | if !placeholder_names.insert(placeholder.ident.clone()) { | 166 | if !placeholder_names.insert(placeholder.ident.clone()) { |
155 | bail!("Name `{}` repeats more than once", placeholder.ident); | 167 | bail!("Name `{}` repeats more than once", placeholder.ident); |
@@ -177,6 +189,9 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { | |||
177 | if !defined_placeholders.contains(&placeholder.ident) { | 189 | if !defined_placeholders.contains(&placeholder.ident) { |
178 | undefined.push(format!("${}", placeholder.ident)); | 190 | undefined.push(format!("${}", placeholder.ident)); |
179 | } | 191 | } |
192 | if !placeholder.constraints.is_empty() { | ||
193 | bail!("Replacement placeholders cannot have constraints"); | ||
194 | } | ||
180 | } | 195 | } |
181 | } | 196 | } |
182 | if !undefined.is_empty() { | 197 | if !undefined.is_empty() { |
@@ -205,23 +220,90 @@ fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> { | |||
205 | 220 | ||
206 | fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> { | 221 | fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> { |
207 | let mut name = None; | 222 | let mut name = None; |
223 | let mut constraints = Vec::new(); | ||
208 | if let Some(token) = tokens.next() { | 224 | if let Some(token) = tokens.next() { |
209 | match token.kind { | 225 | match token.kind { |
210 | SyntaxKind::IDENT => { | 226 | SyntaxKind::IDENT => { |
211 | name = Some(token.text); | 227 | name = Some(token.text); |
212 | } | 228 | } |
229 | T!['{'] => { | ||
230 | let token = | ||
231 | tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?; | ||
232 | if token.kind == SyntaxKind::IDENT { | ||
233 | name = Some(token.text); | ||
234 | } | ||
235 | loop { | ||
236 | let token = tokens | ||
237 | .next() | ||
238 | .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?; | ||
239 | match token.kind { | ||
240 | T![:] => { | ||
241 | constraints.push(parse_constraint(tokens)?); | ||
242 | } | ||
243 | T!['}'] => break, | ||
244 | _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text), | ||
245 | } | ||
246 | } | ||
247 | } | ||
213 | _ => { | 248 | _ => { |
214 | bail!("Placeholders should be $name"); | 249 | bail!("Placeholders should either be $name or ${name:constraints}"); |
215 | } | 250 | } |
216 | } | 251 | } |
217 | } | 252 | } |
218 | let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?; | 253 | let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?; |
219 | Ok(Placeholder::new(name)) | 254 | Ok(Placeholder::new(name, constraints)) |
255 | } | ||
256 | |||
257 | fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> { | ||
258 | let constraint_type = tokens | ||
259 | .next() | ||
260 | .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))? | ||
261 | .text | ||
262 | .to_string(); | ||
263 | match constraint_type.as_str() { | ||
264 | "kind" => { | ||
265 | expect_token(tokens, "(")?; | ||
266 | let t = tokens.next().ok_or_else(|| { | ||
267 | SsrError::new("Unexpected end of constraint while looking for kind") | ||
268 | })?; | ||
269 | if t.kind != SyntaxKind::IDENT { | ||
270 | bail!("Expected ident, found {:?} while parsing kind constraint", t.kind); | ||
271 | } | ||
272 | expect_token(tokens, ")")?; | ||
273 | Ok(Constraint::Kind(NodeKind::from(&t.text)?)) | ||
274 | } | ||
275 | "not" => { | ||
276 | expect_token(tokens, "(")?; | ||
277 | let sub = parse_constraint(tokens)?; | ||
278 | expect_token(tokens, ")")?; | ||
279 | Ok(Constraint::Not(Box::new(sub))) | ||
280 | } | ||
281 | x => bail!("Unsupported constraint type '{}'", x), | ||
282 | } | ||
283 | } | ||
284 | |||
285 | fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> { | ||
286 | if let Some(t) = tokens.next() { | ||
287 | if t.text == expected { | ||
288 | return Ok(()); | ||
289 | } | ||
290 | bail!("Expected {} found {}", expected, t.text); | ||
291 | } | ||
292 | bail!("Expected {} found end of stream"); | ||
293 | } | ||
294 | |||
295 | impl NodeKind { | ||
296 | fn from(name: &SmolStr) -> Result<NodeKind, SsrError> { | ||
297 | Ok(match name.as_str() { | ||
298 | "literal" => NodeKind::Literal, | ||
299 | _ => bail!("Unknown node kind '{}'", name), | ||
300 | }) | ||
301 | } | ||
220 | } | 302 | } |
221 | 303 | ||
222 | impl Placeholder { | 304 | impl Placeholder { |
223 | fn new(name: SmolStr) -> Self { | 305 | fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self { |
224 | Self { stand_in_name: format!("__placeholder_{}", name), ident: name } | 306 | Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name } |
225 | } | 307 | } |
226 | } | 308 | } |
227 | 309 | ||
@@ -241,31 +323,31 @@ mod tests { | |||
241 | PatternElement::Token(Token { kind, text: SmolStr::new(text) }) | 323 | PatternElement::Token(Token { kind, text: SmolStr::new(text) }) |
242 | } | 324 | } |
243 | fn placeholder(name: &str) -> PatternElement { | 325 | fn placeholder(name: &str) -> PatternElement { |
244 | PatternElement::Placeholder(Placeholder::new(SmolStr::new(name))) | 326 | PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new())) |
245 | } | 327 | } |
246 | let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap(); | 328 | let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap(); |
247 | assert_eq!( | 329 | assert_eq!( |
248 | result.pattern.raw.tokens, | 330 | result.pattern.raw.tokens, |
249 | vec![ | 331 | vec![ |
250 | token(SyntaxKind::IDENT, "foo"), | 332 | token(SyntaxKind::IDENT, "foo"), |
251 | token(SyntaxKind::L_PAREN, "("), | 333 | token(T!['('], "("), |
252 | placeholder("a"), | 334 | placeholder("a"), |
253 | token(SyntaxKind::COMMA, ","), | 335 | token(T![,], ","), |
254 | token(SyntaxKind::WHITESPACE, " "), | 336 | token(SyntaxKind::WHITESPACE, " "), |
255 | placeholder("b"), | 337 | placeholder("b"), |
256 | token(SyntaxKind::R_PAREN, ")"), | 338 | token(T![')'], ")"), |
257 | ] | 339 | ] |
258 | ); | 340 | ); |
259 | assert_eq!( | 341 | assert_eq!( |
260 | result.template.tokens, | 342 | result.template.tokens, |
261 | vec![ | 343 | vec![ |
262 | token(SyntaxKind::IDENT, "bar"), | 344 | token(SyntaxKind::IDENT, "bar"), |
263 | token(SyntaxKind::L_PAREN, "("), | 345 | token(T!['('], "("), |
264 | placeholder("b"), | 346 | placeholder("b"), |
265 | token(SyntaxKind::COMMA, ","), | 347 | token(T![,], ","), |
266 | token(SyntaxKind::WHITESPACE, " "), | 348 | token(SyntaxKind::WHITESPACE, " "), |
267 | placeholder("a"), | 349 | placeholder("a"), |
268 | token(SyntaxKind::R_PAREN, ")"), | 350 | token(T![')'], ")"), |
269 | ] | 351 | ] |
270 | ); | 352 | ); |
271 | } | 353 | } |
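As a concrete illustration of what the parser above produces, here is the value a placeholder with a negated kind constraint parses into. This is a sketch using the crate-private types from this diff, not runnable outside `ra_ssr`:

```rust
// Parsing the placeholder `${a:not(kind(literal))}` with parse_placeholder
// yields, per the code above:
Placeholder {
    ident: SmolStr::new("a"),
    // generated by Placeholder::new as format!("__placeholder_{}", name)
    stand_in_name: "__placeholder_a".to_string(),
    constraints: vec![Constraint::Not(Box::new(Constraint::Kind(NodeKind::Literal)))],
}
```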
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index c692c97e2..9568d4432 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -1,5 +1,6 @@ | |||
1 | use crate::{MatchFinder, SsrRule}; | 1 | use crate::{MatchFinder, SsrRule}; |
2 | use ra_db::{FileId, SourceDatabaseExt}; | 2 | use ra_db::{FileId, SourceDatabaseExt}; |
3 | use test_utils::mark; | ||
3 | 4 | ||
4 | fn parse_error_text(query: &str) -> String { | 5 | fn parse_error_text(query: &str) -> String { |
5 | format!("{}", query.parse::<SsrRule>().unwrap_err()) | 6 | format!("{}", query.parse::<SsrRule>().unwrap_err()) |
@@ -302,6 +303,22 @@ fn match_pattern() { | |||
302 | } | 303 | } |
303 | 304 | ||
304 | #[test] | 305 | #[test] |
306 | fn literal_constraint() { | ||
307 | mark::check!(literal_constraint); | ||
308 | let code = r#" | ||
309 | fn f1() { | ||
310 | let x1 = Some(42); | ||
311 | let x2 = Some("foo"); | ||
312 | let x3 = Some(x1); | ||
313 | let x4 = Some(40 + 2); | ||
314 | let x5 = Some(true); | ||
315 | } | ||
316 | "#; | ||
317 | assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]); | ||
318 | assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]); | ||
319 | } | ||
320 | |||
321 | #[test] | ||
305 | fn match_reordered_struct_instantiation() { | 322 | fn match_reordered_struct_instantiation() { |
306 | assert_matches( | 323 | assert_matches( |
307 | "Foo {aa: 1, b: 2, ccc: 3}", | 324 | "Foo {aa: 1, b: 2, ccc: 3}", |