Diffstat (limited to 'crates')
-rw-r--r--   crates/ra_hir/src/code_model.rs                        |  31
-rw-r--r--   crates/ra_hir/src/semantics.rs                         |  28
-rw-r--r--   crates/ra_hir/src/source_analyzer.rs                   |  90
-rw-r--r--   crates/ra_hir_ty/src/_match.rs                         |  74
-rw-r--r--   crates/ra_hir_ty/src/expr.rs                           | 158
-rw-r--r--   crates/ra_hir_ty/src/tests.rs                          |   2
-rw-r--r--   crates/ra_ide/src/completion/complete_record.rs        |  96
-rw-r--r--   crates/ra_ide/src/snapshots/highlight_injection.html   |  39
-rw-r--r--   crates/ra_ide/src/snapshots/highlighting.html          |  10
-rw-r--r--   crates/ra_ide/src/syntax_highlighting.rs               |  59
-rw-r--r--   crates/ra_ide/src/syntax_highlighting/html.rs          |  66
-rw-r--r--   crates/ra_ide/src/syntax_highlighting/tests.rs         |  25
-rw-r--r--   crates/ra_mbe/src/tests.rs                             |  17
-rw-r--r--   crates/ra_parser/src/grammar/types.rs                  |   2
-rw-r--r--   crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs  |   8
-rw-r--r--   crates/ra_proc_macro_srv/src/proc_macro/mod.rs         |   4
-rw-r--r--   crates/ra_syntax/src/algo.rs                           |   2
17 files changed, 467 insertions(+), 244 deletions(-)
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index c6f3bdb8e..9baebf643 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -1027,8 +1027,16 @@ impl Type { | |||
1027 | ty: Ty, | 1027 | ty: Ty, |
1028 | ) -> Option<Type> { | 1028 | ) -> Option<Type> { |
1029 | let krate = resolver.krate()?; | 1029 | let krate = resolver.krate()?; |
1030 | Some(Type::new_with_resolver_inner(db, krate, resolver, ty)) | ||
1031 | } | ||
1032 | pub(crate) fn new_with_resolver_inner( | ||
1033 | db: &dyn HirDatabase, | ||
1034 | krate: CrateId, | ||
1035 | resolver: &Resolver, | ||
1036 | ty: Ty, | ||
1037 | ) -> Type { | ||
1030 | let environment = TraitEnvironment::lower(db, &resolver); | 1038 | let environment = TraitEnvironment::lower(db, &resolver); |
1031 | Some(Type { krate, ty: InEnvironment { value: ty, environment } }) | 1039 | Type { krate, ty: InEnvironment { value: ty, environment } } |
1032 | } | 1040 | } |
1033 | 1041 | ||
1034 | fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { | 1042 | fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { |
@@ -1152,27 +1160,6 @@ impl Type { | |||
1152 | res | 1160 | res |
1153 | } | 1161 | } |
1154 | 1162 | ||
1155 | pub fn variant_fields( | ||
1156 | &self, | ||
1157 | db: &dyn HirDatabase, | ||
1158 | def: VariantDef, | ||
1159 | ) -> Vec<(StructField, Type)> { | ||
1160 | // FIXME: check that ty and def match | ||
1161 | match &self.ty.value { | ||
1162 | Ty::Apply(a_ty) => { | ||
1163 | let field_types = db.field_types(def.into()); | ||
1164 | def.fields(db) | ||
1165 | .into_iter() | ||
1166 | .map(|it| { | ||
1167 | let ty = field_types[it.id].clone().subst(&a_ty.parameters); | ||
1168 | (it, self.derived(ty)) | ||
1169 | }) | ||
1170 | .collect() | ||
1171 | } | ||
1172 | _ => Vec::new(), | ||
1173 | } | ||
1174 | } | ||
1175 | |||
1176 | pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { | 1163 | pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { |
1177 | // There should be no inference vars in types passed here | 1164 | // There should be no inference vars in types passed here |
1178 | // FIXME check that? | 1165 | // FIXME check that? |
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 2ad231d36..2707e422d 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -23,7 +23,7 @@ use crate::{ | |||
23 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, | 23 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, |
24 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, | 24 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, |
25 | AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name, | 25 | AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name, |
26 | Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef, | 26 | Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, |
27 | }; | 27 | }; |
28 | 28 | ||
29 | #[derive(Debug, Clone, PartialEq, Eq)] | 29 | #[derive(Debug, Clone, PartialEq, Eq)] |
@@ -187,14 +187,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
187 | self.analyze(field.syntax()).resolve_record_field(self.db, field) | 187 | self.analyze(field.syntax()).resolve_record_field(self.db, field) |
188 | } | 188 | } |
189 | 189 | ||
190 | pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> { | ||
191 | self.analyze(record_lit.syntax()).resolve_record_literal(self.db, record_lit) | ||
192 | } | ||
193 | |||
194 | pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> { | ||
195 | self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat) | ||
196 | } | ||
197 | |||
198 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { | 190 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { |
199 | let sa = self.analyze(macro_call.syntax()); | 191 | let sa = self.analyze(macro_call.syntax()); |
200 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | 192 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); |
@@ -212,6 +204,24 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
212 | // FIXME: use this instead? | 204 | // FIXME: use this instead? |
213 | // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; | 205 | // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; |
214 | 206 | ||
207 | pub fn record_literal_missing_fields( | ||
208 | &self, | ||
209 | literal: &ast::RecordLit, | ||
210 | ) -> Vec<(StructField, Type)> { | ||
211 | self.analyze(literal.syntax()) | ||
212 | .record_literal_missing_fields(self.db, literal) | ||
213 | .unwrap_or_default() | ||
214 | } | ||
215 | |||
216 | pub fn record_pattern_missing_fields( | ||
217 | &self, | ||
218 | pattern: &ast::RecordPat, | ||
219 | ) -> Vec<(StructField, Type)> { | ||
220 | self.analyze(pattern.syntax()) | ||
221 | .record_pattern_missing_fields(self.db, pattern) | ||
222 | .unwrap_or_default() | ||
223 | } | ||
224 | |||
215 | pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { | 225 | pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { |
216 | let src = self.find_file(src.syntax().clone()).with_value(src).cloned(); | 226 | let src = self.find_file(src.syntax().clone()).with_value(src).cloned(); |
217 | T::to_def(self, src) | 227 | T::to_def(self, src) |
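The two new `Semantics` entry points above replace `resolve_record_literal`/`resolve_record_pattern`: instead of returning a `VariantDef` and leaving the field bookkeeping to each caller, they hand back the not-yet-mentioned fields together with their already-substituted types (the `code_model.rs` change splitting out `Type::new_with_resolver_inner` exists so `source_analyzer.rs` below can build those `Type`s). A minimal usage sketch, not part of the commit; `sema`, `db`, and `record_lit` are assumed to be in scope in some IDE feature:

    // Hypothetical caller; `sema: &Semantics<RootDatabase>`, `db: &RootDatabase`
    // and `record_lit: ast::RecordLit` are assumed to already exist.
    for (field, _ty) in sema.record_literal_missing_fields(&record_lit) {
        // `StructField::name` is the same accessor the completion code uses.
        eprintln!("field `{}` is not initialized", field.name(db));
    }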
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 815ca158c..45631f8fd 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -14,10 +14,13 @@ use hir_def::{ | |||
14 | }, | 14 | }, |
15 | expr::{ExprId, Pat, PatId}, | 15 | expr::{ExprId, Pat, PatId}, |
16 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, | 16 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, |
17 | AsMacroCall, DefWithBodyId, | 17 | AsMacroCall, DefWithBodyId, LocalStructFieldId, StructFieldId, VariantId, |
18 | }; | 18 | }; |
19 | use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; | 19 | use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; |
20 | use hir_ty::InferenceResult; | 20 | use hir_ty::{ |
21 | expr::{record_literal_missing_fields, record_pattern_missing_fields}, | ||
22 | InferenceResult, Substs, Ty, | ||
23 | }; | ||
21 | use ra_syntax::{ | 24 | use ra_syntax::{ |
22 | ast::{self, AstNode}, | 25 | ast::{self, AstNode}, |
23 | SyntaxNode, SyntaxNodePtr, TextUnit, | 26 | SyntaxNode, SyntaxNodePtr, TextUnit, |
@@ -25,8 +28,10 @@ use ra_syntax::{ | |||
25 | 28 | ||
26 | use crate::{ | 29 | use crate::{ |
27 | db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef, | 30 | db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef, |
28 | ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam, | 31 | ModPath, ModuleDef, Path, PathKind, Static, Struct, StructField, Trait, Type, TypeAlias, |
32 | TypeParam, | ||
29 | }; | 33 | }; |
34 | use ra_db::CrateId; | ||
30 | 35 | ||
31 | /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of | 36 | /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of |
32 | /// original source files. It should not be used inside the HIR itself. | 37 | /// original source files. It should not be used inside the HIR itself. |
@@ -164,23 +169,6 @@ impl SourceAnalyzer { | |||
164 | Some((struct_field.into(), local)) | 169 | Some((struct_field.into(), local)) |
165 | } | 170 | } |
166 | 171 | ||
167 | pub(crate) fn resolve_record_literal( | ||
168 | &self, | ||
169 | db: &dyn HirDatabase, | ||
170 | record_lit: &ast::RecordLit, | ||
171 | ) -> Option<crate::VariantDef> { | ||
172 | let expr_id = self.expr_id(db, &record_lit.clone().into())?; | ||
173 | self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into()) | ||
174 | } | ||
175 | |||
176 | pub(crate) fn resolve_record_pattern( | ||
177 | &self, | ||
178 | record_pat: &ast::RecordPat, | ||
179 | ) -> Option<crate::VariantDef> { | ||
180 | let pat_id = self.pat_id(&record_pat.clone().into())?; | ||
181 | self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into()) | ||
182 | } | ||
183 | |||
184 | pub(crate) fn resolve_macro_call( | 172 | pub(crate) fn resolve_macro_call( |
185 | &self, | 173 | &self, |
186 | db: &dyn HirDatabase, | 174 | db: &dyn HirDatabase, |
@@ -231,6 +219,68 @@ impl SourceAnalyzer { | |||
231 | resolve_hir_path(db, &self.resolver, &hir_path) | 219 | resolve_hir_path(db, &self.resolver, &hir_path) |
232 | } | 220 | } |
233 | 221 | ||
222 | pub(crate) fn record_literal_missing_fields( | ||
223 | &self, | ||
224 | db: &dyn HirDatabase, | ||
225 | literal: &ast::RecordLit, | ||
226 | ) -> Option<Vec<(StructField, Type)>> { | ||
227 | let krate = self.resolver.krate()?; | ||
228 | let body = self.body.as_ref()?; | ||
229 | let infer = self.infer.as_ref()?; | ||
230 | |||
231 | let expr_id = self.expr_id(db, &literal.clone().into())?; | ||
232 | let substs = match &infer.type_of_expr[expr_id] { | ||
233 | Ty::Apply(a_ty) => &a_ty.parameters, | ||
234 | _ => return None, | ||
235 | }; | ||
236 | |||
237 | let (variant, missing_fields, _exhaustive) = | ||
238 | record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?; | ||
239 | let res = self.missing_fields(db, krate, substs, variant, missing_fields); | ||
240 | Some(res) | ||
241 | } | ||
242 | |||
243 | pub(crate) fn record_pattern_missing_fields( | ||
244 | &self, | ||
245 | db: &dyn HirDatabase, | ||
246 | pattern: &ast::RecordPat, | ||
247 | ) -> Option<Vec<(StructField, Type)>> { | ||
248 | let krate = self.resolver.krate()?; | ||
249 | let body = self.body.as_ref()?; | ||
250 | let infer = self.infer.as_ref()?; | ||
251 | |||
252 | let pat_id = self.pat_id(&pattern.clone().into())?; | ||
253 | let substs = match &infer.type_of_pat[pat_id] { | ||
254 | Ty::Apply(a_ty) => &a_ty.parameters, | ||
255 | _ => return None, | ||
256 | }; | ||
257 | |||
258 | let (variant, missing_fields) = | ||
259 | record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?; | ||
260 | let res = self.missing_fields(db, krate, substs, variant, missing_fields); | ||
261 | Some(res) | ||
262 | } | ||
263 | |||
264 | fn missing_fields( | ||
265 | &self, | ||
266 | db: &dyn HirDatabase, | ||
267 | krate: CrateId, | ||
268 | substs: &Substs, | ||
269 | variant: VariantId, | ||
270 | missing_fields: Vec<LocalStructFieldId>, | ||
271 | ) -> Vec<(StructField, Type)> { | ||
272 | let field_types = db.field_types(variant); | ||
273 | |||
274 | missing_fields | ||
275 | .into_iter() | ||
276 | .map(|local_id| { | ||
277 | let field = StructFieldId { parent: variant, local_id }; | ||
278 | let ty = field_types[local_id].clone().subst(substs); | ||
279 | (field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty)) | ||
280 | }) | ||
281 | .collect() | ||
282 | } | ||
283 | |||
234 | pub(crate) fn expand( | 284 | pub(crate) fn expand( |
235 | &self, | 285 | &self, |
236 | db: &dyn HirDatabase, | 286 | db: &dyn HirDatabase, |
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs
index f29a25505..9e9a9d047 100644
--- a/crates/ra_hir_ty/src/_match.rs
+++ b/crates/ra_hir_ty/src/_match.rs
@@ -235,7 +235,10 @@ impl From<PatId> for PatIdOrWild { | |||
235 | } | 235 | } |
236 | 236 | ||
237 | #[derive(Debug, Clone, Copy, PartialEq)] | 237 | #[derive(Debug, Clone, Copy, PartialEq)] |
238 | pub struct MatchCheckNotImplemented; | 238 | pub enum MatchCheckErr { |
239 | NotImplemented, | ||
240 | MalformedMatchArm, | ||
241 | } | ||
239 | 242 | ||
240 | /// The return type of `is_useful` is either an indication of usefulness | 243 | /// The return type of `is_useful` is either an indication of usefulness |
241 | /// of the match arm, or an error in the case the match statement | 244 | /// of the match arm, or an error in the case the match statement |
@@ -244,7 +247,7 @@ pub struct MatchCheckNotImplemented; | |||
244 | /// | 247 | /// |
245 | /// The `std::result::Result` type is used here rather than a custom enum | 248 | /// The `std::result::Result` type is used here rather than a custom enum |
246 | /// to allow the use of `?`. | 249 | /// to allow the use of `?`. |
247 | pub type MatchCheckResult<T> = Result<T, MatchCheckNotImplemented>; | 250 | pub type MatchCheckResult<T> = Result<T, MatchCheckErr>; |
248 | 251 | ||
249 | #[derive(Debug)] | 252 | #[derive(Debug)] |
250 | /// A row in a Matrix. | 253 | /// A row in a Matrix. |
@@ -335,12 +338,12 @@ impl PatStack { | |||
335 | Expr::Literal(Literal::Bool(_)) => None, | 338 | Expr::Literal(Literal::Bool(_)) => None, |
336 | // perhaps this is actually unreachable given we have | 339 | // perhaps this is actually unreachable given we have |
337 | // already checked that these match arms have the appropriate type? | 340 | // already checked that these match arms have the appropriate type? |
338 | _ => return Err(MatchCheckNotImplemented), | 341 | _ => return Err(MatchCheckErr::NotImplemented), |
339 | } | 342 | } |
340 | } | 343 | } |
341 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), | 344 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), |
342 | (Pat::Path(_), Constructor::Enum(constructor)) => { | 345 | (Pat::Path(_), Constructor::Enum(constructor)) => { |
343 | // enums with no associated data become `Pat::Path` | 346 | // unit enum variants become `Pat::Path` |
344 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 347 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); |
345 | if !enum_variant_matches(cx, pat_id, *constructor) { | 348 | if !enum_variant_matches(cx, pat_id, *constructor) { |
346 | None | 349 | None |
@@ -348,16 +351,23 @@ impl PatStack { | |||
348 | Some(self.to_tail()) | 351 | Some(self.to_tail()) |
349 | } | 352 | } |
350 | } | 353 | } |
351 | (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(constructor)) => { | 354 | (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(enum_constructor)) => { |
352 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 355 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); |
353 | if !enum_variant_matches(cx, pat_id, *constructor) { | 356 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { |
354 | None | 357 | None |
355 | } else { | 358 | } else { |
359 | // If the enum variant matches, then we need to confirm | ||
360 | // that the number of patterns aligns with the expected | ||
361 | // number of patterns for that enum variant. | ||
362 | if pat_ids.len() != constructor.arity(cx)? { | ||
363 | return Err(MatchCheckErr::MalformedMatchArm); | ||
364 | } | ||
365 | |||
356 | Some(self.replace_head_with(pat_ids)) | 366 | Some(self.replace_head_with(pat_ids)) |
357 | } | 367 | } |
358 | } | 368 | } |
359 | (Pat::Or(_), _) => return Err(MatchCheckNotImplemented), | 369 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), |
360 | (_, _) => return Err(MatchCheckNotImplemented), | 370 | (_, _) => return Err(MatchCheckErr::NotImplemented), |
361 | }; | 371 | }; |
362 | 372 | ||
363 | Ok(result) | 373 | Ok(result) |
@@ -514,7 +524,7 @@ pub(crate) fn is_useful( | |||
514 | return if any_useful { | 524 | return if any_useful { |
515 | Ok(Usefulness::Useful) | 525 | Ok(Usefulness::Useful) |
516 | } else if found_unimplemented { | 526 | } else if found_unimplemented { |
517 | Err(MatchCheckNotImplemented) | 527 | Err(MatchCheckErr::NotImplemented) |
518 | } else { | 528 | } else { |
519 | Ok(Usefulness::NotUseful) | 529 | Ok(Usefulness::NotUseful) |
520 | }; | 530 | }; |
@@ -567,7 +577,7 @@ pub(crate) fn is_useful( | |||
567 | } | 577 | } |
568 | 578 | ||
569 | if found_unimplemented { | 579 | if found_unimplemented { |
570 | Err(MatchCheckNotImplemented) | 580 | Err(MatchCheckErr::NotImplemented) |
571 | } else { | 581 | } else { |
572 | Ok(Usefulness::NotUseful) | 582 | Ok(Usefulness::NotUseful) |
573 | } | 583 | } |
@@ -604,7 +614,7 @@ impl Constructor { | |||
604 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | 614 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { |
605 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), | 615 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), |
606 | VariantData::Unit => 0, | 616 | VariantData::Unit => 0, |
607 | _ => return Err(MatchCheckNotImplemented), | 617 | _ => return Err(MatchCheckErr::NotImplemented), |
608 | } | 618 | } |
609 | } | 619 | } |
610 | }; | 620 | }; |
@@ -637,20 +647,20 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt | |||
637 | Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }), | 647 | Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }), |
638 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { | 648 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { |
639 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), | 649 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), |
640 | _ => return Err(MatchCheckNotImplemented), | 650 | _ => return Err(MatchCheckErr::NotImplemented), |
641 | }, | 651 | }, |
642 | Pat::TupleStruct { .. } | Pat::Path(_) => { | 652 | Pat::TupleStruct { .. } | Pat::Path(_) => { |
643 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); | 653 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); |
644 | let variant_id = | 654 | let variant_id = |
645 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckNotImplemented)?; | 655 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::NotImplemented)?; |
646 | match variant_id { | 656 | match variant_id { |
647 | VariantId::EnumVariantId(enum_variant_id) => { | 657 | VariantId::EnumVariantId(enum_variant_id) => { |
648 | Some(Constructor::Enum(enum_variant_id)) | 658 | Some(Constructor::Enum(enum_variant_id)) |
649 | } | 659 | } |
650 | _ => return Err(MatchCheckNotImplemented), | 660 | _ => return Err(MatchCheckErr::NotImplemented), |
651 | } | 661 | } |
652 | } | 662 | } |
653 | _ => return Err(MatchCheckNotImplemented), | 663 | _ => return Err(MatchCheckErr::NotImplemented), |
654 | }; | 664 | }; |
655 | 665 | ||
656 | Ok(res) | 666 | Ok(res) |
@@ -1325,6 +1335,40 @@ mod tests { | |||
1325 | } | 1335 | } |
1326 | 1336 | ||
1327 | #[test] | 1337 | #[test] |
1338 | fn malformed_match_arm_tuple_missing_pattern() { | ||
1339 | let content = r" | ||
1340 | fn test_fn() { | ||
1341 | match (0) { | ||
1342 | () => (), | ||
1343 | } | ||
1344 | } | ||
1345 | "; | ||
1346 | |||
1347 | // Match arms with the incorrect type are filtered out. | ||
1348 | check_diagnostic(content); | ||
1349 | } | ||
1350 | |||
1351 | #[test] | ||
1352 | fn malformed_match_arm_tuple_enum_missing_pattern() { | ||
1353 | let content = r" | ||
1354 | enum Either { | ||
1355 | A, | ||
1356 | B(u32), | ||
1357 | } | ||
1358 | fn test_fn() { | ||
1359 | match Either::A { | ||
1360 | Either::A => (), | ||
1361 | Either::B() => (), | ||
1362 | } | ||
1363 | } | ||
1364 | "; | ||
1365 | |||
1366 | // We are testing to be sure we don't panic here when the match | ||
1367 | // arm `Either::B` is missing its pattern. | ||
1368 | check_no_diagnostic(content); | ||
1369 | } | ||
1370 | |||
1371 | #[test] | ||
1328 | fn enum_not_in_scope() { | 1372 | fn enum_not_in_scope() { |
1329 | let content = r" | 1373 | let content = r" |
1330 | fn test_fn() { | 1374 | fn test_fn() { |
diff --git a/crates/ra_hir_ty/src/expr.rs b/crates/ra_hir_ty/src/expr.rs
index fb779cbef..e45e9ea14 100644
--- a/crates/ra_hir_ty/src/expr.rs
+++ b/crates/ra_hir_ty/src/expr.rs
@@ -2,12 +2,8 @@ | |||
2 | 2 | ||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use hir_def::{ | 5 | use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId}; |
6 | path::{path, Path}, | 6 | use hir_expand::diagnostics::DiagnosticSink; |
7 | resolver::HasResolver, | ||
8 | AdtId, FunctionId, | ||
9 | }; | ||
10 | use hir_expand::{diagnostics::DiagnosticSink, name::Name}; | ||
11 | use ra_syntax::{ast, AstPtr}; | 7 | use ra_syntax::{ast, AstPtr}; |
12 | use rustc_hash::FxHashSet; | 8 | use rustc_hash::FxHashSet; |
13 | 9 | ||
@@ -28,7 +24,7 @@ pub use hir_def::{ | |||
28 | ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, | 24 | ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, |
29 | MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, | 25 | MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, |
30 | }, | 26 | }, |
31 | VariantId, | 27 | LocalStructFieldId, VariantId, |
32 | }; | 28 | }; |
33 | 29 | ||
34 | pub struct ExprValidator<'a, 'b: 'a> { | 30 | pub struct ExprValidator<'a, 'b: 'a> { |
@@ -49,14 +45,37 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
49 | pub fn validate_body(&mut self, db: &dyn HirDatabase) { | 45 | pub fn validate_body(&mut self, db: &dyn HirDatabase) { |
50 | let body = db.body(self.func.into()); | 46 | let body = db.body(self.func.into()); |
51 | 47 | ||
52 | for e in body.exprs.iter() { | 48 | for (id, expr) in body.exprs.iter() { |
53 | if let (id, Expr::RecordLit { path, fields, spread }) = e { | 49 | if let Some((variant_def, missed_fields, true)) = |
54 | self.validate_record_literal(id, path, fields, *spread, db); | 50 | record_literal_missing_fields(db, &self.infer, id, expr) |
55 | } else if let (id, Expr::Match { expr, arms }) = e { | 51 | { |
52 | // XXX: only look at source_map if we do have missing fields | ||
53 | let (_, source_map) = db.body_with_source_map(self.func.into()); | ||
54 | |||
55 | if let Ok(source_ptr) = source_map.expr_syntax(id) { | ||
56 | if let Some(expr) = source_ptr.value.left() { | ||
57 | let root = source_ptr.file_syntax(db.upcast()); | ||
58 | if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) { | ||
59 | if let Some(field_list) = record_lit.record_field_list() { | ||
60 | let variant_data = variant_data(db.upcast(), variant_def); | ||
61 | let missed_fields = missed_fields | ||
62 | .into_iter() | ||
63 | .map(|idx| variant_data.fields()[idx].name.clone()) | ||
64 | .collect(); | ||
65 | self.sink.push(MissingFields { | ||
66 | file: source_ptr.file_id, | ||
67 | field_list: AstPtr::new(&field_list), | ||
68 | missed_fields, | ||
69 | }) | ||
70 | } | ||
71 | } | ||
72 | } | ||
73 | } | ||
74 | } | ||
75 | if let Expr::Match { expr, arms } = expr { | ||
56 | self.validate_match(id, *expr, arms, db, self.infer.clone()); | 76 | self.validate_match(id, *expr, arms, db, self.infer.clone()); |
57 | } | 77 | } |
58 | } | 78 | } |
59 | |||
60 | let body_expr = &body[body.body_expr]; | 79 | let body_expr = &body[body.body_expr]; |
61 | if let Expr::Block { tail: Some(t), .. } = body_expr { | 80 | if let Expr::Block { tail: Some(t), .. } = body_expr { |
62 | self.validate_results_in_tail_expr(body.body_expr, *t, db); | 81 | self.validate_results_in_tail_expr(body.body_expr, *t, db); |
@@ -145,61 +164,6 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
145 | } | 164 | } |
146 | } | 165 | } |
147 | 166 | ||
148 | fn validate_record_literal( | ||
149 | &mut self, | ||
150 | id: ExprId, | ||
151 | _path: &Option<Path>, | ||
152 | fields: &[RecordLitField], | ||
153 | spread: Option<ExprId>, | ||
154 | db: &dyn HirDatabase, | ||
155 | ) { | ||
156 | if spread.is_some() { | ||
157 | return; | ||
158 | }; | ||
159 | let variant_def: VariantId = match self.infer.variant_resolution_for_expr(id) { | ||
160 | Some(VariantId::UnionId(_)) | None => return, | ||
161 | Some(it) => it, | ||
162 | }; | ||
163 | if let VariantId::UnionId(_) = variant_def { | ||
164 | return; | ||
165 | } | ||
166 | |||
167 | let variant_data = variant_data(db.upcast(), variant_def); | ||
168 | |||
169 | let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); | ||
170 | let missed_fields: Vec<Name> = variant_data | ||
171 | .fields() | ||
172 | .iter() | ||
173 | .filter_map(|(_f, d)| { | ||
174 | let name = d.name.clone(); | ||
175 | if lit_fields.contains(&name) { | ||
176 | None | ||
177 | } else { | ||
178 | Some(name) | ||
179 | } | ||
180 | }) | ||
181 | .collect(); | ||
182 | if missed_fields.is_empty() { | ||
183 | return; | ||
184 | } | ||
185 | let (_, source_map) = db.body_with_source_map(self.func.into()); | ||
186 | |||
187 | if let Ok(source_ptr) = source_map.expr_syntax(id) { | ||
188 | if let Some(expr) = source_ptr.value.left() { | ||
189 | let root = source_ptr.file_syntax(db.upcast()); | ||
190 | if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) { | ||
191 | if let Some(field_list) = record_lit.record_field_list() { | ||
192 | self.sink.push(MissingFields { | ||
193 | file: source_ptr.file_id, | ||
194 | field_list: AstPtr::new(&field_list), | ||
195 | missed_fields, | ||
196 | }) | ||
197 | } | ||
198 | } | ||
199 | } | ||
200 | } | ||
201 | } | ||
202 | |||
203 | fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { | 167 | fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { |
204 | // the mismatch will be on the whole block currently | 168 | // the mismatch will be on the whole block currently |
205 | let mismatch = match self.infer.type_mismatch_for_expr(body_id) { | 169 | let mismatch = match self.infer.type_mismatch_for_expr(body_id) { |
@@ -232,3 +196,63 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
232 | } | 196 | } |
233 | } | 197 | } |
234 | } | 198 | } |
199 | |||
200 | pub fn record_literal_missing_fields( | ||
201 | db: &dyn HirDatabase, | ||
202 | infer: &InferenceResult, | ||
203 | id: ExprId, | ||
204 | expr: &Expr, | ||
205 | ) -> Option<(VariantId, Vec<LocalStructFieldId>, /*exhaustive*/ bool)> { | ||
206 | let (fields, exhausitve) = match expr { | ||
207 | Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()), | ||
208 | _ => return None, | ||
209 | }; | ||
210 | |||
211 | let variant_def = infer.variant_resolution_for_expr(id)?; | ||
212 | if let VariantId::UnionId(_) = variant_def { | ||
213 | return None; | ||
214 | } | ||
215 | |||
216 | let variant_data = variant_data(db.upcast(), variant_def); | ||
217 | |||
218 | let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); | ||
219 | let missed_fields: Vec<LocalStructFieldId> = variant_data | ||
220 | .fields() | ||
221 | .iter() | ||
222 | .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) }) | ||
223 | .collect(); | ||
224 | if missed_fields.is_empty() { | ||
225 | return None; | ||
226 | } | ||
227 | Some((variant_def, missed_fields, exhausitve)) | ||
228 | } | ||
229 | |||
230 | pub fn record_pattern_missing_fields( | ||
231 | db: &dyn HirDatabase, | ||
232 | infer: &InferenceResult, | ||
233 | id: PatId, | ||
234 | pat: &Pat, | ||
235 | ) -> Option<(VariantId, Vec<LocalStructFieldId>)> { | ||
236 | let fields = match pat { | ||
237 | Pat::Record { path: _, args } => args, | ||
238 | _ => return None, | ||
239 | }; | ||
240 | |||
241 | let variant_def = infer.variant_resolution_for_pat(id)?; | ||
242 | if let VariantId::UnionId(_) = variant_def { | ||
243 | return None; | ||
244 | } | ||
245 | |||
246 | let variant_data = variant_data(db.upcast(), variant_def); | ||
247 | |||
248 | let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); | ||
249 | let missed_fields: Vec<LocalStructFieldId> = variant_data | ||
250 | .fields() | ||
251 | .iter() | ||
252 | .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) }) | ||
253 | .collect(); | ||
254 | if missed_fields.is_empty() { | ||
255 | return None; | ||
256 | } | ||
257 | Some((variant_def, missed_fields)) | ||
258 | } | ||
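Both helpers are now free functions over HIR data, so the rewritten `validate_body` above and `SourceAnalyzer` can share one implementation. For literals the result carries an extra "exhaustive" flag (`spread.is_none()`): the `MissingFields` diagnostic only fires when it is `true`, while completion keeps suggesting fields even under functional-update syntax (see the `completes_functional_update` test later in this diff). A hedged sketch of branching on that flag; `db`, `infer`, `id`, and `expr` are assumed to be in scope with the types the signature requires:

    // Sketch only, not code from the commit.
    if let Some((variant, missing, exhaustive)) =
        record_literal_missing_fields(db, infer, id, expr)
    {
        if exhaustive {
            // No `..base` spread: every missing field is a genuine error,
            // which is exactly when validate_body pushes MissingFields.
        } else {
            // With `..base` the remaining fields come from the base value,
            // but completion can still offer the entries in `missing`.
        }
        let _ = (variant, missing);
    }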
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs
index 060814e53..608408d88 100644
--- a/crates/ra_hir_ty/src/tests.rs
+++ b/crates/ra_hir_ty/src/tests.rs
@@ -336,7 +336,7 @@ fn no_such_field_with_feature_flag_diagnostics() { | |||
336 | pub(crate) fn new(my_val: usize, bar: bool) -> Self { | 336 | pub(crate) fn new(my_val: usize, bar: bool) -> Self { |
337 | Self { my_val, bar } | 337 | Self { my_val, bar } |
338 | } | 338 | } |
339 | 339 | ||
340 | #[cfg(not(feature = "foo"))] | 340 | #[cfg(not(feature = "foo"))] |
341 | pub(crate) fn new(my_val: usize, _bar: bool) -> Self { | 341 | pub(crate) fn new(my_val: usize, _bar: bool) -> Self { |
342 | Self { my_val } | 342 | Self { my_val } |
diff --git a/crates/ra_ide/src/completion/complete_record.rs b/crates/ra_ide/src/completion/complete_record.rs
index 79f5c8c8f..f46bcee5c 100644
--- a/crates/ra_ide/src/completion/complete_record.rs
+++ b/crates/ra_ide/src/completion/complete_record.rs
@@ -1,65 +1,24 @@ | |||
1 | //! Complete fields in record literals and patterns. | 1 | //! Complete fields in record literals and patterns. |
2 | use ra_syntax::{ast, ast::NameOwner, SmolStr}; | ||
3 | |||
4 | use crate::completion::{CompletionContext, Completions}; | 2 | use crate::completion::{CompletionContext, Completions}; |
5 | 3 | ||
6 | pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { | 4 | pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { |
7 | let (ty, variant, already_present_fields) = | 5 | let missing_fields = match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) { |
8 | match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) { | 6 | (None, None) => return None, |
9 | (None, None) => return None, | 7 | (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"), |
10 | (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"), | 8 | (Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat), |
11 | (Some(record_pat), _) => ( | 9 | (_, Some(record_lit)) => ctx.sema.record_literal_missing_fields(record_lit), |
12 | ctx.sema.type_of_pat(&record_pat.clone().into())?, | 10 | }; |
13 | ctx.sema.resolve_record_pattern(record_pat)?, | ||
14 | pattern_ascribed_fields(record_pat), | ||
15 | ), | ||
16 | (_, Some(record_lit)) => ( | ||
17 | ctx.sema.type_of_expr(&record_lit.clone().into())?, | ||
18 | ctx.sema.resolve_record_literal(record_lit)?, | ||
19 | literal_ascribed_fields(record_lit), | ||
20 | ), | ||
21 | }; | ||
22 | 11 | ||
23 | for (field, field_ty) in ty.variant_fields(ctx.db, variant).into_iter().filter(|(field, _)| { | 12 | for (field, ty) in missing_fields { |
24 | // FIXME: already_present_names better be `Vec<hir::Name>` | 13 | acc.add_field(ctx, field, &ty) |
25 | !already_present_fields.contains(&SmolStr::from(field.name(ctx.db).to_string())) | ||
26 | }) { | ||
27 | acc.add_field(ctx, field, &field_ty); | ||
28 | } | 14 | } |
29 | Some(()) | ||
30 | } | ||
31 | |||
32 | fn literal_ascribed_fields(record_lit: &ast::RecordLit) -> Vec<SmolStr> { | ||
33 | record_lit | ||
34 | .record_field_list() | ||
35 | .map(|field_list| field_list.fields()) | ||
36 | .map(|fields| { | ||
37 | fields | ||
38 | .into_iter() | ||
39 | .filter_map(|field| field.name_ref()) | ||
40 | .map(|name_ref| name_ref.text().clone()) | ||
41 | .collect() | ||
42 | }) | ||
43 | .unwrap_or_default() | ||
44 | } | ||
45 | 15 | ||
46 | fn pattern_ascribed_fields(record_pat: &ast::RecordPat) -> Vec<SmolStr> { | 16 | Some(()) |
47 | record_pat | ||
48 | .record_field_pat_list() | ||
49 | .map(|pat_list| { | ||
50 | pat_list | ||
51 | .record_field_pats() | ||
52 | .filter_map(|fild_pat| fild_pat.name()) | ||
53 | .chain(pat_list.bind_pats().filter_map(|bind_pat| bind_pat.name())) | ||
54 | .map(|name| name.text().clone()) | ||
55 | .collect() | ||
56 | }) | ||
57 | .unwrap_or_default() | ||
58 | } | 17 | } |
59 | 18 | ||
60 | #[cfg(test)] | 19 | #[cfg(test)] |
61 | mod tests { | 20 | mod tests { |
62 | mod record_lit_tests { | 21 | mod record_pat_tests { |
63 | use insta::assert_debug_snapshot; | 22 | use insta::assert_debug_snapshot; |
64 | 23 | ||
65 | use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind}; | 24 | use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind}; |
@@ -205,7 +164,7 @@ mod tests { | |||
205 | } | 164 | } |
206 | } | 165 | } |
207 | 166 | ||
208 | mod record_pat_tests { | 167 | mod record_lit_tests { |
209 | use insta::assert_debug_snapshot; | 168 | use insta::assert_debug_snapshot; |
210 | 169 | ||
211 | use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind}; | 170 | use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind}; |
@@ -410,5 +369,38 @@ mod tests { | |||
410 | ] | 369 | ] |
411 | "###); | 370 | "###); |
412 | } | 371 | } |
372 | |||
373 | #[test] | ||
374 | fn completes_functional_update() { | ||
375 | let completions = complete( | ||
376 | r" | ||
377 | struct S { | ||
378 | foo1: u32, | ||
379 | foo2: u32, | ||
380 | } | ||
381 | |||
382 | fn main() { | ||
383 | let foo1 = 1; | ||
384 | let s = S { | ||
385 | foo1, | ||
386 | <|> | ||
387 | .. loop {} | ||
388 | } | ||
389 | } | ||
390 | ", | ||
391 | ); | ||
392 | assert_debug_snapshot!(completions, @r###" | ||
393 | [ | ||
394 | CompletionItem { | ||
395 | label: "foo2", | ||
396 | source_range: [221; 221), | ||
397 | delete: [221; 221), | ||
398 | insert: "foo2", | ||
399 | kind: Field, | ||
400 | detail: "u32", | ||
401 | }, | ||
402 | ] | ||
403 | "###); | ||
404 | } | ||
413 | } | 405 | } |
414 | } | 406 | } |
diff --git a/crates/ra_ide/src/snapshots/highlight_injection.html b/crates/ra_ide/src/snapshots/highlight_injection.html
new file mode 100644
index 000000000..6ec13bd80
--- /dev/null
+++ b/crates/ra_ide/src/snapshots/highlight_injection.html
@@ -0,0 +1,39 @@ | |||
1 | |||
2 | <style> | ||
3 | body { margin: 0; } | ||
4 | pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; } | ||
5 | |||
6 | .lifetime { color: #DFAF8F; font-style: italic; } | ||
7 | .comment { color: #7F9F7F; } | ||
8 | .struct, .enum { color: #7CB8BB; } | ||
9 | .enum_variant { color: #BDE0F3; } | ||
10 | .string_literal { color: #CC9393; } | ||
11 | .field { color: #94BFF3; } | ||
12 | .function { color: #93E0E3; } | ||
13 | .parameter { color: #94BFF3; } | ||
14 | .text { color: #DCDCCC; } | ||
15 | .type { color: #7CB8BB; } | ||
16 | .builtin_type { color: #8CD0D3; } | ||
17 | .type_param { color: #DFAF8F; } | ||
18 | .attribute { color: #94BFF3; } | ||
19 | .numeric_literal { color: #BFEBBF; } | ||
20 | .macro { color: #94BFF3; } | ||
21 | .module { color: #AFD8AF; } | ||
22 | .variable { color: #DCDCCC; } | ||
23 | .mutable { text-decoration: underline; } | ||
24 | |||
25 | .keyword { color: #F0DFAF; font-weight: bold; } | ||
26 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | ||
27 | .control { font-style: italic; } | ||
28 | </style> | ||
29 | <pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span>(<span class="variable declaration">ra_fixture</span>: &<span class="builtin_type">str</span>) {} | ||
30 | |||
31 | <span class="keyword">fn</span> <span class="function declaration">main</span>() { | ||
32 | <span class="function">fixture</span>(<span class="string_literal">r#"</span> | ||
33 | <span class="keyword">trait</span> <span class="trait declaration">Foo</span> { | ||
34 | <span class="keyword">fn</span> <span class="function declaration">foo</span>() { | ||
35 | <span class="macro">println!</span>(<span class="string_literal">"2 + 2 = {}"</span>, <span class="numeric_literal">4</span>); | ||
36 | } | ||
37 | }<span class="string_literal">"#</span> | ||
38 | ); | ||
39 | }</code></pre> \ No newline at end of file | ||
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 495b07f69..214dcbb62 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -26,7 +26,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
26 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 26 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
27 | .control { font-style: italic; } | 27 | .control { font-style: italic; } |
28 | </style> | 28 | </style> |
29 | <pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span> | 29 | <pre><code><span class="attribute">#[derive(Clone, Debug)]</span> |
30 | <span class="keyword">struct</span> <span class="struct declaration">Foo</span> { | 30 | <span class="keyword">struct</span> <span class="struct declaration">Foo</span> { |
31 | <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>, | 31 | <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>, |
32 | <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>, | 32 | <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>, |
@@ -36,11 +36,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
36 | <span class="function">foo</span>::<<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>>() | 36 | <span class="function">foo</span>::<<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>>() |
37 | } | 37 | } |
38 | 38 | ||
39 | <span class="macro">macro_rules</span><span class="macro">!</span> def_fn { | 39 | <span class="macro">macro_rules!</span> def_fn { |
40 | ($($tt:tt)*) => {$($tt)*} | 40 | ($($tt:tt)*) => {$($tt)*} |
41 | } | 41 | } |
42 | 42 | ||
43 | <span class="macro">def_fn</span><span class="macro">!</span> { | 43 | <span class="macro">def_fn!</span> { |
44 | <span class="keyword">fn</span> <span class="function declaration">bar</span>() -> <span class="builtin_type">u32</span> { | 44 | <span class="keyword">fn</span> <span class="function declaration">bar</span>() -> <span class="builtin_type">u32</span> { |
45 | <span class="numeric_literal">100</span> | 45 | <span class="numeric_literal">100</span> |
46 | } | 46 | } |
@@ -48,7 +48,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
48 | 48 | ||
49 | <span class="comment">// comment</span> | 49 | <span class="comment">// comment</span> |
50 | <span class="keyword">fn</span> <span class="function declaration">main</span>() { | 50 | <span class="keyword">fn</span> <span class="function declaration">main</span>() { |
51 | <span class="macro">println</span><span class="macro">!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>); | 51 | <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>); |
52 | 52 | ||
53 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new(); | 53 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new(); |
54 | <span class="keyword control">if</span> <span class="keyword">true</span> { | 54 | <span class="keyword control">if</span> <span class="keyword">true</span> { |
@@ -73,7 +73,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
73 | <span class="keyword">impl</span><<span class="type_param declaration">T</span>> <span class="enum">Option</span><<span class="type_param">T</span>> { | 73 | <span class="keyword">impl</span><<span class="type_param declaration">T</span>> <span class="enum">Option</span><<span class="type_param">T</span>> { |
74 | <span class="keyword">fn</span> <span class="function declaration">and</span><<span class="type_param declaration">U</span>>(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span><<span class="type_param">U</span>>) -> <span class="enum">Option</span><(<span class="type_param">T</span>, <span class="type_param">U</span>)> { | 74 | <span class="keyword">fn</span> <span class="function declaration">and</span><<span class="type_param declaration">U</span>>(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span><<span class="type_param">U</span>>) -> <span class="enum">Option</span><(<span class="type_param">T</span>, <span class="type_param">U</span>)> { |
75 | <span class="keyword control">match</span> <span class="variable">other</span> { | 75 | <span class="keyword control">match</span> <span class="variable">other</span> { |
76 | <span class="enum_variant">None</span> => <span class="macro">unimplemented</span><span class="macro">!</span>(), | 76 | <span class="enum_variant">None</span> => <span class="macro">unimplemented!</span>(), |
77 | <span class="variable declaration">Nope</span> => <span class="variable">Nope</span>, | 77 | <span class="variable declaration">Nope</span> => <span class="variable">Nope</span>, |
78 | } | 78 | } |
79 | } | 79 | } |
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index d833a816b..83d161f45 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -24,7 +24,7 @@ use crate::{call_info::call_info_for_token, Analysis, FileId}; | |||
24 | pub(crate) use html::highlight_as_html; | 24 | pub(crate) use html::highlight_as_html; |
25 | pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag}; | 25 | pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag}; |
26 | 26 | ||
27 | #[derive(Debug)] | 27 | #[derive(Debug, Clone)] |
28 | pub struct HighlightedRange { | 28 | pub struct HighlightedRange { |
29 | pub range: TextRange, | 29 | pub range: TextRange, |
30 | pub highlight: Highlight, | 30 | pub highlight: Highlight, |
@@ -55,13 +55,55 @@ pub(crate) fn highlight( | |||
55 | }; | 55 | }; |
56 | 56 | ||
57 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); | 57 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); |
58 | let mut res = Vec::new(); | 58 | // We use a stack for the DFS traversal below. |
59 | // When we leave a node, we use it to flatten the highlighted ranges. | ||
60 | let mut res: Vec<Vec<HighlightedRange>> = vec![Vec::new()]; | ||
59 | 61 | ||
60 | let mut current_macro_call: Option<ast::MacroCall> = None; | 62 | let mut current_macro_call: Option<ast::MacroCall> = None; |
61 | 63 | ||
62 | // Walk all nodes, keeping track of whether we are inside a macro or not. | 64 | // Walk all nodes, keeping track of whether we are inside a macro or not. |
63 | // If in macro, expand it first and highlight the expanded code. | 65 | // If in macro, expand it first and highlight the expanded code. |
64 | for event in root.preorder_with_tokens() { | 66 | for event in root.preorder_with_tokens() { |
67 | match &event { | ||
68 | WalkEvent::Enter(_) => res.push(Vec::new()), | ||
69 | WalkEvent::Leave(_) => { | ||
70 | /* Flattens the highlighted ranges. | ||
71 | * | ||
72 | * For example `#[cfg(feature = "foo")]` contains the nested ranges: | ||
73 | * 1) parent-range: Attribute [0, 23) | ||
74 | * 2) child-range: String [16, 21) | ||
75 | * | ||
76 | * The following code implements the flattening; for our example this results in: | ||
77 | * `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]` | ||
78 | */ | ||
79 | let children = res.pop().unwrap(); | ||
80 | let prev = res.last_mut().unwrap(); | ||
81 | let needs_flattening = !children.is_empty() | ||
82 | && !prev.is_empty() | ||
83 | && children.first().unwrap().range.is_subrange(&prev.last().unwrap().range); | ||
84 | if !needs_flattening { | ||
85 | prev.extend(children); | ||
86 | } else { | ||
87 | let mut parent = prev.pop().unwrap(); | ||
88 | for ele in children { | ||
89 | assert!(ele.range.is_subrange(&parent.range)); | ||
90 | let mut cloned = parent.clone(); | ||
91 | parent.range = TextRange::from_to(parent.range.start(), ele.range.start()); | ||
92 | cloned.range = TextRange::from_to(ele.range.end(), cloned.range.end()); | ||
93 | if !parent.range.is_empty() { | ||
94 | prev.push(parent); | ||
95 | } | ||
96 | prev.push(ele); | ||
97 | parent = cloned; | ||
98 | } | ||
99 | if !parent.range.is_empty() { | ||
100 | prev.push(parent); | ||
101 | } | ||
102 | } | ||
103 | } | ||
104 | }; | ||
105 | let current = res.last_mut().expect("during DFS traversal, the stack must not be empty"); | ||
106 | |||
65 | let event_range = match &event { | 107 | let event_range = match &event { |
66 | WalkEvent::Enter(it) => it.text_range(), | 108 | WalkEvent::Enter(it) => it.text_range(), |
67 | WalkEvent::Leave(it) => it.text_range(), | 109 | WalkEvent::Leave(it) => it.text_range(), |
@@ -77,7 +119,7 @@ pub(crate) fn highlight( | |||
77 | WalkEvent::Enter(Some(mc)) => { | 119 | WalkEvent::Enter(Some(mc)) => { |
78 | current_macro_call = Some(mc.clone()); | 120 | current_macro_call = Some(mc.clone()); |
79 | if let Some(range) = macro_call_range(&mc) { | 121 | if let Some(range) = macro_call_range(&mc) { |
80 | res.push(HighlightedRange { | 122 | current.push(HighlightedRange { |
81 | range, | 123 | range, |
82 | highlight: HighlightTag::Macro.into(), | 124 | highlight: HighlightTag::Macro.into(), |
83 | binding_hash: None, | 125 | binding_hash: None, |
@@ -119,7 +161,7 @@ pub(crate) fn highlight( | |||
119 | 161 | ||
120 | if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { | 162 | if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { |
121 | let expanded = element_to_highlight.as_token().unwrap().clone(); | 163 | let expanded = element_to_highlight.as_token().unwrap().clone(); |
122 | if highlight_injection(&mut res, &sema, token, expanded).is_some() { | 164 | if highlight_injection(current, &sema, token, expanded).is_some() { |
123 | continue; | 165 | continue; |
124 | } | 166 | } |
125 | } | 167 | } |
@@ -127,10 +169,17 @@ pub(crate) fn highlight( | |||
127 | if let Some((highlight, binding_hash)) = | 169 | if let Some((highlight, binding_hash)) = |
128 | highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight) | 170 | highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight) |
129 | { | 171 | { |
130 | res.push(HighlightedRange { range, highlight, binding_hash }); | 172 | current.push(HighlightedRange { range, highlight, binding_hash }); |
131 | } | 173 | } |
132 | } | 174 | } |
133 | 175 | ||
176 | assert_eq!(res.len(), 1, "after DFS traversal, the stack should only contain a single element"); | ||
177 | let res = res.pop().unwrap(); | ||
178 | // Check that ranges are sorted and disjoint | ||
179 | assert!(res | ||
180 | .iter() | ||
181 | .zip(res.iter().skip(1)) | ||
182 | .all(|(left, right)| left.range.end() <= right.range.start())); | ||
134 | res | 183 | res |
135 | } | 184 | } |
136 | 185 | ||
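The flattening performed on `WalkEvent::Leave` is what guarantees the final `Vec<HighlightedRange>` is sorted and disjoint, which the closing assertion and the simplified HTML renderer below rely on. A self-contained sketch of the same splitting idea on plain `(start, end, tag)` tuples rather than the real `TextRange`/`Highlight` types:

    // Illustrative only: half-open (start, end, tag) ranges.
    type R = (u32, u32, &'static str);

    // Split `parent` around its nested, already-sorted `children`.
    fn flatten(parent: R, children: &[R]) -> Vec<R> {
        let (mut start, end, tag) = parent;
        let mut out = Vec::new();
        for &(cs, ce, ct) in children {
            assert!(start <= cs && ce <= end); // each child nests in the parent
            if cs > start {
                out.push((start, cs, tag)); // parent piece before the child
            }
            out.push((cs, ce, ct)); // the child itself
            start = ce; // resume after the child
        }
        if start < end {
            out.push((start, end, tag)); // trailing parent piece
        }
        out
    }

With the document's own example, `flatten((0, 23, "attribute"), &[(16, 21, "string_literal")])` yields `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]`.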
diff --git a/crates/ra_ide/src/syntax_highlighting/html.rs b/crates/ra_ide/src/syntax_highlighting/html.rs
index e13766c9d..4496529a1 100644
--- a/crates/ra_ide/src/syntax_highlighting/html.rs
+++ b/crates/ra_ide/src/syntax_highlighting/html.rs
@@ -1,9 +1,9 @@ | |||
1 | //! Renders a bit of code as HTML. | 1 | //! Renders a bit of code as HTML. |
2 | 2 | ||
3 | use ra_db::SourceDatabase; | 3 | use ra_db::SourceDatabase; |
4 | use ra_syntax::AstNode; | 4 | use ra_syntax::{AstNode, TextUnit}; |
5 | 5 | ||
6 | use crate::{FileId, HighlightedRange, RootDatabase}; | 6 | use crate::{FileId, RootDatabase}; |
7 | 7 | ||
8 | use super::highlight; | 8 | use super::highlight; |
9 | 9 | ||
@@ -21,51 +21,35 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo | |||
21 | ) | 21 | ) |
22 | } | 22 | } |
23 | 23 | ||
24 | let mut ranges = highlight(db, file_id, None); | 24 | let ranges = highlight(db, file_id, None); |
25 | ranges.sort_by_key(|it| it.range.start()); | 25 | let text = parse.tree().syntax().to_string(); |
26 | // quick non-optimal heuristic to intersect token ranges and highlighted ranges | 26 | let mut prev_pos = TextUnit::from(0); |
27 | let mut frontier = 0; | ||
28 | let mut could_intersect: Vec<&HighlightedRange> = Vec::new(); | ||
29 | |||
30 | let mut buf = String::new(); | 27 | let mut buf = String::new(); |
31 | buf.push_str(&STYLE); | 28 | buf.push_str(&STYLE); |
32 | buf.push_str("<pre><code>"); | 29 | buf.push_str("<pre><code>"); |
33 | let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token()); | 30 | for range in &ranges { |
34 | for token in tokens { | 31 | if range.range.start() > prev_pos { |
35 | could_intersect.retain(|it| token.text_range().start() <= it.range.end()); | 32 | let curr = &text[prev_pos.to_usize()..range.range.start().to_usize()]; |
36 | while let Some(r) = ranges.get(frontier) { | 33 | let text = html_escape(curr); |
37 | if r.range.start() <= token.text_range().end() { | ||
38 | could_intersect.push(r); | ||
39 | frontier += 1; | ||
40 | } else { | ||
41 | break; | ||
42 | } | ||
43 | } | ||
44 | let text = html_escape(&token.text()); | ||
45 | let ranges = could_intersect | ||
46 | .iter() | ||
47 | .filter(|it| token.text_range().is_subrange(&it.range)) | ||
48 | .collect::<Vec<_>>(); | ||
49 | if ranges.is_empty() { | ||
50 | buf.push_str(&text); | 34 | buf.push_str(&text); |
51 | } else { | ||
52 | let classes = ranges | ||
53 | .iter() | ||
54 | .map(|it| it.highlight.to_string().replace('.', " ")) | ||
55 | .collect::<Vec<_>>() | ||
56 | .join(" "); | ||
57 | let binding_hash = ranges.first().and_then(|x| x.binding_hash); | ||
58 | let color = match (rainbow, binding_hash) { | ||
59 | (true, Some(hash)) => format!( | ||
60 | " data-binding-hash=\"{}\" style=\"color: {};\"", | ||
61 | hash, | ||
62 | rainbowify(hash) | ||
63 | ), | ||
64 | _ => "".into(), | ||
65 | }; | ||
66 | buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text)); | ||
67 | } | 35 | } |
36 | let curr = &text[range.range.start().to_usize()..range.range.end().to_usize()]; | ||
37 | |||
38 | let class = range.highlight.to_string().replace('.', " "); | ||
39 | let color = match (rainbow, range.binding_hash) { | ||
40 | (true, Some(hash)) => { | ||
41 | format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash)) | ||
42 | } | ||
43 | _ => "".into(), | ||
44 | }; | ||
45 | buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", class, color, html_escape(curr))); | ||
46 | |||
47 | prev_pos = range.range.end(); | ||
68 | } | 48 | } |
49 | // Add the remaining (non-highlighted) text | ||
50 | let curr = &text[prev_pos.to_usize()..]; | ||
51 | let text = html_escape(curr); | ||
52 | buf.push_str(&text); | ||
69 | buf.push_str("</code></pre>"); | 53 | buf.push_str("</code></pre>"); |
70 | buf | 54 | buf |
71 | } | 55 | } |
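Because `highlight` now returns sorted, disjoint ranges, `highlight_as_html` can walk the file text once: gaps between ranges are emitted as plain text and each range becomes exactly one `<span>`. A reduced sketch of that loop over byte offsets (illustrative; it omits the HTML escaping and rainbow colouring the real code applies):

    // Illustrative only: `ranges` are (start, end, css_class), sorted and disjoint.
    fn render(text: &str, ranges: &[(usize, usize, &str)]) -> String {
        let mut buf = String::new();
        let mut prev = 0;
        for &(start, end, class) in ranges {
            buf.push_str(&text[prev..start]); // un-highlighted gap
            buf.push_str(&format!(
                "<span class=\"{}\">{}</span>", // one span per highlighted range
                class,
                &text[start..end]
            ));
            prev = end;
        }
        buf.push_str(&text[prev..]); // trailing un-highlighted text
        buf
    }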
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs
index 98c030791..110887c2a 100644
--- a/crates/ra_ide/src/syntax_highlighting/tests.rs
+++ b/crates/ra_ide/src/syntax_highlighting/tests.rs
@@ -131,3 +131,28 @@ fn test_ranges() { | |||
131 | 131 | ||
132 | assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); | 132 | assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); |
133 | } | 133 | } |
134 | |||
135 | #[test] | ||
136 | fn test_flattening() { | ||
137 | let (analysis, file_id) = single_file( | ||
138 | r##" | ||
139 | fn fixture(ra_fixture: &str) {} | ||
140 | |||
141 | fn main() { | ||
142 | fixture(r#" | ||
143 | trait Foo { | ||
144 | fn foo() { | ||
145 | println!("2 + 2 = {}", 4); | ||
146 | } | ||
147 | }"# | ||
148 | ); | ||
149 | }"## | ||
150 | .trim(), | ||
151 | ); | ||
152 | |||
153 | let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlight_injection.html"); | ||
154 | let actual_html = &analysis.highlight_as_html(file_id, false).unwrap(); | ||
155 | let expected_html = &read_text(&dst_file); | ||
156 | fs::write(dst_file, &actual_html).unwrap(); | ||
157 | assert_eq_text!(expected_html, actual_html); | ||
158 | } | ||
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index a7fcea0ac..254318e23 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1615,6 +1615,23 @@ fn test_issue_2520() { | |||
1615 | } | 1615 | } |
1616 | 1616 | ||
1617 | #[test] | 1617 | #[test] |
1618 | fn test_issue_3861() { | ||
1619 | let macro_fixture = parse_macro( | ||
1620 | r#" | ||
1621 | macro_rules! rgb_color { | ||
1622 | ($p:expr, $t: ty) => { | ||
1623 | pub fn new() { | ||
1624 | let _ = 0 as $t << $p; | ||
1625 | } | ||
1626 | }; | ||
1627 | } | ||
1628 | "#, | ||
1629 | ); | ||
1630 | |||
1631 | macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#); | ||
1632 | } | ||
1633 | |||
1634 | #[test] | ||
1618 | fn test_repeat_bad_var() { | 1635 | fn test_repeat_bad_var() { |
1619 | // FIXME: the second rule of the macro should be removed and an error about | 1636 | // FIXME: the second rule of the macro should be removed and an error about |
1620 | // `$( $c )+` raised | 1637 | // `$( $c )+` raised |
diff --git a/crates/ra_parser/src/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs
index 2c00bce80..386969d2d 100644
--- a/crates/ra_parser/src/grammar/types.rs
+++ b/crates/ra_parser/src/grammar/types.rs
@@ -7,7 +7,7 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![ | |||
7 | DYN_KW, L_ANGLE, | 7 | DYN_KW, L_ANGLE, |
8 | ]); | 8 | ]); |
9 | 9 | ||
10 | const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA]; | 10 | const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA, L_DOLLAR]; |
11 | 11 | ||
12 | pub(crate) fn type_(p: &mut Parser) { | 12 | pub(crate) fn type_(p: &mut Parser) { |
13 | type_with_bounds_cond(p, true); | 13 | type_with_bounds_cond(p, true); |
diff --git a/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs b/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs
index 9029f8815..55d93917c 100644
--- a/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs
+++ b/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs
@@ -54,12 +54,14 @@ pub struct Diagnostic { | |||
54 | } | 54 | } |
55 | 55 | ||
56 | macro_rules! diagnostic_child_methods { | 56 | macro_rules! diagnostic_child_methods { |
57 | ($spanned:ident, $regular:ident, $level:expr) => ( | 57 | ($spanned:ident, $regular:ident, $level:expr) => { |
58 | /// Adds a new child diagnostic message to `self` with the level | 58 | /// Adds a new child diagnostic message to `self` with the level |
59 | /// identified by this method's name with the given `spans` and | 59 | /// identified by this method's name with the given `spans` and |
60 | /// `message`. | 60 | /// `message`. |
61 | pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic | 61 | pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic |
62 | where S: MultiSpan, T: Into<String> | 62 | where |
63 | S: MultiSpan, | ||
64 | T: Into<String>, | ||
63 | { | 65 | { |
64 | self.children.push(Diagnostic::spanned(spans, $level, message)); | 66 | self.children.push(Diagnostic::spanned(spans, $level, message)); |
65 | self | 67 | self |
@@ -71,7 +73,7 @@ macro_rules! diagnostic_child_methods { | |||
71 | self.children.push(Diagnostic::new($level, message)); | 73 | self.children.push(Diagnostic::new($level, message)); |
72 | self | 74 | self |
73 | } | 75 | } |
74 | ) | 76 | }; |
75 | } | 77 | } |
76 | 78 | ||
77 | /// Iterator over the children diagnostics of a `Diagnostic`. | 79 | /// Iterator over the children diagnostics of a `Diagnostic`. |
diff --git a/crates/ra_proc_macro_srv/src/proc_macro/mod.rs b/crates/ra_proc_macro_srv/src/proc_macro/mod.rs
index e35a6ff8b..ee0dc9722 100644
--- a/crates/ra_proc_macro_srv/src/proc_macro/mod.rs
+++ b/crates/ra_proc_macro_srv/src/proc_macro/mod.rs
@@ -169,13 +169,13 @@ pub mod token_stream { | |||
169 | pub struct Span(bridge::client::Span); | 169 | pub struct Span(bridge::client::Span); |
170 | 170 | ||
171 | macro_rules! diagnostic_method { | 171 | macro_rules! diagnostic_method { |
172 | ($name:ident, $level:expr) => ( | 172 | ($name:ident, $level:expr) => { |
173 | /// Creates a new `Diagnostic` with the given `message` at the span | 173 | /// Creates a new `Diagnostic` with the given `message` at the span |
174 | /// `self`. | 174 | /// `self`. |
175 | pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic { | 175 | pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic { |
176 | Diagnostic::spanned(self, $level, message) | 176 | Diagnostic::spanned(self, $level, message) |
177 | } | 177 | } |
178 | ) | 178 | }; |
179 | } | 179 | } |
180 | 180 | ||
181 | impl Span { | 181 | impl Span { |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 191123c8e..8d1098036 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -316,7 +316,7 @@ impl<'a> SyntaxRewriter<'a> { | |||
316 | } | 316 | } |
317 | } | 317 | } |
318 | 318 | ||
319 | impl<'a> ops::AddAssign for SyntaxRewriter<'_> { | 319 | impl ops::AddAssign for SyntaxRewriter<'_> { |
320 | fn add_assign(&mut self, rhs: SyntaxRewriter) { | 320 | fn add_assign(&mut self, rhs: SyntaxRewriter) { |
321 | assert!(rhs.f.is_none()); | 321 | assert!(rhs.f.is_none()); |
322 | self.replacements.extend(rhs.replacements) | 322 | self.replacements.extend(rhs.replacements) |