author    Benjamin Coenen <[email protected]>   2020-04-09 08:39:17 +0100
committer Benjamin Coenen <[email protected]>   2020-04-09 08:53:53 +0100
commit    585bb83e2aec9c79dae8c2e031e9165f40937003 (patch)
tree      3dda062f3deb768b211e7e091dd5b29b9b6fae84
parent    8f1dba6f9ae1d8d314dd9d007e4c582ed1403e8d (diff)
parent    080c983498afcac3eb54028af5c9f8bfe7f2c826 (diff)
feat: add attributes support on struct fields and method #3870
Signed-off-by: Benjamin Coenen <[email protected]>
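For context, the kind of code this feature targets looks like the following minimal sketch (modeled on the `no_such_field_with_feature_flag_diagnostics` test touched further down): `cfg` attributes on struct fields and methods decide which items actually exist for a given feature set, and the analysis has to honour them.

    // Illustrative sketch only, modeled on the test fixture below.
    struct MyStruct {
        my_val: usize,
        #[cfg(feature = "foo")]
        bar: bool,
    }

    impl MyStruct {
        #[cfg(feature = "foo")]
        pub(crate) fn new(my_val: usize, bar: bool) -> Self {
            Self { my_val, bar }
        }

        #[cfg(not(feature = "foo"))]
        pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
            Self { my_val }
        }
    }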
 .github/workflows/release.yaml | 4
 crates/ra_hir/src/code_model.rs | 31
 crates/ra_hir/src/semantics.rs | 28
 crates/ra_hir/src/source_analyzer.rs | 90
 crates/ra_hir_ty/src/_match.rs | 74
 crates/ra_hir_ty/src/expr.rs | 158
 crates/ra_hir_ty/src/tests.rs | 2
 crates/ra_ide/src/completion/complete_record.rs | 96
 crates/ra_ide/src/snapshots/highlight_injection.html | 39
 crates/ra_ide/src/snapshots/highlighting.html | 10
 crates/ra_ide/src/syntax_highlighting.rs | 59
 crates/ra_ide/src/syntax_highlighting/html.rs | 66
 crates/ra_ide/src/syntax_highlighting/tests.rs | 25
 crates/ra_mbe/src/tests.rs | 17
 crates/ra_parser/src/grammar/types.rs | 2
 crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs | 8
 crates/ra_proc_macro_srv/src/proc_macro/mod.rs | 4
 crates/ra_syntax/src/algo.rs | 2
 docs/user/readme.adoc | 25
 editors/code/package-lock.json | 44
 editors/code/package.json | 13
 editors/code/src/client.ts | 16
 editors/code/src/config.ts | 1
 editors/code/src/ctx.ts | 2
 xtask/src/dist.rs | 20
 xtask/src/lib.rs | 4
 xtask/src/main.rs | 14
 xtask/src/not_bash.rs | 4
 28 files changed, 522 insertions(+), 336 deletions(-)
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 4db122ec7..2c1192f07 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -50,11 +50,11 @@ jobs:
50 50
51 - name: Dist 51 - name: Dist
52 if: matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/release' 52 if: matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/release'
53 run: cargo xtask dist --client --version 0.2.$GITHUB_RUN_NUMBER --tag $(date --iso --utc) 53 run: cargo xtask dist --client 0.2.$GITHUB_RUN_NUMBER
54 54
55 - name: Dist 55 - name: Dist
56 if: matrix.os == 'ubuntu-latest' && github.ref != 'refs/heads/release' 56 if: matrix.os == 'ubuntu-latest' && github.ref != 'refs/heads/release'
57 run: cargo xtask dist --client --version 0.3.$GITHUB_RUN_NUMBER-nightly --tag nightly 57 run: cargo xtask dist --nightly --client 0.3.$GITHUB_RUN_NUMBER-nightly
58 58
59 - name: Dist 59 - name: Dist
60 if: matrix.os != 'ubuntu-latest' 60 if: matrix.os != 'ubuntu-latest'
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index c6f3bdb8e..9baebf643 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -1027,8 +1027,16 @@ impl Type {
1027 ty: Ty, 1027 ty: Ty,
1028 ) -> Option<Type> { 1028 ) -> Option<Type> {
1029 let krate = resolver.krate()?; 1029 let krate = resolver.krate()?;
1030 Some(Type::new_with_resolver_inner(db, krate, resolver, ty))
1031 }
1032 pub(crate) fn new_with_resolver_inner(
1033 db: &dyn HirDatabase,
1034 krate: CrateId,
1035 resolver: &Resolver,
1036 ty: Ty,
1037 ) -> Type {
1030 let environment = TraitEnvironment::lower(db, &resolver); 1038 let environment = TraitEnvironment::lower(db, &resolver);
1031 Some(Type { krate, ty: InEnvironment { value: ty, environment } }) 1039 Type { krate, ty: InEnvironment { value: ty, environment } }
1032 } 1040 }
1033 1041
1034 fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { 1042 fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type {
@@ -1152,27 +1160,6 @@ impl Type {
1152 res 1160 res
1153 } 1161 }
1154 1162
1155 pub fn variant_fields(
1156 &self,
1157 db: &dyn HirDatabase,
1158 def: VariantDef,
1159 ) -> Vec<(StructField, Type)> {
1160 // FIXME: check that ty and def match
1161 match &self.ty.value {
1162 Ty::Apply(a_ty) => {
1163 let field_types = db.field_types(def.into());
1164 def.fields(db)
1165 .into_iter()
1166 .map(|it| {
1167 let ty = field_types[it.id].clone().subst(&a_ty.parameters);
1168 (it, self.derived(ty))
1169 })
1170 .collect()
1171 }
1172 _ => Vec::new(),
1173 }
1174 }
1175
1176 pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a { 1163 pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
1177 // There should be no inference vars in types passed here 1164 // There should be no inference vars in types passed here
1178 // FIXME check that? 1165 // FIXME check that?
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 2ad231d36..2707e422d 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -23,7 +23,7 @@ use crate::{
23 semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, 23 semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
24 source_analyzer::{resolve_hir_path, SourceAnalyzer}, 24 source_analyzer::{resolve_hir_path, SourceAnalyzer},
25 AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name, 25 AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name,
26 Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef, 26 Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam,
27}; 27};
28 28
29#[derive(Debug, Clone, PartialEq, Eq)] 29#[derive(Debug, Clone, PartialEq, Eq)]
@@ -187,14 +187,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
187 self.analyze(field.syntax()).resolve_record_field(self.db, field) 187 self.analyze(field.syntax()).resolve_record_field(self.db, field)
188 } 188 }
189 189
190 pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
191 self.analyze(record_lit.syntax()).resolve_record_literal(self.db, record_lit)
192 }
193
194 pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
195 self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
196 }
197
198 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { 190 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
199 let sa = self.analyze(macro_call.syntax()); 191 let sa = self.analyze(macro_call.syntax());
200 let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); 192 let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
@@ -212,6 +204,24 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
212 // FIXME: use this instead? 204 // FIXME: use this instead?
213 // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; 205 // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
214 206
207 pub fn record_literal_missing_fields(
208 &self,
209 literal: &ast::RecordLit,
210 ) -> Vec<(StructField, Type)> {
211 self.analyze(literal.syntax())
212 .record_literal_missing_fields(self.db, literal)
213 .unwrap_or_default()
214 }
215
216 pub fn record_pattern_missing_fields(
217 &self,
218 pattern: &ast::RecordPat,
219 ) -> Vec<(StructField, Type)> {
220 self.analyze(pattern.syntax())
221 .record_pattern_missing_fields(self.db, pattern)
222 .unwrap_or_default()
223 }
224
215 pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { 225 pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
216 let src = self.find_file(src.syntax().clone()).with_value(src).cloned(); 226 let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
217 T::to_def(self, src) 227 T::to_def(self, src)
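The two methods added above (`record_literal_missing_fields`, `record_pattern_missing_fields`) replace the removed `resolve_record_literal`/`resolve_record_pattern` calls plus manual field filtering: a caller now gets the not-yet-written fields of a record literal or pattern together with their types in a single call. A sketch of the intended call pattern, mirroring the `complete_record.rs` change further down (`ctx`, `acc` and the record node are assumed to come from the completion context):

    // Illustrative only; see crates/ra_ide/src/completion/complete_record.rs below.
    if let Some(record_lit) = ctx.record_lit_syntax.as_ref() {
        for (field, ty) in ctx.sema.record_literal_missing_fields(record_lit) {
            // Each entry is a field that is not yet written in the literal,
            // paired with its type, ready to be offered as a completion.
            acc.add_field(ctx, field, &ty);
        }
    }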
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 815ca158c..45631f8fd 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -14,10 +14,13 @@ use hir_def::{
14 }, 14 },
15 expr::{ExprId, Pat, PatId}, 15 expr::{ExprId, Pat, PatId},
16 resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, 16 resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
17 AsMacroCall, DefWithBodyId, 17 AsMacroCall, DefWithBodyId, LocalStructFieldId, StructFieldId, VariantId,
18}; 18};
19use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; 19use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
20use hir_ty::InferenceResult; 20use hir_ty::{
21 expr::{record_literal_missing_fields, record_pattern_missing_fields},
22 InferenceResult, Substs, Ty,
23};
21use ra_syntax::{ 24use ra_syntax::{
22 ast::{self, AstNode}, 25 ast::{self, AstNode},
23 SyntaxNode, SyntaxNodePtr, TextUnit, 26 SyntaxNode, SyntaxNodePtr, TextUnit,
@@ -25,8 +28,10 @@ use ra_syntax::{
25 28
26use crate::{ 29use crate::{
27 db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef, 30 db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef,
28 ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam, 31 ModPath, ModuleDef, Path, PathKind, Static, Struct, StructField, Trait, Type, TypeAlias,
32 TypeParam,
29}; 33};
34use ra_db::CrateId;
30 35
31/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of 36/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
32/// original source files. It should not be used inside the HIR itself. 37/// original source files. It should not be used inside the HIR itself.
@@ -164,23 +169,6 @@ impl SourceAnalyzer {
164 Some((struct_field.into(), local)) 169 Some((struct_field.into(), local))
165 } 170 }
166 171
167 pub(crate) fn resolve_record_literal(
168 &self,
169 db: &dyn HirDatabase,
170 record_lit: &ast::RecordLit,
171 ) -> Option<crate::VariantDef> {
172 let expr_id = self.expr_id(db, &record_lit.clone().into())?;
173 self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
174 }
175
176 pub(crate) fn resolve_record_pattern(
177 &self,
178 record_pat: &ast::RecordPat,
179 ) -> Option<crate::VariantDef> {
180 let pat_id = self.pat_id(&record_pat.clone().into())?;
181 self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
182 }
183
184 pub(crate) fn resolve_macro_call( 172 pub(crate) fn resolve_macro_call(
185 &self, 173 &self,
186 db: &dyn HirDatabase, 174 db: &dyn HirDatabase,
@@ -231,6 +219,68 @@ impl SourceAnalyzer {
231 resolve_hir_path(db, &self.resolver, &hir_path) 219 resolve_hir_path(db, &self.resolver, &hir_path)
232 } 220 }
233 221
222 pub(crate) fn record_literal_missing_fields(
223 &self,
224 db: &dyn HirDatabase,
225 literal: &ast::RecordLit,
226 ) -> Option<Vec<(StructField, Type)>> {
227 let krate = self.resolver.krate()?;
228 let body = self.body.as_ref()?;
229 let infer = self.infer.as_ref()?;
230
231 let expr_id = self.expr_id(db, &literal.clone().into())?;
232 let substs = match &infer.type_of_expr[expr_id] {
233 Ty::Apply(a_ty) => &a_ty.parameters,
234 _ => return None,
235 };
236
237 let (variant, missing_fields, _exhaustive) =
238 record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
239 let res = self.missing_fields(db, krate, substs, variant, missing_fields);
240 Some(res)
241 }
242
243 pub(crate) fn record_pattern_missing_fields(
244 &self,
245 db: &dyn HirDatabase,
246 pattern: &ast::RecordPat,
247 ) -> Option<Vec<(StructField, Type)>> {
248 let krate = self.resolver.krate()?;
249 let body = self.body.as_ref()?;
250 let infer = self.infer.as_ref()?;
251
252 let pat_id = self.pat_id(&pattern.clone().into())?;
253 let substs = match &infer.type_of_pat[pat_id] {
254 Ty::Apply(a_ty) => &a_ty.parameters,
255 _ => return None,
256 };
257
258 let (variant, missing_fields) =
259 record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
260 let res = self.missing_fields(db, krate, substs, variant, missing_fields);
261 Some(res)
262 }
263
264 fn missing_fields(
265 &self,
266 db: &dyn HirDatabase,
267 krate: CrateId,
268 substs: &Substs,
269 variant: VariantId,
270 missing_fields: Vec<LocalStructFieldId>,
271 ) -> Vec<(StructField, Type)> {
272 let field_types = db.field_types(variant);
273
274 missing_fields
275 .into_iter()
276 .map(|local_id| {
277 let field = StructFieldId { parent: variant, local_id };
278 let ty = field_types[local_id].clone().subst(substs);
279 (field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty))
280 })
281 .collect()
282 }
283
234 pub(crate) fn expand( 284 pub(crate) fn expand(
235 &self, 285 &self,
236 db: &dyn HirDatabase, 286 db: &dyn HirDatabase,
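The `missing_fields` helper added above substitutes the record's type arguments into each missing field's type before returning it, so callers see concrete types rather than generic parameters. A small, hypothetical illustration (not part of this diff):

    struct Pair<T> {
        first: T,
        second: u32,
    }

    fn demo(p: Pair<String>) -> String {
        // Only `first` is written out, so `second` is the "missing" field here;
        // record_pattern_missing_fields would report it as `second: u32`.
        // Had the field been declared as `second: T`, it would be reported as
        // `second: String`, i.e. with the substitution already applied.
        let Pair { first, .. } = p;
        first
    }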
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs
index f29a25505..9e9a9d047 100644
--- a/crates/ra_hir_ty/src/_match.rs
+++ b/crates/ra_hir_ty/src/_match.rs
@@ -235,7 +235,10 @@ impl From<PatId> for PatIdOrWild {
235} 235}
236 236
237#[derive(Debug, Clone, Copy, PartialEq)] 237#[derive(Debug, Clone, Copy, PartialEq)]
238pub struct MatchCheckNotImplemented; 238pub enum MatchCheckErr {
239 NotImplemented,
240 MalformedMatchArm,
241}
239 242
240/// The return type of `is_useful` is either an indication of usefulness 243/// The return type of `is_useful` is either an indication of usefulness
241/// of the match arm, or an error in the case the match statement 244/// of the match arm, or an error in the case the match statement
@@ -244,7 +247,7 @@ pub struct MatchCheckNotImplemented;
244/// 247///
245/// The `std::result::Result` type is used here rather than a custom enum 248/// The `std::result::Result` type is used here rather than a custom enum
246/// to allow the use of `?`. 249/// to allow the use of `?`.
247pub type MatchCheckResult<T> = Result<T, MatchCheckNotImplemented>; 250pub type MatchCheckResult<T> = Result<T, MatchCheckErr>;
248 251
249#[derive(Debug)] 252#[derive(Debug)]
250/// A row in a Matrix. 253/// A row in a Matrix.
@@ -335,12 +338,12 @@ impl PatStack {
335 Expr::Literal(Literal::Bool(_)) => None, 338 Expr::Literal(Literal::Bool(_)) => None,
336 // perhaps this is actually unreachable given we have 339 // perhaps this is actually unreachable given we have
337 // already checked that these match arms have the appropriate type? 340 // already checked that these match arms have the appropriate type?
338 _ => return Err(MatchCheckNotImplemented), 341 _ => return Err(MatchCheckErr::NotImplemented),
339 } 342 }
340 } 343 }
341 (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), 344 (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?),
342 (Pat::Path(_), Constructor::Enum(constructor)) => { 345 (Pat::Path(_), Constructor::Enum(constructor)) => {
343 // enums with no associated data become `Pat::Path` 346 // unit enum variants become `Pat::Path`
344 let pat_id = self.head().as_id().expect("we know this isn't a wild"); 347 let pat_id = self.head().as_id().expect("we know this isn't a wild");
345 if !enum_variant_matches(cx, pat_id, *constructor) { 348 if !enum_variant_matches(cx, pat_id, *constructor) {
346 None 349 None
@@ -348,16 +351,23 @@ impl PatStack {
348 Some(self.to_tail()) 351 Some(self.to_tail())
349 } 352 }
350 } 353 }
351 (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(constructor)) => { 354 (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(enum_constructor)) => {
352 let pat_id = self.head().as_id().expect("we know this isn't a wild"); 355 let pat_id = self.head().as_id().expect("we know this isn't a wild");
353 if !enum_variant_matches(cx, pat_id, *constructor) { 356 if !enum_variant_matches(cx, pat_id, *enum_constructor) {
354 None 357 None
355 } else { 358 } else {
359 // If the enum variant matches, then we need to confirm
360 // that the number of patterns aligns with the expected
361 // number of patterns for that enum variant.
362 if pat_ids.len() != constructor.arity(cx)? {
363 return Err(MatchCheckErr::MalformedMatchArm);
364 }
365
356 Some(self.replace_head_with(pat_ids)) 366 Some(self.replace_head_with(pat_ids))
357 } 367 }
358 } 368 }
359 (Pat::Or(_), _) => return Err(MatchCheckNotImplemented), 369 (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
360 (_, _) => return Err(MatchCheckNotImplemented), 370 (_, _) => return Err(MatchCheckErr::NotImplemented),
361 }; 371 };
362 372
363 Ok(result) 373 Ok(result)
@@ -514,7 +524,7 @@ pub(crate) fn is_useful(
514 return if any_useful { 524 return if any_useful {
515 Ok(Usefulness::Useful) 525 Ok(Usefulness::Useful)
516 } else if found_unimplemented { 526 } else if found_unimplemented {
517 Err(MatchCheckNotImplemented) 527 Err(MatchCheckErr::NotImplemented)
518 } else { 528 } else {
519 Ok(Usefulness::NotUseful) 529 Ok(Usefulness::NotUseful)
520 }; 530 };
@@ -567,7 +577,7 @@ pub(crate) fn is_useful(
567 } 577 }
568 578
569 if found_unimplemented { 579 if found_unimplemented {
570 Err(MatchCheckNotImplemented) 580 Err(MatchCheckErr::NotImplemented)
571 } else { 581 } else {
572 Ok(Usefulness::NotUseful) 582 Ok(Usefulness::NotUseful)
573 } 583 }
@@ -604,7 +614,7 @@ impl Constructor {
604 match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { 614 match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
605 VariantData::Tuple(struct_field_data) => struct_field_data.len(), 615 VariantData::Tuple(struct_field_data) => struct_field_data.len(),
606 VariantData::Unit => 0, 616 VariantData::Unit => 0,
607 _ => return Err(MatchCheckNotImplemented), 617 _ => return Err(MatchCheckErr::NotImplemented),
608 } 618 }
609 } 619 }
610 }; 620 };
@@ -637,20 +647,20 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt
637 Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }), 647 Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }),
638 Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { 648 Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] {
639 Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), 649 Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
640 _ => return Err(MatchCheckNotImplemented), 650 _ => return Err(MatchCheckErr::NotImplemented),
641 }, 651 },
642 Pat::TupleStruct { .. } | Pat::Path(_) => { 652 Pat::TupleStruct { .. } | Pat::Path(_) => {
643 let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); 653 let pat_id = pat.as_id().expect("we already know this pattern is not a wild");
644 let variant_id = 654 let variant_id =
645 cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckNotImplemented)?; 655 cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::NotImplemented)?;
646 match variant_id { 656 match variant_id {
647 VariantId::EnumVariantId(enum_variant_id) => { 657 VariantId::EnumVariantId(enum_variant_id) => {
648 Some(Constructor::Enum(enum_variant_id)) 658 Some(Constructor::Enum(enum_variant_id))
649 } 659 }
650 _ => return Err(MatchCheckNotImplemented), 660 _ => return Err(MatchCheckErr::NotImplemented),
651 } 661 }
652 } 662 }
653 _ => return Err(MatchCheckNotImplemented), 663 _ => return Err(MatchCheckErr::NotImplemented),
654 }; 664 };
655 665
656 Ok(res) 666 Ok(res)
@@ -1325,6 +1335,40 @@ mod tests {
1325 } 1335 }
1326 1336
1327 #[test] 1337 #[test]
1338 fn malformed_match_arm_tuple_missing_pattern() {
1339 let content = r"
1340 fn test_fn() {
1341 match (0) {
1342 () => (),
1343 }
1344 }
1345 ";
1346
1347 // Match arms with the incorrect type are filtered out.
1348 check_diagnostic(content);
1349 }
1350
1351 #[test]
1352 fn malformed_match_arm_tuple_enum_missing_pattern() {
1353 let content = r"
1354 enum Either {
1355 A,
1356 B(u32),
1357 }
1358 fn test_fn() {
1359 match Either::A {
1360 Either::A => (),
1361 Either::B() => (),
1362 }
1363 }
1364 ";
1365
1366 // We are testing to be sure we don't panic here when the match
1367 // arm `Either::B` is missing its pattern.
1368 check_no_diagnostic(content);
1369 }
1370
1371 #[test]
1328 fn enum_not_in_scope() { 1372 fn enum_not_in_scope() {
1329 let content = r" 1373 let content = r"
1330 fn test_fn() { 1374 fn test_fn() {
diff --git a/crates/ra_hir_ty/src/expr.rs b/crates/ra_hir_ty/src/expr.rs
index fb779cbef..e45e9ea14 100644
--- a/crates/ra_hir_ty/src/expr.rs
+++ b/crates/ra_hir_ty/src/expr.rs
@@ -2,12 +2,8 @@
2 2
3use std::sync::Arc; 3use std::sync::Arc;
4 4
5use hir_def::{ 5use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId};
6 path::{path, Path}, 6use hir_expand::diagnostics::DiagnosticSink;
7 resolver::HasResolver,
8 AdtId, FunctionId,
9};
10use hir_expand::{diagnostics::DiagnosticSink, name::Name};
11use ra_syntax::{ast, AstPtr}; 7use ra_syntax::{ast, AstPtr};
12use rustc_hash::FxHashSet; 8use rustc_hash::FxHashSet;
13 9
@@ -28,7 +24,7 @@ pub use hir_def::{
28 ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, 24 ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
29 MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, 25 MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
30 }, 26 },
31 VariantId, 27 LocalStructFieldId, VariantId,
32}; 28};
33 29
34pub struct ExprValidator<'a, 'b: 'a> { 30pub struct ExprValidator<'a, 'b: 'a> {
@@ -49,14 +45,37 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
49 pub fn validate_body(&mut self, db: &dyn HirDatabase) { 45 pub fn validate_body(&mut self, db: &dyn HirDatabase) {
50 let body = db.body(self.func.into()); 46 let body = db.body(self.func.into());
51 47
52 for e in body.exprs.iter() { 48 for (id, expr) in body.exprs.iter() {
53 if let (id, Expr::RecordLit { path, fields, spread }) = e { 49 if let Some((variant_def, missed_fields, true)) =
54 self.validate_record_literal(id, path, fields, *spread, db); 50 record_literal_missing_fields(db, &self.infer, id, expr)
55 } else if let (id, Expr::Match { expr, arms }) = e { 51 {
52 // XXX: only look at source_map if we do have missing fields
53 let (_, source_map) = db.body_with_source_map(self.func.into());
54
55 if let Ok(source_ptr) = source_map.expr_syntax(id) {
56 if let Some(expr) = source_ptr.value.left() {
57 let root = source_ptr.file_syntax(db.upcast());
58 if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
59 if let Some(field_list) = record_lit.record_field_list() {
60 let variant_data = variant_data(db.upcast(), variant_def);
61 let missed_fields = missed_fields
62 .into_iter()
63 .map(|idx| variant_data.fields()[idx].name.clone())
64 .collect();
65 self.sink.push(MissingFields {
66 file: source_ptr.file_id,
67 field_list: AstPtr::new(&field_list),
68 missed_fields,
69 })
70 }
71 }
72 }
73 }
74 }
75 if let Expr::Match { expr, arms } = expr {
56 self.validate_match(id, *expr, arms, db, self.infer.clone()); 76 self.validate_match(id, *expr, arms, db, self.infer.clone());
57 } 77 }
58 } 78 }
59
60 let body_expr = &body[body.body_expr]; 79 let body_expr = &body[body.body_expr];
61 if let Expr::Block { tail: Some(t), .. } = body_expr { 80 if let Expr::Block { tail: Some(t), .. } = body_expr {
62 self.validate_results_in_tail_expr(body.body_expr, *t, db); 81 self.validate_results_in_tail_expr(body.body_expr, *t, db);
@@ -145,61 +164,6 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
145 } 164 }
146 } 165 }
147 166
148 fn validate_record_literal(
149 &mut self,
150 id: ExprId,
151 _path: &Option<Path>,
152 fields: &[RecordLitField],
153 spread: Option<ExprId>,
154 db: &dyn HirDatabase,
155 ) {
156 if spread.is_some() {
157 return;
158 };
159 let variant_def: VariantId = match self.infer.variant_resolution_for_expr(id) {
160 Some(VariantId::UnionId(_)) | None => return,
161 Some(it) => it,
162 };
163 if let VariantId::UnionId(_) = variant_def {
164 return;
165 }
166
167 let variant_data = variant_data(db.upcast(), variant_def);
168
169 let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
170 let missed_fields: Vec<Name> = variant_data
171 .fields()
172 .iter()
173 .filter_map(|(_f, d)| {
174 let name = d.name.clone();
175 if lit_fields.contains(&name) {
176 None
177 } else {
178 Some(name)
179 }
180 })
181 .collect();
182 if missed_fields.is_empty() {
183 return;
184 }
185 let (_, source_map) = db.body_with_source_map(self.func.into());
186
187 if let Ok(source_ptr) = source_map.expr_syntax(id) {
188 if let Some(expr) = source_ptr.value.left() {
189 let root = source_ptr.file_syntax(db.upcast());
190 if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
191 if let Some(field_list) = record_lit.record_field_list() {
192 self.sink.push(MissingFields {
193 file: source_ptr.file_id,
194 field_list: AstPtr::new(&field_list),
195 missed_fields,
196 })
197 }
198 }
199 }
200 }
201 }
202
203 fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) { 167 fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
204 // the mismatch will be on the whole block currently 168 // the mismatch will be on the whole block currently
205 let mismatch = match self.infer.type_mismatch_for_expr(body_id) { 169 let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
@@ -232,3 +196,63 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
232 } 196 }
233 } 197 }
234} 198}
199
200pub fn record_literal_missing_fields(
201 db: &dyn HirDatabase,
202 infer: &InferenceResult,
203 id: ExprId,
204 expr: &Expr,
205) -> Option<(VariantId, Vec<LocalStructFieldId>, /*exhaustive*/ bool)> {
206 let (fields, exhaustive) = match expr {
207 Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
208 _ => return None,
209 };
210
211 let variant_def = infer.variant_resolution_for_expr(id)?;
212 if let VariantId::UnionId(_) = variant_def {
213 return None;
214 }
215
216 let variant_data = variant_data(db.upcast(), variant_def);
217
218 let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
219 let missed_fields: Vec<LocalStructFieldId> = variant_data
220 .fields()
221 .iter()
222 .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
223 .collect();
224 if missed_fields.is_empty() {
225 return None;
226 }
227 Some((variant_def, missed_fields, exhaustive))
228}
229
230pub fn record_pattern_missing_fields(
231 db: &dyn HirDatabase,
232 infer: &InferenceResult,
233 id: PatId,
234 pat: &Pat,
235) -> Option<(VariantId, Vec<LocalStructFieldId>)> {
236 let fields = match pat {
237 Pat::Record { path: _, args } => args,
238 _ => return None,
239 };
240
241 let variant_def = infer.variant_resolution_for_pat(id)?;
242 if let VariantId::UnionId(_) = variant_def {
243 return None;
244 }
245
246 let variant_data = variant_data(db.upcast(), variant_def);
247
248 let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
249 let missed_fields: Vec<LocalStructFieldId> = variant_data
250 .fields()
251 .iter()
252 .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
253 .collect();
254 if missed_fields.is_empty() {
255 return None;
256 }
257 Some((variant_def, missed_fields))
258}
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs
index 060814e53..608408d88 100644
--- a/crates/ra_hir_ty/src/tests.rs
+++ b/crates/ra_hir_ty/src/tests.rs
@@ -336,7 +336,7 @@ fn no_such_field_with_feature_flag_diagnostics() {
336 pub(crate) fn new(my_val: usize, bar: bool) -> Self { 336 pub(crate) fn new(my_val: usize, bar: bool) -> Self {
337 Self { my_val, bar } 337 Self { my_val, bar }
338 } 338 }
339 339
340 #[cfg(not(feature = "foo"))] 340 #[cfg(not(feature = "foo"))]
341 pub(crate) fn new(my_val: usize, _bar: bool) -> Self { 341 pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
342 Self { my_val } 342 Self { my_val }
diff --git a/crates/ra_ide/src/completion/complete_record.rs b/crates/ra_ide/src/completion/complete_record.rs
index 79f5c8c8f..f46bcee5c 100644
--- a/crates/ra_ide/src/completion/complete_record.rs
+++ b/crates/ra_ide/src/completion/complete_record.rs
@@ -1,65 +1,24 @@
1//! Complete fields in record literals and patterns. 1//! Complete fields in record literals and patterns.
2use ra_syntax::{ast, ast::NameOwner, SmolStr};
3
4use crate::completion::{CompletionContext, Completions}; 2use crate::completion::{CompletionContext, Completions};
5 3
6pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { 4pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
7 let (ty, variant, already_present_fields) = 5 let missing_fields = match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
8 match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) { 6 (None, None) => return None,
9 (None, None) => return None, 7 (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
10 (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"), 8 (Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat),
11 (Some(record_pat), _) => ( 9 (_, Some(record_lit)) => ctx.sema.record_literal_missing_fields(record_lit),
12 ctx.sema.type_of_pat(&record_pat.clone().into())?, 10 };
13 ctx.sema.resolve_record_pattern(record_pat)?,
14 pattern_ascribed_fields(record_pat),
15 ),
16 (_, Some(record_lit)) => (
17 ctx.sema.type_of_expr(&record_lit.clone().into())?,
18 ctx.sema.resolve_record_literal(record_lit)?,
19 literal_ascribed_fields(record_lit),
20 ),
21 };
22 11
23 for (field, field_ty) in ty.variant_fields(ctx.db, variant).into_iter().filter(|(field, _)| { 12 for (field, ty) in missing_fields {
24 // FIXME: already_present_names better be `Vec<hir::Name>` 13 acc.add_field(ctx, field, &ty)
25 !already_present_fields.contains(&SmolStr::from(field.name(ctx.db).to_string()))
26 }) {
27 acc.add_field(ctx, field, &field_ty);
28 } 14 }
29 Some(())
30}
31
32fn literal_ascribed_fields(record_lit: &ast::RecordLit) -> Vec<SmolStr> {
33 record_lit
34 .record_field_list()
35 .map(|field_list| field_list.fields())
36 .map(|fields| {
37 fields
38 .into_iter()
39 .filter_map(|field| field.name_ref())
40 .map(|name_ref| name_ref.text().clone())
41 .collect()
42 })
43 .unwrap_or_default()
44}
45 15
46fn pattern_ascribed_fields(record_pat: &ast::RecordPat) -> Vec<SmolStr> { 16 Some(())
47 record_pat
48 .record_field_pat_list()
49 .map(|pat_list| {
50 pat_list
51 .record_field_pats()
52 .filter_map(|fild_pat| fild_pat.name())
53 .chain(pat_list.bind_pats().filter_map(|bind_pat| bind_pat.name()))
54 .map(|name| name.text().clone())
55 .collect()
56 })
57 .unwrap_or_default()
58} 17}
59 18
60#[cfg(test)] 19#[cfg(test)]
61mod tests { 20mod tests {
62 mod record_lit_tests { 21 mod record_pat_tests {
63 use insta::assert_debug_snapshot; 22 use insta::assert_debug_snapshot;
64 23
65 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind}; 24 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
@@ -205,7 +164,7 @@ mod tests {
205 } 164 }
206 } 165 }
207 166
208 mod record_pat_tests { 167 mod record_lit_tests {
209 use insta::assert_debug_snapshot; 168 use insta::assert_debug_snapshot;
210 169
211 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind}; 170 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
@@ -410,5 +369,38 @@ mod tests {
410 ] 369 ]
411 "###); 370 "###);
412 } 371 }
372
373 #[test]
374 fn completes_functional_update() {
375 let completions = complete(
376 r"
377 struct S {
378 foo1: u32,
379 foo2: u32,
380 }
381
382 fn main() {
383 let foo1 = 1;
384 let s = S {
385 foo1,
386 <|>
387 .. loop {}
388 }
389 }
390 ",
391 );
392 assert_debug_snapshot!(completions, @r###"
393 [
394 CompletionItem {
395 label: "foo2",
396 source_range: [221; 221),
397 delete: [221; 221),
398 insert: "foo2",
399 kind: Field,
400 detail: "u32",
401 },
402 ]
403 "###);
404 }
413 } 405 }
414} 406}
diff --git a/crates/ra_ide/src/snapshots/highlight_injection.html b/crates/ra_ide/src/snapshots/highlight_injection.html
new file mode 100644
index 000000000..6ec13bd80
--- /dev/null
+++ b/crates/ra_ide/src/snapshots/highlight_injection.html
@@ -0,0 +1,39 @@
1
2<style>
3body { margin: 0; }
4pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
5
6.lifetime { color: #DFAF8F; font-style: italic; }
7.comment { color: #7F9F7F; }
8.struct, .enum { color: #7CB8BB; }
9.enum_variant { color: #BDE0F3; }
10.string_literal { color: #CC9393; }
11.field { color: #94BFF3; }
12.function { color: #93E0E3; }
13.parameter { color: #94BFF3; }
14.text { color: #DCDCCC; }
15.type { color: #7CB8BB; }
16.builtin_type { color: #8CD0D3; }
17.type_param { color: #DFAF8F; }
18.attribute { color: #94BFF3; }
19.numeric_literal { color: #BFEBBF; }
20.macro { color: #94BFF3; }
21.module { color: #AFD8AF; }
22.variable { color: #DCDCCC; }
23.mutable { text-decoration: underline; }
24
25.keyword { color: #F0DFAF; font-weight: bold; }
26.keyword.unsafe { color: #BC8383; font-weight: bold; }
27.control { font-style: italic; }
28</style>
29<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span>(<span class="variable declaration">ra_fixture</span>: &<span class="builtin_type">str</span>) {}
30
31<span class="keyword">fn</span> <span class="function declaration">main</span>() {
32 <span class="function">fixture</span>(<span class="string_literal">r#"</span>
33 <span class="keyword">trait</span> <span class="trait declaration">Foo</span> {
34 <span class="keyword">fn</span> <span class="function declaration">foo</span>() {
35 <span class="macro">println!</span>(<span class="string_literal">"2 + 2 = {}"</span>, <span class="numeric_literal">4</span>);
36 }
37 }<span class="string_literal">"#</span>
38 );
39}</code></pre> \ No newline at end of file
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 495b07f69..214dcbb62 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -26,7 +26,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
26.keyword.unsafe { color: #BC8383; font-weight: bold; } 26.keyword.unsafe { color: #BC8383; font-weight: bold; }
27.control { font-style: italic; } 27.control { font-style: italic; }
28</style> 28</style>
29<pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span> 29<pre><code><span class="attribute">#[derive(Clone, Debug)]</span>
30<span class="keyword">struct</span> <span class="struct declaration">Foo</span> { 30<span class="keyword">struct</span> <span class="struct declaration">Foo</span> {
31 <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>, 31 <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>,
32 <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>, 32 <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>,
@@ -36,11 +36,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
36 <span class="function">foo</span>::&lt;<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>&gt;() 36 <span class="function">foo</span>::&lt;<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>&gt;()
37} 37}
38 38
39<span class="macro">macro_rules</span><span class="macro">!</span> def_fn { 39<span class="macro">macro_rules!</span> def_fn {
40 ($($tt:tt)*) =&gt; {$($tt)*} 40 ($($tt:tt)*) =&gt; {$($tt)*}
41} 41}
42 42
43<span class="macro">def_fn</span><span class="macro">!</span> { 43<span class="macro">def_fn!</span> {
44 <span class="keyword">fn</span> <span class="function declaration">bar</span>() -&gt; <span class="builtin_type">u32</span> { 44 <span class="keyword">fn</span> <span class="function declaration">bar</span>() -&gt; <span class="builtin_type">u32</span> {
45 <span class="numeric_literal">100</span> 45 <span class="numeric_literal">100</span>
46 } 46 }
@@ -48,7 +48,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
48 48
49<span class="comment">// comment</span> 49<span class="comment">// comment</span>
50<span class="keyword">fn</span> <span class="function declaration">main</span>() { 50<span class="keyword">fn</span> <span class="function declaration">main</span>() {
51 <span class="macro">println</span><span class="macro">!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>); 51 <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);
52 52
53 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new(); 53 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new();
54 <span class="keyword control">if</span> <span class="keyword">true</span> { 54 <span class="keyword control">if</span> <span class="keyword">true</span> {
@@ -73,7 +73,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
73<span class="keyword">impl</span>&lt;<span class="type_param declaration">T</span>&gt; <span class="enum">Option</span>&lt;<span class="type_param">T</span>&gt; { 73<span class="keyword">impl</span>&lt;<span class="type_param declaration">T</span>&gt; <span class="enum">Option</span>&lt;<span class="type_param">T</span>&gt; {
74 <span class="keyword">fn</span> <span class="function declaration">and</span>&lt;<span class="type_param declaration">U</span>&gt;(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span>&lt;<span class="type_param">U</span>&gt;) -&gt; <span class="enum">Option</span>&lt;(<span class="type_param">T</span>, <span class="type_param">U</span>)&gt; { 74 <span class="keyword">fn</span> <span class="function declaration">and</span>&lt;<span class="type_param declaration">U</span>&gt;(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span>&lt;<span class="type_param">U</span>&gt;) -&gt; <span class="enum">Option</span>&lt;(<span class="type_param">T</span>, <span class="type_param">U</span>)&gt; {
75 <span class="keyword control">match</span> <span class="variable">other</span> { 75 <span class="keyword control">match</span> <span class="variable">other</span> {
76 <span class="enum_variant">None</span> =&gt; <span class="macro">unimplemented</span><span class="macro">!</span>(), 76 <span class="enum_variant">None</span> =&gt; <span class="macro">unimplemented!</span>(),
77 <span class="variable declaration">Nope</span> =&gt; <span class="variable">Nope</span>, 77 <span class="variable declaration">Nope</span> =&gt; <span class="variable">Nope</span>,
78 } 78 }
79 } 79 }
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index d833a816b..83d161f45 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -24,7 +24,7 @@ use crate::{call_info::call_info_for_token, Analysis, FileId};
24pub(crate) use html::highlight_as_html; 24pub(crate) use html::highlight_as_html;
25pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag}; 25pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag};
26 26
27#[derive(Debug)] 27#[derive(Debug, Clone)]
28pub struct HighlightedRange { 28pub struct HighlightedRange {
29 pub range: TextRange, 29 pub range: TextRange,
30 pub highlight: Highlight, 30 pub highlight: Highlight,
@@ -55,13 +55,55 @@ pub(crate) fn highlight(
55 }; 55 };
56 56
57 let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); 57 let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
58 let mut res = Vec::new(); 58 // We use a stack for the DFS traversal below.
59 // When we leave a node, we use it to flatten the highlighted ranges.
60 let mut res: Vec<Vec<HighlightedRange>> = vec![Vec::new()];
59 61
60 let mut current_macro_call: Option<ast::MacroCall> = None; 62 let mut current_macro_call: Option<ast::MacroCall> = None;
61 63
62 // Walk all nodes, keeping track of whether we are inside a macro or not. 64 // Walk all nodes, keeping track of whether we are inside a macro or not.
63 // If in macro, expand it first and highlight the expanded code. 65 // If in macro, expand it first and highlight the expanded code.
64 for event in root.preorder_with_tokens() { 66 for event in root.preorder_with_tokens() {
67 match &event {
68 WalkEvent::Enter(_) => res.push(Vec::new()),
69 WalkEvent::Leave(_) => {
70 /* Flattens the highlighted ranges.
71 *
72 * For example `#[cfg(feature = "foo")]` contains the nested ranges:
73 * 1) parent-range: Attribute [0, 23)
74 * 2) child-range: String [16, 21)
75 *
76 * The following code implements the flattening; for our example this results in:
77 * `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]`
78 */
79 let children = res.pop().unwrap();
80 let prev = res.last_mut().unwrap();
81 let needs_flattening = !children.is_empty()
82 && !prev.is_empty()
83 && children.first().unwrap().range.is_subrange(&prev.last().unwrap().range);
84 if !needs_flattening {
85 prev.extend(children);
86 } else {
87 let mut parent = prev.pop().unwrap();
88 for ele in children {
89 assert!(ele.range.is_subrange(&parent.range));
90 let mut cloned = parent.clone();
91 parent.range = TextRange::from_to(parent.range.start(), ele.range.start());
92 cloned.range = TextRange::from_to(ele.range.end(), cloned.range.end());
93 if !parent.range.is_empty() {
94 prev.push(parent);
95 }
96 prev.push(ele);
97 parent = cloned;
98 }
99 if !parent.range.is_empty() {
100 prev.push(parent);
101 }
102 }
103 }
104 };
105 let current = res.last_mut().expect("during DFS traversal, the stack must not be empty");
106
65 let event_range = match &event { 107 let event_range = match &event {
66 WalkEvent::Enter(it) => it.text_range(), 108 WalkEvent::Enter(it) => it.text_range(),
67 WalkEvent::Leave(it) => it.text_range(), 109 WalkEvent::Leave(it) => it.text_range(),
@@ -77,7 +119,7 @@ pub(crate) fn highlight(
77 WalkEvent::Enter(Some(mc)) => { 119 WalkEvent::Enter(Some(mc)) => {
78 current_macro_call = Some(mc.clone()); 120 current_macro_call = Some(mc.clone());
79 if let Some(range) = macro_call_range(&mc) { 121 if let Some(range) = macro_call_range(&mc) {
80 res.push(HighlightedRange { 122 current.push(HighlightedRange {
81 range, 123 range,
82 highlight: HighlightTag::Macro.into(), 124 highlight: HighlightTag::Macro.into(),
83 binding_hash: None, 125 binding_hash: None,
@@ -119,7 +161,7 @@ pub(crate) fn highlight(
119 161
120 if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { 162 if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) {
121 let expanded = element_to_highlight.as_token().unwrap().clone(); 163 let expanded = element_to_highlight.as_token().unwrap().clone();
122 if highlight_injection(&mut res, &sema, token, expanded).is_some() { 164 if highlight_injection(current, &sema, token, expanded).is_some() {
123 continue; 165 continue;
124 } 166 }
125 } 167 }
@@ -127,10 +169,17 @@ pub(crate) fn highlight(
127 if let Some((highlight, binding_hash)) = 169 if let Some((highlight, binding_hash)) =
128 highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight) 170 highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight)
129 { 171 {
130 res.push(HighlightedRange { range, highlight, binding_hash }); 172 current.push(HighlightedRange { range, highlight, binding_hash });
131 } 173 }
132 } 174 }
133 175
176 assert_eq!(res.len(), 1, "after DFS traversal, the stack should only contain a single element");
177 let res = res.pop().unwrap();
178 // Check that ranges are sorted and disjoint
179 assert!(res
180 .iter()
181 .zip(res.iter().skip(1))
182 .all(|(left, right)| left.range.end() <= right.range.start()));
134 res 183 res
135} 184}
136 185
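The flattening done on `WalkEvent::Leave` above can be read in isolation as: splice each child range into its parent, splitting the parent around it and dropping empty leftovers. A self-contained sketch under simplified types (the real code uses `TextRange`/`HighlightedRange` and works on the traversal stack in place):

    #[derive(Clone, Debug)]
    struct Range { start: u32, end: u32 }

    #[derive(Clone, Debug)]
    struct Hl { range: Range, tag: &'static str }

    // Splice `children` (already flat and nested inside `parent`) into `out`,
    // splitting `parent` around each child and dropping empty pieces.
    fn flatten(mut parent: Hl, children: Vec<Hl>, out: &mut Vec<Hl>) {
        for child in children {
            assert!(parent.range.start <= child.range.start && child.range.end <= parent.range.end);
            let mut rest = parent.clone();
            parent.range.end = child.range.start; // piece before the child
            rest.range.start = child.range.end;   // piece after the child
            if parent.range.start < parent.range.end {
                out.push(parent);
            }
            out.push(child);
            parent = rest;
        }
        if parent.range.start < parent.range.end {
            out.push(parent);
        }
    }

    fn main() {
        let mut out = Vec::new();
        flatten(
            Hl { range: Range { start: 0, end: 23 }, tag: "attribute" },
            vec![Hl { range: Range { start: 16, end: 21 }, tag: "string_literal" }],
            &mut out,
        );
        // attribute [0, 16), string_literal [16, 21), attribute [21, 23)
        println!("{:?}", out);
    }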
diff --git a/crates/ra_ide/src/syntax_highlighting/html.rs b/crates/ra_ide/src/syntax_highlighting/html.rs
index e13766c9d..4496529a1 100644
--- a/crates/ra_ide/src/syntax_highlighting/html.rs
+++ b/crates/ra_ide/src/syntax_highlighting/html.rs
@@ -1,9 +1,9 @@
1//! Renders a bit of code as HTML. 1//! Renders a bit of code as HTML.
2 2
3use ra_db::SourceDatabase; 3use ra_db::SourceDatabase;
4use ra_syntax::AstNode; 4use ra_syntax::{AstNode, TextUnit};
5 5
6use crate::{FileId, HighlightedRange, RootDatabase}; 6use crate::{FileId, RootDatabase};
7 7
8use super::highlight; 8use super::highlight;
9 9
@@ -21,51 +21,35 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
21 ) 21 )
22 } 22 }
23 23
24 let mut ranges = highlight(db, file_id, None); 24 let ranges = highlight(db, file_id, None);
25 ranges.sort_by_key(|it| it.range.start()); 25 let text = parse.tree().syntax().to_string();
26 // quick non-optimal heuristic to intersect token ranges and highlighted ranges 26 let mut prev_pos = TextUnit::from(0);
27 let mut frontier = 0;
28 let mut could_intersect: Vec<&HighlightedRange> = Vec::new();
29
30 let mut buf = String::new(); 27 let mut buf = String::new();
31 buf.push_str(&STYLE); 28 buf.push_str(&STYLE);
32 buf.push_str("<pre><code>"); 29 buf.push_str("<pre><code>");
33 let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token()); 30 for range in &ranges {
34 for token in tokens { 31 if range.range.start() > prev_pos {
35 could_intersect.retain(|it| token.text_range().start() <= it.range.end()); 32 let curr = &text[prev_pos.to_usize()..range.range.start().to_usize()];
36 while let Some(r) = ranges.get(frontier) { 33 let text = html_escape(curr);
37 if r.range.start() <= token.text_range().end() {
38 could_intersect.push(r);
39 frontier += 1;
40 } else {
41 break;
42 }
43 }
44 let text = html_escape(&token.text());
45 let ranges = could_intersect
46 .iter()
47 .filter(|it| token.text_range().is_subrange(&it.range))
48 .collect::<Vec<_>>();
49 if ranges.is_empty() {
50 buf.push_str(&text); 34 buf.push_str(&text);
51 } else {
52 let classes = ranges
53 .iter()
54 .map(|it| it.highlight.to_string().replace('.', " "))
55 .collect::<Vec<_>>()
56 .join(" ");
57 let binding_hash = ranges.first().and_then(|x| x.binding_hash);
58 let color = match (rainbow, binding_hash) {
59 (true, Some(hash)) => format!(
60 " data-binding-hash=\"{}\" style=\"color: {};\"",
61 hash,
62 rainbowify(hash)
63 ),
64 _ => "".into(),
65 };
66 buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
67 } 35 }
36 let curr = &text[range.range.start().to_usize()..range.range.end().to_usize()];
37
38 let class = range.highlight.to_string().replace('.', " ");
39 let color = match (rainbow, range.binding_hash) {
40 (true, Some(hash)) => {
41 format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
42 }
43 _ => "".into(),
44 };
45 buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", class, color, html_escape(curr)));
46
47 prev_pos = range.range.end();
68 } 48 }
49 // Add the remaining (non-highlighted) text
50 let curr = &text[prev_pos.to_usize()..];
51 let text = html_escape(curr);
52 buf.push_str(&text);
69 buf.push_str("</code></pre>"); 53 buf.push_str("</code></pre>");
70 buf 54 buf
71} 55}
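With the ranges returned by `highlight` now guaranteed to be sorted and pairwise disjoint (the new assertion in `syntax_highlighting.rs` above), the renderer can simply alternate between escaped plain text and one `<span>` per highlighted range. A stripped-down sketch of that loop with hypothetical helper names (the real code also emits binding-hash colors and uses `TextUnit` offsets):

    // Simplified model of the new rendering loop; `ranges` are sorted and disjoint.
    fn render(text: &str, ranges: &[(usize, usize, &str)]) -> String {
        let mut buf = String::new();
        let mut prev = 0;
        for &(start, end, class) in ranges {
            // Unhighlighted gap before this range.
            buf.push_str(&escape(&text[prev..start]));
            // The highlighted range itself, wrapped in a span carrying its class.
            buf.push_str(&format!("<span class=\"{}\">{}</span>", class, escape(&text[start..end])));
            prev = end;
        }
        // Trailing unhighlighted text.
        buf.push_str(&escape(&text[prev..]));
        buf
    }

    fn escape(s: &str) -> String {
        s.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;")
    }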
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs
index 98c030791..110887c2a 100644
--- a/crates/ra_ide/src/syntax_highlighting/tests.rs
+++ b/crates/ra_ide/src/syntax_highlighting/tests.rs
@@ -131,3 +131,28 @@ fn test_ranges() {
131 131
132 assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); 132 assert_eq!(&highlights[0].highlight.to_string(), "field.declaration");
133} 133}
134
135#[test]
136fn test_flattening() {
137 let (analysis, file_id) = single_file(
138 r##"
139fn fixture(ra_fixture: &str) {}
140
141fn main() {
142 fixture(r#"
143 trait Foo {
144 fn foo() {
145 println!("2 + 2 = {}", 4);
146 }
147 }"#
148 );
149}"##
150 .trim(),
151 );
152
153 let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlight_injection.html");
154 let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
155 let expected_html = &read_text(&dst_file);
156 fs::write(dst_file, &actual_html).unwrap();
157 assert_eq_text!(expected_html, actual_html);
158}
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index a7fcea0ac..254318e23 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1615,6 +1615,23 @@ fn test_issue_2520() {
1615} 1615}
1616 1616
1617#[test] 1617#[test]
1618fn test_issue_3861() {
1619 let macro_fixture = parse_macro(
1620 r#"
1621 macro_rules! rgb_color {
1622 ($p:expr, $t: ty) => {
1623 pub fn new() {
1624 let _ = 0 as $t << $p;
1625 }
1626 };
1627 }
1628 "#,
1629 );
1630
1631 macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#);
1632}
1633
1634#[test]
1618fn test_repeat_bad_var() { 1635fn test_repeat_bad_var() {
1619 // FIXME: the second rule of the macro should be removed and an error about 1636 // FIXME: the second rule of the macro should be removed and an error about
1620 // `$( $c )+` raised 1637 // `$( $c )+` raised
diff --git a/crates/ra_parser/src/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs
index 2c00bce80..386969d2d 100644
--- a/crates/ra_parser/src/grammar/types.rs
+++ b/crates/ra_parser/src/grammar/types.rs
@@ -7,7 +7,7 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
7 DYN_KW, L_ANGLE, 7 DYN_KW, L_ANGLE,
8]); 8]);
9 9
10const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA]; 10const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA, L_DOLLAR];
11 11
12pub(crate) fn type_(p: &mut Parser) { 12pub(crate) fn type_(p: &mut Parser) {
13 type_with_bounds_cond(p, true); 13 type_with_bounds_cond(p, true);
diff --git a/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs b/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs
index 9029f8815..55d93917c 100644
--- a/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs
+++ b/crates/ra_proc_macro_srv/src/proc_macro/diagnostic.rs
@@ -54,12 +54,14 @@ pub struct Diagnostic {
54} 54}
55 55
56macro_rules! diagnostic_child_methods { 56macro_rules! diagnostic_child_methods {
57 ($spanned:ident, $regular:ident, $level:expr) => ( 57 ($spanned:ident, $regular:ident, $level:expr) => {
58 /// Adds a new child diagnostic message to `self` with the level 58 /// Adds a new child diagnostic message to `self` with the level
59 /// identified by this method's name with the given `spans` and 59 /// identified by this method's name with the given `spans` and
60 /// `message`. 60 /// `message`.
61 pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic 61 pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
62 where S: MultiSpan, T: Into<String> 62 where
63 S: MultiSpan,
64 T: Into<String>,
63 { 65 {
64 self.children.push(Diagnostic::spanned(spans, $level, message)); 66 self.children.push(Diagnostic::spanned(spans, $level, message));
65 self 67 self
@@ -71,7 +73,7 @@ macro_rules! diagnostic_child_methods {
71 self.children.push(Diagnostic::new($level, message)); 73 self.children.push(Diagnostic::new($level, message));
72 self 74 self
73 } 75 }
74 ) 76 };
75} 77}
76 78
77/// Iterator over the children diagnostics of a `Diagnostic`. 79/// Iterator over the children diagnostics of a `Diagnostic`.
diff --git a/crates/ra_proc_macro_srv/src/proc_macro/mod.rs b/crates/ra_proc_macro_srv/src/proc_macro/mod.rs
index e35a6ff8b..ee0dc9722 100644
--- a/crates/ra_proc_macro_srv/src/proc_macro/mod.rs
+++ b/crates/ra_proc_macro_srv/src/proc_macro/mod.rs
@@ -169,13 +169,13 @@ pub mod token_stream {
169pub struct Span(bridge::client::Span); 169pub struct Span(bridge::client::Span);
170 170
171macro_rules! diagnostic_method { 171macro_rules! diagnostic_method {
172 ($name:ident, $level:expr) => ( 172 ($name:ident, $level:expr) => {
173 /// Creates a new `Diagnostic` with the given `message` at the span 173 /// Creates a new `Diagnostic` with the given `message` at the span
174 /// `self`. 174 /// `self`.
175 pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic { 175 pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
176 Diagnostic::spanned(self, $level, message) 176 Diagnostic::spanned(self, $level, message)
177 } 177 }
178 ) 178 };
179} 179}
180 180
181impl Span { 181impl Span {
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 191123c8e..8d1098036 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -316,7 +316,7 @@ impl<'a> SyntaxRewriter<'a> {
316 } 316 }
317} 317}
318 318
319impl<'a> ops::AddAssign for SyntaxRewriter<'_> { 319impl ops::AddAssign for SyntaxRewriter<'_> {
320 fn add_assign(&mut self, rhs: SyntaxRewriter) { 320 fn add_assign(&mut self, rhs: SyntaxRewriter) {
321 assert!(rhs.f.is_none()); 321 assert!(rhs.f.is_none());
322 self.replacements.extend(rhs.replacements) 322 self.replacements.extend(rhs.replacements)
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
index 911163eb7..abd126340 100644
--- a/docs/user/readme.adoc
+++ b/docs/user/readme.adoc
@@ -187,30 +187,7 @@ Prerequisites:
187 187
188`LSP` package. 188`LSP` package.
189 189
190Installation: 190Invoke the command palette (`ctrl+shift+p`), type `LSP enable`, choose either locally or globally, then select rust-analyzer to enable the rust-analyzer LSP.
191
1921. Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
1932. Type `LSP Settings` to open the LSP preferences editor
1943. Add the following LSP client definition to your settings:
195+
196[source,json]
197----
198"rust-analyzer": {
199 "command": ["rust-analyzer"],
200 "languageId": "rust",
201 "scopes": ["source.rust"],
202 "syntaxes": [
203 "Packages/Rust/Rust.sublime-syntax",
204 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
205 ],
206 "initializationOptions": {
207 "featureFlags": {
208 }
209 },
210}
211----
212
2134. You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
214 191
215== Usage 192== Usage
216 193
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index d5dec8fc5..eb4f299a1 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -115,25 +115,25 @@
115 "dev": true 115 "dev": true
116 }, 116 },
117 "@typescript-eslint/eslint-plugin": { 117 "@typescript-eslint/eslint-plugin": {
118 "version": "2.26.0", 118 "version": "2.27.0",
119 "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.26.0.tgz", 119 "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.27.0.tgz",
120 "integrity": "sha512-4yUnLv40bzfzsXcTAtZyTjbiGUXMrcIJcIMioI22tSOyAxpdXiZ4r7YQUU8Jj6XXrLz9d5aMHPQf5JFR7h27Nw==", 120 "integrity": "sha512-/my+vVHRN7zYgcp0n4z5A6HAK7bvKGBiswaM5zIlOQczsxj/aiD7RcgD+dvVFuwFaGh5+kM7XA6Q6PN0bvb1tw==",
121 "dev": true, 121 "dev": true,
122 "requires": { 122 "requires": {
123 "@typescript-eslint/experimental-utils": "2.26.0", 123 "@typescript-eslint/experimental-utils": "2.27.0",
124 "functional-red-black-tree": "^1.0.1", 124 "functional-red-black-tree": "^1.0.1",
125 "regexpp": "^3.0.0", 125 "regexpp": "^3.0.0",
126 "tsutils": "^3.17.1" 126 "tsutils": "^3.17.1"
127 } 127 }
128 }, 128 },
129 "@typescript-eslint/experimental-utils": { 129 "@typescript-eslint/experimental-utils": {
130 "version": "2.26.0", 130 "version": "2.27.0",
131 "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.26.0.tgz", 131 "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.27.0.tgz",
132 "integrity": "sha512-RELVoH5EYd+JlGprEyojUv9HeKcZqF7nZUGSblyAw1FwOGNnmQIU8kxJ69fttQvEwCsX5D6ECJT8GTozxrDKVQ==", 132 "integrity": "sha512-vOsYzjwJlY6E0NJRXPTeCGqjv5OHgRU1kzxHKWJVPjDYGbPgLudBXjIlc+OD1hDBZ4l1DLbOc5VjofKahsu9Jw==",
133 "dev": true, 133 "dev": true,
134 "requires": { 134 "requires": {
135 "@types/json-schema": "^7.0.3", 135 "@types/json-schema": "^7.0.3",
136 "@typescript-eslint/typescript-estree": "2.26.0", 136 "@typescript-eslint/typescript-estree": "2.27.0",
137 "eslint-scope": "^5.0.0", 137 "eslint-scope": "^5.0.0",
138 "eslint-utils": "^2.0.0" 138 "eslint-utils": "^2.0.0"
139 }, 139 },
@@ -150,21 +150,21 @@
150 } 150 }
151 }, 151 },
152 "@typescript-eslint/parser": { 152 "@typescript-eslint/parser": {
153 "version": "2.26.0", 153 "version": "2.27.0",
154 "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.26.0.tgz", 154 "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.27.0.tgz",
155 "integrity": "sha512-+Xj5fucDtdKEVGSh9353wcnseMRkPpEAOY96EEenN7kJVrLqy/EVwtIh3mxcUz8lsFXW1mT5nN5vvEam/a5HiQ==", 155 "integrity": "sha512-HFUXZY+EdwrJXZo31DW4IS1ujQW3krzlRjBrFRrJcMDh0zCu107/nRfhk/uBasO8m0NVDbBF5WZKcIUMRO7vPg==",
156 "dev": true, 156 "dev": true,
157 "requires": { 157 "requires": {
158 "@types/eslint-visitor-keys": "^1.0.0", 158 "@types/eslint-visitor-keys": "^1.0.0",
159 "@typescript-eslint/experimental-utils": "2.26.0", 159 "@typescript-eslint/experimental-utils": "2.27.0",
160 "@typescript-eslint/typescript-estree": "2.26.0", 160 "@typescript-eslint/typescript-estree": "2.27.0",
161 "eslint-visitor-keys": "^1.1.0" 161 "eslint-visitor-keys": "^1.1.0"
162 } 162 }
163 }, 163 },
164 "@typescript-eslint/typescript-estree": { 164 "@typescript-eslint/typescript-estree": {
165 "version": "2.26.0", 165 "version": "2.27.0",
166 "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.26.0.tgz", 166 "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.27.0.tgz",
167 "integrity": "sha512-3x4SyZCLB4zsKsjuhxDLeVJN6W29VwBnYpCsZ7vIdPel9ZqLfIZJgJXO47MNUkurGpQuIBALdPQKtsSnWpE1Yg==", 167 "integrity": "sha512-t2miCCJIb/FU8yArjAvxllxbTiyNqaXJag7UOpB5DVoM3+xnjeOngtqlJkLRnMtzaRcJhe3CIR9RmL40omubhg==",
168 "dev": true, 168 "dev": true,
169 "requires": { 169 "requires": {
170 "debug": "^4.1.1", 170 "debug": "^4.1.1",
@@ -1367,9 +1367,9 @@
1367 } 1367 }
1368 }, 1368 },
1369 "regexpp": { 1369 "regexpp": {
1370 "version": "3.0.0", 1370 "version": "3.1.0",
1371 "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.0.0.tgz", 1371 "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz",
1372 "integrity": "sha512-Z+hNr7RAVWxznLPuA7DIh8UNX1j9CDrUQxskw9IrBE1Dxue2lyXT+shqEIeLUjrokxIP8CMy1WkjgG3rTsd5/g==", 1372 "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==",
1373 "dev": true 1373 "dev": true
1374 }, 1374 },
1375 "resolve": { 1375 "resolve": {
@@ -1407,9 +1407,9 @@
1407 } 1407 }
1408 }, 1408 },
1409 "rollup": { 1409 "rollup": {
1410 "version": "2.3.2", 1410 "version": "2.3.3",
1411 "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.3.2.tgz", 1411 "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.3.3.tgz",
1412 "integrity": "sha512-p66+fbfaUUOGE84sHXAOgfeaYQMslgAazoQMp//nlR519R61213EPFgrMZa48j31jNacJwexSAR1Q8V/BwGKBA==", 1412 "integrity": "sha512-uJ9VNWk80mb4wDCSfd1AyHoSc9TrWbkZtnO6wbsMTp9muSWkT26Dvc99MX1yGCOTvUN1Skw/KpFzKdUDuZKTXA==",
1413 "dev": true, 1413 "dev": true,
1414 "requires": { 1414 "requires": {
1415 "fsevents": "~2.1.2" 1415 "fsevents": "~2.1.2"
diff --git a/editors/code/package.json b/editors/code/package.json
index 8ae8ea414..0bf7b6ae6 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -21,7 +21,7 @@
21 "Programming Languages" 21 "Programming Languages"
22 ], 22 ],
23 "engines": { 23 "engines": {
24 "vscode": "^1.43.0" 24 "vscode": "^1.44.0"
25 }, 25 },
26 "enableProposedApi": true, 26 "enableProposedApi": true,
27 "scripts": { 27 "scripts": {
@@ -42,10 +42,10 @@
42 "@types/node": "^12.12.34", 42 "@types/node": "^12.12.34",
43 "@types/node-fetch": "^2.5.5", 43 "@types/node-fetch": "^2.5.5",
44 "@types/vscode": "^1.43.0", 44 "@types/vscode": "^1.43.0",
45 "@typescript-eslint/eslint-plugin": "^2.26.0", 45 "@typescript-eslint/eslint-plugin": "^2.27.0",
46 "@typescript-eslint/parser": "^2.26.0", 46 "@typescript-eslint/parser": "^2.27.0",
47 "eslint": "^6.8.0", 47 "eslint": "^6.8.0",
48 "rollup": "^2.3.2", 48 "rollup": "^2.3.3",
49 "tslib": "^1.11.1", 49 "tslib": "^1.11.1",
50 "typescript": "^3.8.3", 50 "typescript": "^3.8.3",
51 "typescript-formatter": "^7.2.2", 51 "typescript-formatter": "^7.2.2",
@@ -342,11 +342,6 @@
342 "default": true, 342 "default": true,
343 "description": "Show function name and docs in parameter hints" 343 "description": "Show function name and docs in parameter hints"
344 }, 344 },
345 "rust-analyzer.highlighting.semanticTokens": {
346 "type": "boolean",
347 "default": false,
348 "description": "Use proposed semantic tokens API for syntax highlighting"
349 },
350 "rust-analyzer.updates.channel": { 345 "rust-analyzer.updates.channel": {
351 "type": "string", 346 "type": "string",
352 "enum": [ 347 "enum": [
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 3b1d00bca..0ad4b63ae 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -1,11 +1,10 @@
1import * as lc from 'vscode-languageclient'; 1import * as lc from 'vscode-languageclient';
2import * as vscode from 'vscode'; 2import * as vscode from 'vscode';
3 3
4import { Config } from './config';
5import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed'; 4import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
6import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed'; 5import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed';
7 6
8export async function createClient(config: Config, serverPath: string, cwd: string): Promise<lc.LanguageClient> { 7export async function createClient(serverPath: string, cwd: string): Promise<lc.LanguageClient> {
9 // '.' Is the fallback if no folder is open 8 // '.' Is the fallback if no folder is open
10 // TODO?: Workspace folders support Uri's (eg: file://test.txt). 9 // TODO?: Workspace folders support Uri's (eg: file://test.txt).
11 // It might be a good idea to test if the uri points to a file. 10 // It might be a good idea to test if the uri points to a file.
@@ -73,15 +72,12 @@ export async function createClient(config: Config, serverPath: string, cwd: stri
73 }; 72 };
74 73
75 // To turn on all proposed features use: res.registerProposedFeatures(); 74 // To turn on all proposed features use: res.registerProposedFeatures();
76 // Here we want to just enable CallHierarchyFeature since it is available on stable. 75 // Here we want to enable CallHierarchyFeature and SemanticTokensFeature
77 // Note that while the CallHierarchyFeature is stable the LSP protocol is not. 76 // since they are available on stable.
77 // Note that while these features are stable in vscode their LSP protocol
78 // implementations are still in the "proposed" category for 3.16.
78 res.registerFeature(new CallHierarchyFeature(res)); 79 res.registerFeature(new CallHierarchyFeature(res));
79 80 res.registerFeature(new SemanticTokensFeature(res));
80 if (config.package.enableProposedApi) {
81 if (config.highlightingSemanticTokens) {
82 res.registerFeature(new SemanticTokensFeature(res));
83 }
84 }
85 81
86 return res; 82 return res;
87} 83}
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 1f45f1de0..21c1c9f23 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -69,7 +69,6 @@ export class Config {
69 get serverPath() { return this.cfg.get<null | string>("serverPath")!; } 69 get serverPath() { return this.cfg.get<null | string>("serverPath")!; }
70 get channel() { return this.cfg.get<UpdatesChannel>("updates.channel")!; } 70 get channel() { return this.cfg.get<UpdatesChannel>("updates.channel")!; }
71 get askBeforeDownload() { return this.cfg.get<boolean>("updates.askBeforeDownload")!; } 71 get askBeforeDownload() { return this.cfg.get<boolean>("updates.askBeforeDownload")!; }
72 get highlightingSemanticTokens() { return this.cfg.get<boolean>("highlighting.semanticTokens")!; }
73 get traceExtension() { return this.cfg.get<boolean>("trace.extension")!; } 72 get traceExtension() { return this.cfg.get<boolean>("trace.extension")!; }
74 73
75 get inlayHints() { 74 get inlayHints() {
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index bd1c3de07..f7ed62d03 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -21,7 +21,7 @@ export class Ctx {
21 serverPath: string, 21 serverPath: string,
22 cwd: string, 22 cwd: string,
23 ): Promise<Ctx> { 23 ): Promise<Ctx> {
24 const client = await createClient(config, serverPath, cwd); 24 const client = await createClient(serverPath, cwd);
25 const res = new Ctx(config, extCtx, client, serverPath); 25 const res = new Ctx(config, extCtx, client, serverPath);
26 res.pushCleanup(client.start()); 26 res.pushCleanup(client.start());
27 await client.onReady(); 27 await client.onReady();
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 3255eefb9..a56eeef8d 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -3,24 +3,20 @@ use std::path::PathBuf;
3use anyhow::Result; 3use anyhow::Result;
4 4
5use crate::{ 5use crate::{
6 not_bash::{fs2, pushd, rm_rf, run}, 6 not_bash::{date_iso, fs2, pushd, rm_rf, run},
7 project_root, 7 project_root,
8}; 8};
9 9
10pub struct ClientOpts { 10pub fn run_dist(nightly: bool, client_version: Option<String>) -> Result<()> {
11 pub version: String,
12 pub release_tag: String,
13}
14
15pub fn run_dist(client_opts: Option<ClientOpts>) -> Result<()> {
16 let dist = project_root().join("dist"); 11 let dist = project_root().join("dist");
17 rm_rf(&dist)?; 12 rm_rf(&dist)?;
18 fs2::create_dir_all(&dist)?; 13 fs2::create_dir_all(&dist)?;
19 14
20 if let Some(ClientOpts { version, release_tag }) = client_opts { 15 if let Some(version) = client_version {
16 let release_tag = if nightly { "nightly".to_string() } else { date_iso()? };
21 dist_client(&version, &release_tag)?; 17 dist_client(&version, &release_tag)?;
22 } 18 }
23 dist_server()?; 19 dist_server(nightly)?;
24 Ok(()) 20 Ok(())
25} 21}
26 22
@@ -50,7 +46,7 @@ fn dist_client(version: &str, release_tag: &str) -> Result<()> {
50 Ok(()) 46 Ok(())
51} 47}
52 48
53fn dist_server() -> Result<()> { 49fn dist_server(nightly: bool) -> Result<()> {
54 if cfg!(target_os = "linux") { 50 if cfg!(target_os = "linux") {
55 std::env::set_var("CC", "clang"); 51 std::env::set_var("CC", "clang");
56 run!( 52 run!(
@@ -60,7 +56,9 @@ fn dist_server() -> Result<()> {
60 // We'd want to add, but that requires setting the right linker somehow 56 // We'd want to add, but that requires setting the right linker somehow
61 // --features=jemalloc 57 // --features=jemalloc
62 )?; 58 )?;
63 run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?; 59 if !nightly {
60 run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?;
61 }
64 } else { 62 } else {
65 run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?; 63 run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?;
66 } 64 }
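
Together with the release.yaml change, this hunk folds the old `--version`/`--tag` pair into an optional `--client <version>` plus a `--nightly` switch, and derives the release tag inside `run_dist` instead of taking it from CI. A small sketch of that tag selection, assuming anyhow (already an xtask dependency) and with a stand-in body for `date_iso`:

    use anyhow::Result;

    // Stand-in for xtask::not_bash::date_iso(), which shells out to `date --iso --utc`.
    fn date_iso() -> Result<String> {
        Ok("2020-04-09".to_string())
    }

    // Hypothetical helper mirroring the tag logic now embedded in run_dist:
    // nightly builds are tagged "nightly", release builds with today's UTC date.
    fn release_tag(nightly: bool) -> Result<String> {
        if nightly {
            Ok("nightly".to_string())
        } else {
            date_iso()
        }
    }

    fn main() -> Result<()> {
        assert_eq!(release_tag(true)?, "nightly");
        println!("stable tag would be {}", release_tag(false)?);
        Ok(())
    }
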
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index 0b8243f62..9d087daa2 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -21,7 +21,7 @@ use walkdir::{DirEntry, WalkDir};
21 21
22use crate::{ 22use crate::{
23 codegen::Mode, 23 codegen::Mode,
24 not_bash::{fs2, pushd, rm_rf, run}, 24 not_bash::{date_iso, fs2, pushd, rm_rf, run},
25}; 25};
26 26
27pub use anyhow::Result; 27pub use anyhow::Result;
@@ -180,7 +180,7 @@ pub fn run_release(dry_run: bool) -> Result<()> {
180 let website_root = project_root().join("../rust-analyzer.github.io"); 180 let website_root = project_root().join("../rust-analyzer.github.io");
181 let changelog_dir = website_root.join("./thisweek/_posts"); 181 let changelog_dir = website_root.join("./thisweek/_posts");
182 182
183 let today = run!("date --iso")?; 183 let today = date_iso()?;
184 let commit = run!("git rev-parse HEAD")?; 184 let commit = run!("git rev-parse HEAD")?;
185 let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count(); 185 let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count();
186 186
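
Here the release script stops shelling out with `run!("date --iso")` directly and reuses the new shared `date_iso()` helper, which also adds `--utc`. A dependency-free approximation of what that helper produces, assuming a GNU `date` binary on PATH (the real helper goes through not_bash's `run!` macro rather than std::process):

    use std::io;
    use std::process::Command;

    // Rough standalone equivalent of date_iso(); assumes GNU `date` is available,
    // as it is on the Linux CI runners that build the release.
    fn date_iso() -> io::Result<String> {
        let out = Command::new("date").args(&["--iso", "--utc"]).output()?;
        Ok(String::from_utf8_lossy(&out.stdout).trim().to_string())
    }

    fn main() -> io::Result<()> {
        println!("{}", date_iso()?);
        Ok(())
    }
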
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index a9adcfba4..dff3ce4a1 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -13,7 +13,7 @@ use std::env;
13use pico_args::Arguments; 13use pico_args::Arguments;
14use xtask::{ 14use xtask::{
15 codegen::{self, Mode}, 15 codegen::{self, Mode},
16 dist::{run_dist, ClientOpts}, 16 dist::run_dist,
17 install::{ClientOpt, InstallCmd, ServerOpt}, 17 install::{ClientOpt, InstallCmd, ServerOpt},
18 not_bash::pushd, 18 not_bash::pushd,
19 pre_commit, project_root, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt, 19 pre_commit, project_root, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt,
@@ -103,16 +103,10 @@ FLAGS:
103 run_release(dry_run) 103 run_release(dry_run)
104 } 104 }
105 "dist" => { 105 "dist" => {
106 let client_opts = if args.contains("--client") { 106 let nightly = args.contains("--nightly");
107 Some(ClientOpts { 107 let client_version: Option<String> = args.opt_value_from_str("--client")?;
108 version: args.value_from_str("--version")?,
109 release_tag: args.value_from_str("--tag")?,
110 })
111 } else {
112 None
113 };
114 args.finish()?; 108 args.finish()?;
115 run_dist(client_opts) 109 run_dist(nightly, client_version)
116 } 110 }
117 _ => { 111 _ => {
118 eprintln!( 112 eprintln!(
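
The `dist` arm now reads a boolean `--nightly` flag and an optional `--client <version>` value via pico_args instead of building a `ClientOpts` struct. A sketch of just that flag parsing, assuming the pico_args version xtask pinned at the time (where `finish()` returns a `Result` if unparsed arguments remain); the real main also pops the `dist` subcommand off the argument list first:

    use pico_args::Arguments;

    // Sketch of the new `cargo xtask dist` invocations shown in release.yaml:
    //   cargo xtask dist --client 0.2.42            -> stable, tag = today's UTC date
    //   cargo xtask dist --nightly --client 0.3.42  -> nightly, tag = "nightly"
    fn main() -> Result<(), pico_args::Error> {
        let mut args = Arguments::from_env();
        let nightly = args.contains("--nightly");
        let client_version: Option<String> = args.opt_value_from_str("--client")?;
        args.finish()?;
        println!("nightly = {}, client version = {:?}", nightly, client_version);
        Ok(())
    }
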
diff --git a/xtask/src/not_bash.rs b/xtask/src/not_bash.rs
index 2d45e5dff..ef1699934 100644
--- a/xtask/src/not_bash.rs
+++ b/xtask/src/not_bash.rs
@@ -94,6 +94,10 @@ pub fn run_process(cmd: String, echo: bool) -> Result<String> {
94 run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd)) 94 run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd))
95} 95}
96 96
97pub fn date_iso() -> Result<String> {
98 run!("date --iso --utc")
99}
100
97fn run_process_inner(cmd: &str, echo: bool) -> Result<String> { 101fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
98 let mut args = shelx(cmd); 102 let mut args = shelx(cmd);
99 let binary = args.remove(0); 103 let binary = args.remove(0);