diff options
-rw-r--r-- | Cargo.lock | 1 | ||||
-rw-r--r-- | crates/ra_assists/src/lib.rs | 6 | ||||
-rw-r--r-- | crates/ra_hir_def/src/adt.rs | 1 | ||||
-rw-r--r-- | crates/ra_hir_def/src/data.rs | 31 | ||||
-rw-r--r-- | crates/ra_hir_ty/src/_match.rs | 74 | ||||
-rw-r--r-- | crates/ra_hir_ty/src/expr.rs | 3 | ||||
-rw-r--r-- | crates/ra_hir_ty/src/tests.rs | 30 | ||||
-rw-r--r-- | crates/ra_proc_macro_srv/Cargo.toml | 2 | ||||
-rw-r--r-- | crates/ra_proc_macro_srv/src/lib.rs | 3 | ||||
-rw-r--r-- | crates/ra_proc_macro_srv/src/rustc_server.rs | 684 | ||||
-rw-r--r-- | docs/user/readme.adoc | 25 | ||||
-rw-r--r-- | editors/code/package.json | 7 | ||||
-rw-r--r-- | editors/code/src/client.ts | 16 | ||||
-rw-r--r-- | editors/code/src/config.ts | 1 | ||||
-rw-r--r-- | editors/code/src/ctx.ts | 2 |
15 files changed, 819 insertions, 67 deletions
diff --git a/Cargo.lock b/Cargo.lock index eb9824218..34f05e83a 100644 --- a/Cargo.lock +++ b/Cargo.lock | |||
@@ -1081,6 +1081,7 @@ version = "0.1.0" | |||
1081 | dependencies = [ | 1081 | dependencies = [ |
1082 | "cargo_metadata", | 1082 | "cargo_metadata", |
1083 | "difference", | 1083 | "difference", |
1084 | "ra_mbe", | ||
1084 | "ra_proc_macro", | 1085 | "ra_proc_macro", |
1085 | "ra_tt", | 1086 | "ra_tt", |
1086 | "serde_derive", | 1087 | "serde_derive", |
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs index c698d6e8c..5ba5254fd 100644 --- a/crates/ra_assists/src/lib.rs +++ b/crates/ra_assists/src/lib.rs | |||
@@ -132,13 +132,12 @@ mod handlers { | |||
132 | 132 | ||
133 | pub(crate) fn all() -> &'static [AssistHandler] { | 133 | pub(crate) fn all() -> &'static [AssistHandler] { |
134 | &[ | 134 | &[ |
135 | // These are alphabetic for the foolish consistency | ||
135 | add_custom_impl::add_custom_impl, | 136 | add_custom_impl::add_custom_impl, |
136 | add_derive::add_derive, | 137 | add_derive::add_derive, |
137 | add_explicit_type::add_explicit_type, | 138 | add_explicit_type::add_explicit_type, |
138 | add_function::add_function, | 139 | add_function::add_function, |
139 | add_impl::add_impl, | 140 | add_impl::add_impl, |
140 | add_missing_impl_members::add_missing_default_members, | ||
141 | add_missing_impl_members::add_missing_impl_members, | ||
142 | add_new::add_new, | 141 | add_new::add_new, |
143 | apply_demorgan::apply_demorgan, | 142 | apply_demorgan::apply_demorgan, |
144 | auto_import::auto_import, | 143 | auto_import::auto_import, |
@@ -168,6 +167,9 @@ mod handlers { | |||
168 | replace_unwrap_with_match::replace_unwrap_with_match, | 167 | replace_unwrap_with_match::replace_unwrap_with_match, |
169 | split_import::split_import, | 168 | split_import::split_import, |
170 | add_from_impl_for_enum::add_from_impl_for_enum, | 169 | add_from_impl_for_enum::add_from_impl_for_enum, |
170 | // These are manually sorted for better priorities | ||
171 | add_missing_impl_members::add_missing_impl_members, | ||
172 | add_missing_impl_members::add_missing_default_members, | ||
171 | ] | 173 | ] |
172 | } | 174 | } |
173 | } | 175 | } |
diff --git a/crates/ra_hir_def/src/adt.rs b/crates/ra_hir_def/src/adt.rs index de07fc952..7fc4cd76e 100644 --- a/crates/ra_hir_def/src/adt.rs +++ b/crates/ra_hir_def/src/adt.rs | |||
@@ -54,6 +54,7 @@ pub struct StructFieldData { | |||
54 | impl StructData { | 54 | impl StructData { |
55 | pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> { | 55 | pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> { |
56 | let src = id.lookup(db).source(db); | 56 | let src = id.lookup(db).source(db); |
57 | |||
57 | let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); | 58 | let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); |
58 | let variant_data = VariantData::new(db, src.map(|s| s.kind())); | 59 | let variant_data = VariantData::new(db, src.map(|s| s.kind())); |
59 | let variant_data = Arc::new(variant_data); | 60 | let variant_data = Arc::new(variant_data); |
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs index 04bd4a305..606ec48b0 100644 --- a/crates/ra_hir_def/src/data.rs +++ b/crates/ra_hir_def/src/data.rs | |||
@@ -3,15 +3,18 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use hir_expand::{ | 5 | use hir_expand::{ |
6 | hygiene::Hygiene, | ||
6 | name::{name, AsName, Name}, | 7 | name::{name, AsName, Name}, |
7 | AstId, InFile, | 8 | AstId, InFile, |
8 | }; | 9 | }; |
10 | use ra_cfg::CfgOptions; | ||
9 | use ra_prof::profile; | 11 | use ra_prof::profile; |
10 | use ra_syntax::ast::{ | 12 | use ra_syntax::ast::{ |
11 | self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, VisibilityOwner, | 13 | self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, VisibilityOwner, |
12 | }; | 14 | }; |
13 | 15 | ||
14 | use crate::{ | 16 | use crate::{ |
17 | attr::Attrs, | ||
15 | db::DefDatabase, | 18 | db::DefDatabase, |
16 | path::{path, GenericArgs, Path}, | 19 | path::{path, GenericArgs, Path}, |
17 | src::HasSource, | 20 | src::HasSource, |
@@ -26,6 +29,7 @@ pub struct FunctionData { | |||
26 | pub name: Name, | 29 | pub name: Name, |
27 | pub params: Vec<TypeRef>, | 30 | pub params: Vec<TypeRef>, |
28 | pub ret_type: TypeRef, | 31 | pub ret_type: TypeRef, |
32 | pub attrs: Attrs, | ||
29 | /// True if the first param is `self`. This is relevant to decide whether this | 33 | /// True if the first param is `self`. This is relevant to decide whether this |
30 | /// can be called as a method. | 34 | /// can be called as a method. |
31 | pub has_self_param: bool, | 35 | pub has_self_param: bool, |
@@ -63,6 +67,8 @@ impl FunctionData { | |||
63 | params.push(type_ref); | 67 | params.push(type_ref); |
64 | } | 68 | } |
65 | } | 69 | } |
70 | let attrs = Attrs::new(&src.value, &Hygiene::new(db.upcast(), src.file_id)); | ||
71 | |||
66 | let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) { | 72 | let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) { |
67 | TypeRef::from_ast(type_ref) | 73 | TypeRef::from_ast(type_ref) |
68 | } else { | 74 | } else { |
@@ -81,7 +87,7 @@ impl FunctionData { | |||
81 | let visibility = | 87 | let visibility = |
82 | RawVisibility::from_ast_with_default(db, vis_default, src.map(|s| s.visibility())); | 88 | RawVisibility::from_ast_with_default(db, vis_default, src.map(|s| s.visibility())); |
83 | 89 | ||
84 | let sig = FunctionData { name, params, ret_type, has_self_param, visibility }; | 90 | let sig = FunctionData { name, params, ret_type, has_self_param, visibility, attrs }; |
85 | Arc::new(sig) | 91 | Arc::new(sig) |
86 | } | 92 | } |
87 | } | 93 | } |
@@ -211,6 +217,7 @@ impl ImplData { | |||
211 | let module_id = impl_loc.container.module(db); | 217 | let module_id = impl_loc.container.module(db); |
212 | 218 | ||
213 | let mut items = Vec::new(); | 219 | let mut items = Vec::new(); |
220 | |||
214 | if let Some(item_list) = src.value.item_list() { | 221 | if let Some(item_list) = src.value.item_list() { |
215 | items.extend(collect_impl_items(db, item_list.impl_items(), src.file_id, id)); | 222 | items.extend(collect_impl_items(db, item_list.impl_items(), src.file_id, id)); |
216 | items.extend(collect_impl_items_in_macros( | 223 | items.extend(collect_impl_items_in_macros( |
@@ -311,6 +318,10 @@ fn collect_impl_items_in_macro( | |||
311 | } | 318 | } |
312 | } | 319 | } |
313 | 320 | ||
321 | fn is_cfg_enabled(cfg_options: &CfgOptions, attrs: &Attrs) -> bool { | ||
322 | attrs.by_key("cfg").tt_values().all(|tt| cfg_options.is_cfg_enabled(tt) != Some(false)) | ||
323 | } | ||
324 | |||
314 | fn collect_impl_items( | 325 | fn collect_impl_items( |
315 | db: &dyn DefDatabase, | 326 | db: &dyn DefDatabase, |
316 | impl_items: impl Iterator<Item = ImplItem>, | 327 | impl_items: impl Iterator<Item = ImplItem>, |
@@ -318,16 +329,26 @@ fn collect_impl_items( | |||
318 | id: ImplId, | 329 | id: ImplId, |
319 | ) -> Vec<AssocItemId> { | 330 | ) -> Vec<AssocItemId> { |
320 | let items = db.ast_id_map(file_id); | 331 | let items = db.ast_id_map(file_id); |
332 | let crate_graph = db.crate_graph(); | ||
333 | let module_id = id.lookup(db).container.module(db); | ||
321 | 334 | ||
322 | impl_items | 335 | impl_items |
323 | .map(|item_node| match item_node { | 336 | .filter_map(|item_node| match item_node { |
324 | ast::ImplItem::FnDef(it) => { | 337 | ast::ImplItem::FnDef(it) => { |
325 | let def = FunctionLoc { | 338 | let def = FunctionLoc { |
326 | container: AssocContainerId::ImplId(id), | 339 | container: AssocContainerId::ImplId(id), |
327 | ast_id: AstId::new(file_id, items.ast_id(&it)), | 340 | ast_id: AstId::new(file_id, items.ast_id(&it)), |
328 | } | 341 | } |
329 | .intern(db); | 342 | .intern(db); |
330 | def.into() | 343 | |
344 | if !is_cfg_enabled( | ||
345 | &crate_graph[module_id.krate].cfg_options, | ||
346 | &db.function_data(def).attrs, | ||
347 | ) { | ||
348 | None | ||
349 | } else { | ||
350 | Some(def.into()) | ||
351 | } | ||
331 | } | 352 | } |
332 | ast::ImplItem::ConstDef(it) => { | 353 | ast::ImplItem::ConstDef(it) => { |
333 | let def = ConstLoc { | 354 | let def = ConstLoc { |
@@ -335,7 +356,7 @@ fn collect_impl_items( | |||
335 | ast_id: AstId::new(file_id, items.ast_id(&it)), | 356 | ast_id: AstId::new(file_id, items.ast_id(&it)), |
336 | } | 357 | } |
337 | .intern(db); | 358 | .intern(db); |
338 | def.into() | 359 | Some(def.into()) |
339 | } | 360 | } |
340 | ast::ImplItem::TypeAliasDef(it) => { | 361 | ast::ImplItem::TypeAliasDef(it) => { |
341 | let def = TypeAliasLoc { | 362 | let def = TypeAliasLoc { |
@@ -343,7 +364,7 @@ fn collect_impl_items( | |||
343 | ast_id: AstId::new(file_id, items.ast_id(&it)), | 364 | ast_id: AstId::new(file_id, items.ast_id(&it)), |
344 | } | 365 | } |
345 | .intern(db); | 366 | .intern(db); |
346 | def.into() | 367 | Some(def.into()) |
347 | } | 368 | } |
348 | }) | 369 | }) |
349 | .collect() | 370 | .collect() |
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs index f29a25505..9e9a9d047 100644 --- a/crates/ra_hir_ty/src/_match.rs +++ b/crates/ra_hir_ty/src/_match.rs | |||
@@ -235,7 +235,10 @@ impl From<PatId> for PatIdOrWild { | |||
235 | } | 235 | } |
236 | 236 | ||
237 | #[derive(Debug, Clone, Copy, PartialEq)] | 237 | #[derive(Debug, Clone, Copy, PartialEq)] |
238 | pub struct MatchCheckNotImplemented; | 238 | pub enum MatchCheckErr { |
239 | NotImplemented, | ||
240 | MalformedMatchArm, | ||
241 | } | ||
239 | 242 | ||
240 | /// The return type of `is_useful` is either an indication of usefulness | 243 | /// The return type of `is_useful` is either an indication of usefulness |
241 | /// of the match arm, or an error in the case the match statement | 244 | /// of the match arm, or an error in the case the match statement |
@@ -244,7 +247,7 @@ pub struct MatchCheckNotImplemented; | |||
244 | /// | 247 | /// |
245 | /// The `std::result::Result` type is used here rather than a custom enum | 248 | /// The `std::result::Result` type is used here rather than a custom enum |
246 | /// to allow the use of `?`. | 249 | /// to allow the use of `?`. |
247 | pub type MatchCheckResult<T> = Result<T, MatchCheckNotImplemented>; | 250 | pub type MatchCheckResult<T> = Result<T, MatchCheckErr>; |
248 | 251 | ||
249 | #[derive(Debug)] | 252 | #[derive(Debug)] |
250 | /// A row in a Matrix. | 253 | /// A row in a Matrix. |
@@ -335,12 +338,12 @@ impl PatStack { | |||
335 | Expr::Literal(Literal::Bool(_)) => None, | 338 | Expr::Literal(Literal::Bool(_)) => None, |
336 | // perhaps this is actually unreachable given we have | 339 | // perhaps this is actually unreachable given we have |
337 | // already checked that these match arms have the appropriate type? | 340 | // already checked that these match arms have the appropriate type? |
338 | _ => return Err(MatchCheckNotImplemented), | 341 | _ => return Err(MatchCheckErr::NotImplemented), |
339 | } | 342 | } |
340 | } | 343 | } |
341 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), | 344 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), |
342 | (Pat::Path(_), Constructor::Enum(constructor)) => { | 345 | (Pat::Path(_), Constructor::Enum(constructor)) => { |
343 | // enums with no associated data become `Pat::Path` | 346 | // unit enum variants become `Pat::Path` |
344 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 347 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); |
345 | if !enum_variant_matches(cx, pat_id, *constructor) { | 348 | if !enum_variant_matches(cx, pat_id, *constructor) { |
346 | None | 349 | None |
@@ -348,16 +351,23 @@ impl PatStack { | |||
348 | Some(self.to_tail()) | 351 | Some(self.to_tail()) |
349 | } | 352 | } |
350 | } | 353 | } |
351 | (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(constructor)) => { | 354 | (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(enum_constructor)) => { |
352 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 355 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); |
353 | if !enum_variant_matches(cx, pat_id, *constructor) { | 356 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { |
354 | None | 357 | None |
355 | } else { | 358 | } else { |
359 | // If the enum variant matches, then we need to confirm | ||
360 | // that the number of patterns aligns with the expected | ||
361 | // number of patterns for that enum variant. | ||
362 | if pat_ids.len() != constructor.arity(cx)? { | ||
363 | return Err(MatchCheckErr::MalformedMatchArm); | ||
364 | } | ||
365 | |||
356 | Some(self.replace_head_with(pat_ids)) | 366 | Some(self.replace_head_with(pat_ids)) |
357 | } | 367 | } |
358 | } | 368 | } |
359 | (Pat::Or(_), _) => return Err(MatchCheckNotImplemented), | 369 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), |
360 | (_, _) => return Err(MatchCheckNotImplemented), | 370 | (_, _) => return Err(MatchCheckErr::NotImplemented), |
361 | }; | 371 | }; |
362 | 372 | ||
363 | Ok(result) | 373 | Ok(result) |
@@ -514,7 +524,7 @@ pub(crate) fn is_useful( | |||
514 | return if any_useful { | 524 | return if any_useful { |
515 | Ok(Usefulness::Useful) | 525 | Ok(Usefulness::Useful) |
516 | } else if found_unimplemented { | 526 | } else if found_unimplemented { |
517 | Err(MatchCheckNotImplemented) | 527 | Err(MatchCheckErr::NotImplemented) |
518 | } else { | 528 | } else { |
519 | Ok(Usefulness::NotUseful) | 529 | Ok(Usefulness::NotUseful) |
520 | }; | 530 | }; |
@@ -567,7 +577,7 @@ pub(crate) fn is_useful( | |||
567 | } | 577 | } |
568 | 578 | ||
569 | if found_unimplemented { | 579 | if found_unimplemented { |
570 | Err(MatchCheckNotImplemented) | 580 | Err(MatchCheckErr::NotImplemented) |
571 | } else { | 581 | } else { |
572 | Ok(Usefulness::NotUseful) | 582 | Ok(Usefulness::NotUseful) |
573 | } | 583 | } |
@@ -604,7 +614,7 @@ impl Constructor { | |||
604 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | 614 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { |
605 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), | 615 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), |
606 | VariantData::Unit => 0, | 616 | VariantData::Unit => 0, |
607 | _ => return Err(MatchCheckNotImplemented), | 617 | _ => return Err(MatchCheckErr::NotImplemented), |
608 | } | 618 | } |
609 | } | 619 | } |
610 | }; | 620 | }; |
@@ -637,20 +647,20 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt | |||
637 | Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }), | 647 | Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }), |
638 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { | 648 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { |
639 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), | 649 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), |
640 | _ => return Err(MatchCheckNotImplemented), | 650 | _ => return Err(MatchCheckErr::NotImplemented), |
641 | }, | 651 | }, |
642 | Pat::TupleStruct { .. } | Pat::Path(_) => { | 652 | Pat::TupleStruct { .. } | Pat::Path(_) => { |
643 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); | 653 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); |
644 | let variant_id = | 654 | let variant_id = |
645 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckNotImplemented)?; | 655 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::NotImplemented)?; |
646 | match variant_id { | 656 | match variant_id { |
647 | VariantId::EnumVariantId(enum_variant_id) => { | 657 | VariantId::EnumVariantId(enum_variant_id) => { |
648 | Some(Constructor::Enum(enum_variant_id)) | 658 | Some(Constructor::Enum(enum_variant_id)) |
649 | } | 659 | } |
650 | _ => return Err(MatchCheckNotImplemented), | 660 | _ => return Err(MatchCheckErr::NotImplemented), |
651 | } | 661 | } |
652 | } | 662 | } |
653 | _ => return Err(MatchCheckNotImplemented), | 663 | _ => return Err(MatchCheckErr::NotImplemented), |
654 | }; | 664 | }; |
655 | 665 | ||
656 | Ok(res) | 666 | Ok(res) |
@@ -1325,6 +1335,40 @@ mod tests { | |||
1325 | } | 1335 | } |
1326 | 1336 | ||
1327 | #[test] | 1337 | #[test] |
1338 | fn malformed_match_arm_tuple_missing_pattern() { | ||
1339 | let content = r" | ||
1340 | fn test_fn() { | ||
1341 | match (0) { | ||
1342 | () => (), | ||
1343 | } | ||
1344 | } | ||
1345 | "; | ||
1346 | |||
1347 | // Match arms with the incorrect type are filtered out. | ||
1348 | check_diagnostic(content); | ||
1349 | } | ||
1350 | |||
1351 | #[test] | ||
1352 | fn malformed_match_arm_tuple_enum_missing_pattern() { | ||
1353 | let content = r" | ||
1354 | enum Either { | ||
1355 | A, | ||
1356 | B(u32), | ||
1357 | } | ||
1358 | fn test_fn() { | ||
1359 | match Either::A { | ||
1360 | Either::A => (), | ||
1361 | Either::B() => (), | ||
1362 | } | ||
1363 | } | ||
1364 | "; | ||
1365 | |||
1366 | // We are testing to be sure we don't panic here when the match | ||
1367 | // arm `Either::B` is missing its pattern. | ||
1368 | check_no_diagnostic(content); | ||
1369 | } | ||
1370 | |||
1371 | #[test] | ||
1328 | fn enum_not_in_scope() { | 1372 | fn enum_not_in_scope() { |
1329 | let content = r" | 1373 | let content = r" |
1330 | fn test_fn() { | 1374 | fn test_fn() { |
diff --git a/crates/ra_hir_ty/src/expr.rs b/crates/ra_hir_ty/src/expr.rs index b4592fbf5..e45e9ea14 100644 --- a/crates/ra_hir_ty/src/expr.rs +++ b/crates/ra_hir_ty/src/expr.rs | |||
@@ -4,8 +4,7 @@ use std::sync::Arc; | |||
4 | 4 | ||
5 | use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId}; | 5 | use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId}; |
6 | use hir_expand::diagnostics::DiagnosticSink; | 6 | use hir_expand::diagnostics::DiagnosticSink; |
7 | use ra_syntax::ast; | 7 | use ra_syntax::{ast, AstPtr}; |
8 | use ra_syntax::AstPtr; | ||
9 | use rustc_hash::FxHashSet; | 8 | use rustc_hash::FxHashSet; |
10 | 9 | ||
11 | use crate::{ | 10 | use crate::{ |
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs index e4a103d1b..608408d88 100644 --- a/crates/ra_hir_ty/src/tests.rs +++ b/crates/ra_hir_ty/src/tests.rs | |||
@@ -319,3 +319,33 @@ fn no_such_field_diagnostics() { | |||
319 | "### | 319 | "### |
320 | ); | 320 | ); |
321 | } | 321 | } |
322 | |||
323 | #[test] | ||
324 | fn no_such_field_with_feature_flag_diagnostics() { | ||
325 | let diagnostics = TestDB::with_files( | ||
326 | r#" | ||
327 | //- /lib.rs crate:foo cfg:feature=foo | ||
328 | struct MyStruct { | ||
329 | my_val: usize, | ||
330 | #[cfg(feature = "foo")] | ||
331 | bar: bool, | ||
332 | } | ||
333 | |||
334 | impl MyStruct { | ||
335 | #[cfg(feature = "foo")] | ||
336 | pub(crate) fn new(my_val: usize, bar: bool) -> Self { | ||
337 | Self { my_val, bar } | ||
338 | } | ||
339 | |||
340 | #[cfg(not(feature = "foo"))] | ||
341 | pub(crate) fn new(my_val: usize, _bar: bool) -> Self { | ||
342 | Self { my_val } | ||
343 | } | ||
344 | } | ||
345 | "#, | ||
346 | ) | ||
347 | .diagnostics() | ||
348 | .0; | ||
349 | |||
350 | assert_snapshot!(diagnostics, @r###""###); | ||
351 | } | ||
diff --git a/crates/ra_proc_macro_srv/Cargo.toml b/crates/ra_proc_macro_srv/Cargo.toml index 6300d668a..f08de5fc7 100644 --- a/crates/ra_proc_macro_srv/Cargo.toml +++ b/crates/ra_proc_macro_srv/Cargo.toml | |||
@@ -10,9 +10,9 @@ doctest = false | |||
10 | 10 | ||
11 | [dependencies] | 11 | [dependencies] |
12 | ra_tt = { path = "../ra_tt" } | 12 | ra_tt = { path = "../ra_tt" } |
13 | ra_mbe = { path = "../ra_mbe" } | ||
13 | ra_proc_macro = { path = "../ra_proc_macro" } | 14 | ra_proc_macro = { path = "../ra_proc_macro" } |
14 | 15 | ||
15 | |||
16 | [dev-dependencies] | 16 | [dev-dependencies] |
17 | cargo_metadata = "0.9.1" | 17 | cargo_metadata = "0.9.1" |
18 | difference = "2.0.0" | 18 | difference = "2.0.0" |
diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs index 80cfa1174..f376df236 100644 --- a/crates/ra_proc_macro_srv/src/lib.rs +++ b/crates/ra_proc_macro_srv/src/lib.rs | |||
@@ -14,6 +14,9 @@ | |||
14 | #[doc(hidden)] | 14 | #[doc(hidden)] |
15 | mod proc_macro; | 15 | mod proc_macro; |
16 | 16 | ||
17 | #[doc(hidden)] | ||
18 | mod rustc_server; | ||
19 | |||
17 | use proc_macro::bridge::client::TokenStream; | 20 | use proc_macro::bridge::client::TokenStream; |
18 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; | 21 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; |
19 | 22 | ||
diff --git a/crates/ra_proc_macro_srv/src/rustc_server.rs b/crates/ra_proc_macro_srv/src/rustc_server.rs new file mode 100644 index 000000000..92d1fd989 --- /dev/null +++ b/crates/ra_proc_macro_srv/src/rustc_server.rs | |||
@@ -0,0 +1,684 @@ | |||
1 | //! Rustc proc-macro server implementation with ra_tt | ||
2 | //! | ||
3 | //! Based on idea from https://github.com/fedochet/rust-proc-macro-expander | ||
4 | //! The lib-proc-macro server backend is `TokenStream`-agnostic, such that | ||
5 | //! we could provide any TokenStream implementation. | ||
6 | //! The original idea from fedochet is using proc-macro2 as backend, | ||
7 | //! we use ra_tt instead for better integration with RA. | ||
8 | //! | ||
9 | //! FIXME: No span and source file information is implemented yet | ||
10 | |||
11 | use crate::proc_macro::bridge::{self, server}; | ||
12 | use ra_tt as tt; | ||
13 | |||
14 | use std::collections::{Bound, HashMap}; | ||
15 | use std::hash::Hash; | ||
16 | use std::iter::FromIterator; | ||
17 | use std::str::FromStr; | ||
18 | use std::{ascii, vec::IntoIter}; | ||
19 | |||
20 | type Group = tt::Subtree; | ||
21 | type TokenTree = tt::TokenTree; | ||
22 | type Punct = tt::Punct; | ||
23 | type Spacing = tt::Spacing; | ||
24 | type Literal = tt::Literal; | ||
25 | type Span = tt::TokenId; | ||
26 | |||
27 | #[derive(Debug, Clone)] | ||
28 | pub struct TokenStream { | ||
29 | pub subtree: tt::Subtree, | ||
30 | } | ||
31 | |||
32 | impl TokenStream { | ||
33 | pub fn new() -> Self { | ||
34 | TokenStream { subtree: Default::default() } | ||
35 | } | ||
36 | |||
37 | pub fn is_empty(&self) -> bool { | ||
38 | self.subtree.token_trees.is_empty() | ||
39 | } | ||
40 | } | ||
41 | |||
42 | /// Creates a token stream containing a single token tree. | ||
43 | impl From<TokenTree> for TokenStream { | ||
44 | fn from(tree: TokenTree) -> TokenStream { | ||
45 | TokenStream { subtree: tt::Subtree { delimiter: None, token_trees: vec![tree] } } | ||
46 | } | ||
47 | } | ||
48 | |||
49 | /// Collects a number of token trees into a single stream. | ||
50 | impl FromIterator<TokenTree> for TokenStream { | ||
51 | fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self { | ||
52 | trees.into_iter().map(TokenStream::from).collect() | ||
53 | } | ||
54 | } | ||
55 | |||
56 | /// A "flattening" operation on token streams, collects token trees | ||
57 | /// from multiple token streams into a single stream. | ||
58 | impl FromIterator<TokenStream> for TokenStream { | ||
59 | fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self { | ||
60 | let mut builder = TokenStreamBuilder::new(); | ||
61 | streams.into_iter().for_each(|stream| builder.push(stream)); | ||
62 | builder.build() | ||
63 | } | ||
64 | } | ||
65 | |||
66 | impl Extend<TokenTree> for TokenStream { | ||
67 | fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) { | ||
68 | self.extend(trees.into_iter().map(TokenStream::from)); | ||
69 | } | ||
70 | } | ||
71 | |||
72 | impl Extend<TokenStream> for TokenStream { | ||
73 | fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { | ||
74 | for item in streams { | ||
75 | self.subtree.token_trees.extend(&mut item.into_iter()) | ||
76 | } | ||
77 | } | ||
78 | } | ||
79 | |||
80 | type Level = crate::proc_macro::Level; | ||
81 | type LineColumn = crate::proc_macro::LineColumn; | ||
82 | type SourceFile = crate::proc_macro::SourceFile; | ||
83 | |||
84 | /// A structure representing a diagnostic message and associated children | ||
85 | /// messages. | ||
86 | #[derive(Clone, Debug)] | ||
87 | pub struct Diagnostic { | ||
88 | level: Level, | ||
89 | message: String, | ||
90 | spans: Vec<Span>, | ||
91 | children: Vec<Diagnostic>, | ||
92 | } | ||
93 | |||
94 | impl Diagnostic { | ||
95 | /// Creates a new diagnostic with the given `level` and `message`. | ||
96 | pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic { | ||
97 | Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } | ||
98 | } | ||
99 | } | ||
100 | |||
101 | // Rustc Server Ident has to be `Copyable` | ||
102 | // We use a stub here for bypassing | ||
103 | #[derive(Hash, Eq, PartialEq, Copy, Clone)] | ||
104 | pub struct IdentId(u32); | ||
105 | |||
106 | #[derive(Clone, Hash, Eq, PartialEq)] | ||
107 | struct IdentData(tt::Ident); | ||
108 | |||
109 | #[derive(Default)] | ||
110 | struct IdentInterner { | ||
111 | idents: HashMap<IdentData, u32>, | ||
112 | ident_data: Vec<IdentData>, | ||
113 | } | ||
114 | |||
115 | impl IdentInterner { | ||
116 | fn intern(&mut self, data: &IdentData) -> u32 { | ||
117 | if let Some(index) = self.idents.get(data) { | ||
118 | return *index; | ||
119 | } | ||
120 | |||
121 | let index = self.idents.len() as u32; | ||
122 | self.ident_data.push(data.clone()); | ||
123 | self.idents.insert(data.clone(), index); | ||
124 | index | ||
125 | } | ||
126 | |||
127 | fn get(&self, index: u32) -> &IdentData { | ||
128 | &self.ident_data[index as usize] | ||
129 | } | ||
130 | |||
131 | #[allow(unused)] | ||
132 | fn get_mut(&mut self, index: u32) -> &mut IdentData { | ||
133 | self.ident_data.get_mut(index as usize).expect("Should be consistent") | ||
134 | } | ||
135 | } | ||
136 | |||
137 | pub struct TokenStreamBuilder { | ||
138 | acc: TokenStream, | ||
139 | } | ||
140 | |||
141 | /// Public implementation details for the `TokenStream` type, such as iterators. | ||
142 | pub mod token_stream { | ||
143 | use super::{tt, TokenStream, TokenTree}; | ||
144 | use std::str::FromStr; | ||
145 | |||
146 | /// An iterator over `TokenStream`'s `TokenTree`s. | ||
147 | /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, | ||
148 | /// and returns whole groups as token trees. | ||
149 | impl IntoIterator for TokenStream { | ||
150 | type Item = TokenTree; | ||
151 | type IntoIter = super::IntoIter<TokenTree>; | ||
152 | |||
153 | fn into_iter(self) -> Self::IntoIter { | ||
154 | self.subtree.token_trees.into_iter() | ||
155 | } | ||
156 | } | ||
157 | |||
158 | type LexError = String; | ||
159 | |||
160 | /// Attempts to break the string into tokens and parse those tokens into a token stream. | ||
161 | /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters | ||
162 | /// or characters not existing in the language. | ||
163 | /// All tokens in the parsed stream get `Span::call_site()` spans. | ||
164 | /// | ||
165 | /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to | ||
166 | /// change these errors into `LexError`s later. | ||
167 | impl FromStr for TokenStream { | ||
168 | type Err = LexError; | ||
169 | |||
170 | fn from_str(src: &str) -> Result<TokenStream, LexError> { | ||
171 | let (subtree, _token_map) = | ||
172 | ra_mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; | ||
173 | |||
174 | let tt: tt::TokenTree = subtree.into(); | ||
175 | Ok(tt.into()) | ||
176 | } | ||
177 | } | ||
178 | |||
179 | impl ToString for TokenStream { | ||
180 | fn to_string(&self) -> String { | ||
181 | let tt = self.subtree.clone().into(); | ||
182 | to_text(&tt) | ||
183 | } | ||
184 | } | ||
185 | |||
186 | fn to_text(tkn: &tt::TokenTree) -> String { | ||
187 | match tkn { | ||
188 | tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.text.clone().into(), | ||
189 | tt::TokenTree::Leaf(tt::Leaf::Literal(literal)) => literal.text.clone().into(), | ||
190 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => format!("{}", punct.char), | ||
191 | tt::TokenTree::Subtree(subtree) => { | ||
192 | let content = subtree | ||
193 | .token_trees | ||
194 | .iter() | ||
195 | .map(|tkn| { | ||
196 | let s = to_text(tkn); | ||
197 | if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn { | ||
198 | if punct.spacing == tt::Spacing::Alone { | ||
199 | return s + " "; | ||
200 | } | ||
201 | } | ||
202 | s | ||
203 | }) | ||
204 | .collect::<Vec<_>>() | ||
205 | .concat(); | ||
206 | let (open, close) = match subtree.delimiter.map(|it| it.kind) { | ||
207 | None => ("", ""), | ||
208 | Some(tt::DelimiterKind::Brace) => ("{", "}"), | ||
209 | Some(tt::DelimiterKind::Parenthesis) => ("(", ")"), | ||
210 | Some(tt::DelimiterKind::Bracket) => ("[", "]"), | ||
211 | }; | ||
212 | format!("{}{}{}", open, content, close) | ||
213 | } | ||
214 | } | ||
215 | } | ||
216 | } | ||
217 | |||
218 | impl TokenStreamBuilder { | ||
219 | fn new() -> TokenStreamBuilder { | ||
220 | TokenStreamBuilder { acc: TokenStream::new() } | ||
221 | } | ||
222 | |||
223 | fn push(&mut self, stream: TokenStream) { | ||
224 | self.acc.extend(stream.into_iter()) | ||
225 | } | ||
226 | |||
227 | fn build(self) -> TokenStream { | ||
228 | self.acc | ||
229 | } | ||
230 | } | ||
231 | |||
232 | #[derive(Clone)] | ||
233 | pub struct TokenStreamIter { | ||
234 | trees: IntoIter<TokenTree>, | ||
235 | } | ||
236 | |||
237 | #[derive(Default)] | ||
238 | pub struct Rustc { | ||
239 | ident_interner: IdentInterner, | ||
240 | // FIXME: store span information here. | ||
241 | } | ||
242 | |||
243 | impl server::Types for Rustc { | ||
244 | type TokenStream = TokenStream; | ||
245 | type TokenStreamBuilder = TokenStreamBuilder; | ||
246 | type TokenStreamIter = TokenStreamIter; | ||
247 | type Group = Group; | ||
248 | type Punct = Punct; | ||
249 | type Ident = IdentId; | ||
250 | type Literal = Literal; | ||
251 | type SourceFile = SourceFile; | ||
252 | type Diagnostic = Diagnostic; | ||
253 | type Span = Span; | ||
254 | type MultiSpan = Vec<Span>; | ||
255 | } | ||
256 | |||
257 | impl server::TokenStream for Rustc { | ||
258 | fn new(&mut self) -> Self::TokenStream { | ||
259 | Self::TokenStream::new() | ||
260 | } | ||
261 | |||
262 | fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { | ||
263 | stream.is_empty() | ||
264 | } | ||
265 | fn from_str(&mut self, src: &str) -> Self::TokenStream { | ||
266 | Self::TokenStream::from_str(src).expect("cannot parse string") | ||
267 | } | ||
268 | fn to_string(&mut self, stream: &Self::TokenStream) -> String { | ||
269 | stream.to_string() | ||
270 | } | ||
271 | fn from_token_tree( | ||
272 | &mut self, | ||
273 | tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>, | ||
274 | ) -> Self::TokenStream { | ||
275 | match tree { | ||
276 | bridge::TokenTree::Group(group) => { | ||
277 | let tree = TokenTree::from(group); | ||
278 | Self::TokenStream::from_iter(vec![tree]) | ||
279 | } | ||
280 | |||
281 | bridge::TokenTree::Ident(IdentId(index)) => { | ||
282 | let IdentData(ident) = self.ident_interner.get(index).clone(); | ||
283 | let ident: tt::Ident = ident; | ||
284 | let leaf = tt::Leaf::from(ident); | ||
285 | let tree = TokenTree::from(leaf); | ||
286 | Self::TokenStream::from_iter(vec![tree]) | ||
287 | } | ||
288 | |||
289 | bridge::TokenTree::Literal(literal) => { | ||
290 | let leaf = tt::Leaf::from(literal); | ||
291 | let tree = TokenTree::from(leaf); | ||
292 | Self::TokenStream::from_iter(vec![tree]) | ||
293 | } | ||
294 | |||
295 | bridge::TokenTree::Punct(p) => { | ||
296 | let leaf = tt::Leaf::from(p); | ||
297 | let tree = TokenTree::from(leaf); | ||
298 | Self::TokenStream::from_iter(vec![tree]) | ||
299 | } | ||
300 | } | ||
301 | } | ||
302 | |||
303 | fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter { | ||
304 | let trees: Vec<TokenTree> = stream.into_iter().collect(); | ||
305 | TokenStreamIter { trees: trees.into_iter() } | ||
306 | } | ||
307 | } | ||
308 | |||
// Thin delegation: the bridge drives our `TokenStreamBuilder` directly.
impl server::TokenStreamBuilder for Rustc {
    fn new(&mut self) -> Self::TokenStreamBuilder {
        Self::TokenStreamBuilder::new()
    }
    /// Appends `stream` to the builder's accumulated stream.
    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
        builder.push(stream)
    }
    /// Finalizes the builder into a single stream.
    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
        builder.build()
    }
}
320 | |||
impl server::TokenStreamIter for Rustc {
    /// Yields the next token tree, translated into the bridge's representation.
    fn next(
        &mut self,
        iter: &mut Self::TokenStreamIter,
    ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
        iter.trees.next().map(|tree| match tree {
            TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
            // Idents cross the bridge as interned indices, not by value.
            TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
                bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
            }
            TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
            TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
        })
    }
}
336 | |||
337 | fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> { | ||
338 | let kind = match d { | ||
339 | bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, | ||
340 | bridge::Delimiter::Brace => tt::DelimiterKind::Brace, | ||
341 | bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket, | ||
342 | bridge::Delimiter::None => return None, | ||
343 | }; | ||
344 | Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) | ||
345 | } | ||
346 | |||
347 | fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter { | ||
348 | match d.map(|it| it.kind) { | ||
349 | Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis, | ||
350 | Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace, | ||
351 | Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket, | ||
352 | None => bridge::Delimiter::None, | ||
353 | } | ||
354 | } | ||
355 | |||
356 | fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing { | ||
357 | match spacing { | ||
358 | bridge::Spacing::Alone => Spacing::Alone, | ||
359 | bridge::Spacing::Joint => Spacing::Joint, | ||
360 | } | ||
361 | } | ||
362 | |||
363 | fn spacing_to_external(spacing: Spacing) -> bridge::Spacing { | ||
364 | match spacing { | ||
365 | Spacing::Alone => bridge::Spacing::Alone, | ||
366 | Spacing::Joint => bridge::Spacing::Joint, | ||
367 | } | ||
368 | } | ||
369 | |||
impl server::Group for Rustc {
    /// Builds a group from a delimiter and the stream it encloses.
    fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
        Self::Group {
            delimiter: delim_to_internal(delimiter),
            token_trees: stream.subtree.token_trees,
        }
    }
    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
        delim_to_external(group.delimiter)
    }

    // NOTE: the returned stream does not include the group's delimiter.
    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
        TokenStream {
            subtree: tt::Subtree { delimiter: None, token_trees: group.token_trees.clone() },
        }
    }

    /// Uses the delimiter's token id as the group's span, if there is one.
    fn span(&mut self, group: &Self::Group) -> Self::Span {
        group.delimiter.map(|it| it.id).unwrap_or_else(|| tt::TokenId::unspecified())
    }

    fn set_span(&mut self, _group: &mut Self::Group, _span: Self::Span) {
        // FIXME handle span: the requested span is currently discarded.
    }

    fn span_open(&mut self, _group: &Self::Group) -> Self::Span {
        // FIXME handle span
        // MySpan(self.span_interner.intern(&MySpanData(group.span_open())))
        tt::TokenId::unspecified()
    }

    fn span_close(&mut self, _group: &Self::Group) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
}
407 | |||
impl server::Punct for Rustc {
    /// Creates a punctuation token; spans are not tracked yet.
    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
        tt::Punct {
            char: ch,
            spacing: spacing_to_internal(spacing),
            id: tt::TokenId::unspecified(),
        }
    }
    fn as_char(&mut self, punct: Self::Punct) -> char {
        punct.char
    }
    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
        spacing_to_external(punct.spacing)
    }
    fn span(&mut self, _punct: Self::Punct) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn with_span(&mut self, punct: Self::Punct, _span: Self::Span) -> Self::Punct {
        // FIXME handle span: the requested span is currently discarded.
        punct
    }
}
431 | |||
impl server::Ident for Rustc {
    /// Interns the identifier and hands back its index as an `IdentId`.
    // FIXME: `_is_raw` is ignored, so raw identifiers (`r#foo`) lose their
    // `r#` prefix when round-tripped through the bridge.
    fn new(&mut self, string: &str, _span: Self::Span, _is_raw: bool) -> Self::Ident {
        IdentId(
            self.ident_interner.intern(&IdentData(tt::Ident {
                text: string.into(),
                id: tt::TokenId::unspecified(),
            })),
        )
    }

    fn span(&mut self, _ident: Self::Ident) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn with_span(&mut self, ident: Self::Ident, _span: Self::Span) -> Self::Ident {
        // FIXME handle span: the requested span is currently discarded.
        ident
    }
}
451 | |||
452 | impl server::Literal for Rustc { | ||
453 | // FIXME(eddyb) `Literal` should not expose internal `Debug` impls. | ||
454 | fn debug(&mut self, literal: &Self::Literal) -> String { | ||
455 | format!("{:?}", literal) | ||
456 | } | ||
457 | |||
458 | fn integer(&mut self, n: &str) -> Self::Literal { | ||
459 | let n: i128 = n.parse().unwrap(); | ||
460 | Literal { text: n.to_string().into(), id: tt::TokenId::unspecified() } | ||
461 | } | ||
462 | |||
463 | fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { | ||
464 | macro_rules! def_suffixed_integer { | ||
465 | ($kind:ident, $($ty:ty),*) => { | ||
466 | match $kind { | ||
467 | $( | ||
468 | stringify!($ty) => { | ||
469 | let n: $ty = n.parse().unwrap(); | ||
470 | format!(concat!("{}", stringify!($ty)), n) | ||
471 | } | ||
472 | )* | ||
473 | _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind), | ||
474 | } | ||
475 | } | ||
476 | } | ||
477 | |||
478 | let text = | ||
479 | def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128}; | ||
480 | |||
481 | Literal { text: text.into(), id: tt::TokenId::unspecified() } | ||
482 | } | ||
483 | |||
484 | fn float(&mut self, n: &str) -> Self::Literal { | ||
485 | let n: f64 = n.parse().unwrap(); | ||
486 | let mut text = f64::to_string(&n); | ||
487 | if !text.contains('.') { | ||
488 | text += ".0" | ||
489 | } | ||
490 | Literal { text: text.into(), id: tt::TokenId::unspecified() } | ||
491 | } | ||
492 | |||
493 | fn f32(&mut self, n: &str) -> Self::Literal { | ||
494 | let n: f32 = n.parse().unwrap(); | ||
495 | let text = format!("{}f32", n); | ||
496 | Literal { text: text.into(), id: tt::TokenId::unspecified() } | ||
497 | } | ||
498 | |||
499 | fn f64(&mut self, n: &str) -> Self::Literal { | ||
500 | let n: f64 = n.parse().unwrap(); | ||
501 | let text = format!("{}f64", n); | ||
502 | Literal { text: text.into(), id: tt::TokenId::unspecified() } | ||
503 | } | ||
504 | |||
505 | fn string(&mut self, string: &str) -> Self::Literal { | ||
506 | let mut escaped = String::new(); | ||
507 | for ch in string.chars() { | ||
508 | escaped.extend(ch.escape_debug()); | ||
509 | } | ||
510 | Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } | ||
511 | } | ||
512 | |||
513 | fn character(&mut self, ch: char) -> Self::Literal { | ||
514 | Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } | ||
515 | } | ||
516 | |||
517 | fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { | ||
518 | let string = bytes | ||
519 | .iter() | ||
520 | .cloned() | ||
521 | .flat_map(ascii::escape_default) | ||
522 | .map(Into::<char>::into) | ||
523 | .collect::<String>(); | ||
524 | |||
525 | Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } | ||
526 | } | ||
527 | |||
528 | fn span(&mut self, literal: &Self::Literal) -> Self::Span { | ||
529 | literal.id | ||
530 | } | ||
531 | |||
532 | fn set_span(&mut self, _literal: &mut Self::Literal, _span: Self::Span) { | ||
533 | // FIXME handle span | ||
534 | } | ||
535 | |||
536 | fn subspan( | ||
537 | &mut self, | ||
538 | _literal: &Self::Literal, | ||
539 | _start: Bound<usize>, | ||
540 | _end: Bound<usize>, | ||
541 | ) -> Option<Self::Span> { | ||
542 | // FIXME handle span | ||
543 | None | ||
544 | } | ||
545 | } | ||
546 | |||
// Delegates straight to the underlying `SourceFile` type.
impl server::SourceFile for Rustc {
    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
        file1.eq(file2)
    }
    fn path(&mut self, file: &Self::SourceFile) -> String {
        String::from(
            file.path().to_str().expect("non-UTF8 file path in `proc_macro::SourceFile::path`"),
        )
    }
    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
        file.is_real()
    }
}
560 | |||
impl server::Diagnostic for Rustc {
    /// Creates a diagnostic carrying the given spans.
    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
        let mut diag = Diagnostic::new(level, msg);
        diag.spans = spans;
        diag
    }

    fn sub(
        &mut self,
        _diag: &mut Self::Diagnostic,
        _level: Level,
        _msg: &str,
        _spans: Self::MultiSpan,
    ) {
        // FIXME handle diagnostic: sub-diagnostics are silently dropped.
    }

    fn emit(&mut self, _diag: Self::Diagnostic) {
        // FIXME handle diagnostic: nothing is reported to the user yet.
        // diag.emit()
    }
}
584 | |||
// Spans are plain `tt::TokenId`s; nearly every operation here is a stub
// returning an unspecified id or a dummy value until real span information
// is tracked (see the FIXMEs below).
impl server::Span for Rustc {
    fn debug(&mut self, span: Self::Span) -> String {
        format!("{:?}", span.0)
    }
    fn def_site(&mut self) -> Self::Span {
        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn call_site(&mut self) -> Self::Span {
        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
        // let MySpanData(span) = self.span_interner.get(span.0);
        unimplemented!()
    }

    /// Recent feature, not yet in the proc_macro
    ///
    /// See PR:
    /// https://github.com/rust-lang/rust/pull/55780
    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
        None
    }

    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
        // FIXME handle span
        None
    }
    fn source(&mut self, span: Self::Span) -> Self::Span {
        // FIXME handle span: returns the span itself unchanged.
        span
    }
    fn start(&mut self, _span: Self::Span) -> LineColumn {
        // FIXME handle span
        LineColumn { line: 0, column: 0 }
    }
    fn end(&mut self, _span: Self::Span) -> LineColumn {
        // FIXME handle span
        LineColumn { line: 0, column: 0 }
    }
    fn join(&mut self, _first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
        // FIXME handle span
        None
    }
    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }

    fn mixed_site(&mut self) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
}
641 | |||
impl server::MultiSpan for Rustc {
    fn new(&mut self) -> Self::MultiSpan {
        // FIXME handle span
        vec![]
    }

    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
        // TODO: handle span (was "//TODP" typo)
        other.push(span)
    }
}
653 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::proc_macro::bridge::server::Literal;

    /// Smoke-tests the textual rendering of every literal constructor.
    #[test]
    fn test_rustc_server_literals() {
        let mut srv = Rustc { ident_interner: IdentInterner::default() };
        assert_eq!(srv.integer("1234").text, "1234");

        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
        // `float` must normalize whole numbers to have a decimal point.
        assert_eq!(srv.float("0").text, "0.0");
        assert_eq!(srv.float("15684.5867").text, "15684.5867");
        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
        assert_eq!(srv.f64("15684.58").text, "15684.58f64");

        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
        assert_eq!(srv.character('c').text, "'c'");
        // Non-ASCII bytes are escaped in byte-string literals.
        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
    }
}
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc index 911163eb7..abd126340 100644 --- a/docs/user/readme.adoc +++ b/docs/user/readme.adoc | |||
@@ -187,30 +187,7 @@ Prerequisites: | |||
187 | 187 | ||
188 | `LSP` package. | 188 | `LSP` package. |
189 | 189 | ||
190 | Installation: | 190 | Invoke the command palette (`ctrl+shift+p`), type "LSP enable", choose whether to enable the rust-analyzer LSP locally or globally, and then select rust-analyzer.
191 | |||
192 | 1. Invoke the command palette with <kbd>Ctrl+Shift+P</kbd> | ||
193 | 2. Type `LSP Settings` to open the LSP preferences editor | ||
194 | 3. Add the following LSP client definition to your settings: | ||
195 | + | ||
196 | [source,json] | ||
197 | ---- | ||
198 | "rust-analyzer": { | ||
199 | "command": ["rust-analyzer"], | ||
200 | "languageId": "rust", | ||
201 | "scopes": ["source.rust"], | ||
202 | "syntaxes": [ | ||
203 | "Packages/Rust/Rust.sublime-syntax", | ||
204 | "Packages/Rust Enhanced/RustEnhanced.sublime-syntax" | ||
205 | ], | ||
206 | "initializationOptions": { | ||
207 | "featureFlags": { | ||
208 | } | ||
209 | }, | ||
210 | } | ||
211 | ---- | ||
212 | |||
213 | 4. You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer) | ||
214 | 191 | ||
215 | == Usage | 192 | == Usage |
216 | 193 | ||
diff --git a/editors/code/package.json b/editors/code/package.json index 94edc6eeb..0bf7b6ae6 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -21,7 +21,7 @@ | |||
21 | "Programming Languages" | 21 | "Programming Languages" |
22 | ], | 22 | ], |
23 | "engines": { | 23 | "engines": { |
24 | "vscode": "^1.43.0" | 24 | "vscode": "^1.44.0" |
25 | }, | 25 | }, |
26 | "enableProposedApi": true, | 26 | "enableProposedApi": true, |
27 | "scripts": { | 27 | "scripts": { |
@@ -342,11 +342,6 @@ | |||
342 | "default": true, | 342 | "default": true, |
343 | "description": "Show function name and docs in parameter hints" | 343 | "description": "Show function name and docs in parameter hints" |
344 | }, | 344 | }, |
345 | "rust-analyzer.highlighting.semanticTokens": { | ||
346 | "type": "boolean", | ||
347 | "default": false, | ||
348 | "description": "Use proposed semantic tokens API for syntax highlighting" | ||
349 | }, | ||
350 | "rust-analyzer.updates.channel": { | 345 | "rust-analyzer.updates.channel": { |
351 | "type": "string", | 346 | "type": "string", |
352 | "enum": [ | 347 | "enum": [ |
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 3b1d00bca..0ad4b63ae 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts | |||
@@ -1,11 +1,10 @@ | |||
1 | import * as lc from 'vscode-languageclient'; | 1 | import * as lc from 'vscode-languageclient'; |
2 | import * as vscode from 'vscode'; | 2 | import * as vscode from 'vscode'; |
3 | 3 | ||
4 | import { Config } from './config'; | ||
5 | import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed'; | 4 | import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed'; |
6 | import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed'; | 5 | import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed'; |
7 | 6 | ||
8 | export async function createClient(config: Config, serverPath: string, cwd: string): Promise<lc.LanguageClient> { | 7 | export async function createClient(serverPath: string, cwd: string): Promise<lc.LanguageClient> { |
9 | // '.' Is the fallback if no folder is open | 8 | // '.' Is the fallback if no folder is open |
10 | // TODO?: Workspace folders support Uri's (eg: file://test.txt). | 9 | // TODO?: Workspace folders support Uri's (eg: file://test.txt). |
11 | // It might be a good idea to test if the uri points to a file. | 10 | // It might be a good idea to test if the uri points to a file. |
@@ -73,15 +72,12 @@ export async function createClient(config: Config, serverPath: string, cwd: stri | |||
73 | }; | 72 | }; |
74 | 73 | ||
75 | // To turn on all proposed features use: res.registerProposedFeatures(); | 74 | // To turn on all proposed features use: res.registerProposedFeatures(); |
76 | // Here we want to just enable CallHierarchyFeature since it is available on stable. | 75 | // Here we want to enable CallHierarchyFeature and SemanticTokensFeature |
77 | // Note that while the CallHierarchyFeature is stable the LSP protocol is not. | 76 | // since they are available on stable. |
77 | // Note that while these features are stable in vscode their LSP protocol | ||
78 | // implementations are still in the "proposed" category for 3.16. | ||
78 | res.registerFeature(new CallHierarchyFeature(res)); | 79 | res.registerFeature(new CallHierarchyFeature(res)); |
79 | 80 | res.registerFeature(new SemanticTokensFeature(res)); | |
80 | if (config.package.enableProposedApi) { | ||
81 | if (config.highlightingSemanticTokens) { | ||
82 | res.registerFeature(new SemanticTokensFeature(res)); | ||
83 | } | ||
84 | } | ||
85 | 81 | ||
86 | return res; | 82 | return res; |
87 | } | 83 | } |
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 1f45f1de0..21c1c9f23 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts | |||
@@ -69,7 +69,6 @@ export class Config { | |||
69 | get serverPath() { return this.cfg.get<null | string>("serverPath")!; } | 69 | get serverPath() { return this.cfg.get<null | string>("serverPath")!; } |
70 | get channel() { return this.cfg.get<UpdatesChannel>("updates.channel")!; } | 70 | get channel() { return this.cfg.get<UpdatesChannel>("updates.channel")!; } |
71 | get askBeforeDownload() { return this.cfg.get<boolean>("updates.askBeforeDownload")!; } | 71 | get askBeforeDownload() { return this.cfg.get<boolean>("updates.askBeforeDownload")!; } |
72 | get highlightingSemanticTokens() { return this.cfg.get<boolean>("highlighting.semanticTokens")!; } | ||
73 | get traceExtension() { return this.cfg.get<boolean>("trace.extension")!; } | 72 | get traceExtension() { return this.cfg.get<boolean>("trace.extension")!; } |
74 | 73 | ||
75 | get inlayHints() { | 74 | get inlayHints() { |
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index bd1c3de07..f7ed62d03 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts | |||
@@ -21,7 +21,7 @@ export class Ctx { | |||
21 | serverPath: string, | 21 | serverPath: string, |
22 | cwd: string, | 22 | cwd: string, |
23 | ): Promise<Ctx> { | 23 | ): Promise<Ctx> { |
24 | const client = await createClient(config, serverPath, cwd); | 24 | const client = await createClient(serverPath, cwd); |
25 | const res = new Ctx(config, extCtx, client, serverPath); | 25 | const res = new Ctx(config, extCtx, client, serverPath); |
26 | res.pushCleanup(client.start()); | 26 | res.pushCleanup(client.start()); |
27 | await client.onReady(); | 27 | await client.onReady(); |