Diffstat (limited to 'crates')
 56 files changed, 1031 insertions(+), 392 deletions(-)
diff --git a/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs b/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs
index 864373aa5..0621487e8 100644
--- a/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs
+++ b/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs
@@ -98,7 +98,7 @@ fn already_has_from_impl( | |||
98 | }; | 98 | }; |
99 | let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db); | 99 | let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db); |
100 | 100 | ||
101 | e_ty.impls_trait(sema.db, from_trait, &[var_ty.clone()]) | 101 | e_ty.impls_trait(sema.db, from_trait, &[var_ty]) |
102 | } | 102 | } |
103 | 103 | ||
104 | #[cfg(test)] | 104 | #[cfg(test)] |
diff --git a/crates/ra_assists/src/handlers/introduce_variable.rs b/crates/ra_assists/src/handlers/introduce_variable.rs
index 8d0f7e922..8c09e6bcd 100644
--- a/crates/ra_assists/src/handlers/introduce_variable.rs
+++ b/crates/ra_assists/src/handlers/introduce_variable.rs
@@ -124,7 +124,7 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> { | |||
124 | } | 124 | } |
125 | } | 125 | } |
126 | 126 | ||
127 | if ast::Stmt::cast(node.clone().into()).is_some() { | 127 | if ast::Stmt::cast(node.clone()).is_some() { |
128 | return Some((node, false)); | 128 | return Some((node, false)); |
129 | } | 129 | } |
130 | 130 | ||
diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs
index ef0ce0586..4be1238f1 100644
--- a/crates/ra_assists/src/handlers/merge_imports.rs
+++ b/crates/ra_assists/src/handlers/merge_imports.rs
@@ -30,7 +30,7 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> { | |||
30 | .filter_map(|dir| neighbor(&use_item, dir)) | 30 | .filter_map(|dir| neighbor(&use_item, dir)) |
31 | .filter_map(|it| Some((it.clone(), it.use_tree()?))) | 31 | .filter_map(|it| Some((it.clone(), it.use_tree()?))) |
32 | .find_map(|(use_item, use_tree)| { | 32 | .find_map(|(use_item, use_tree)| { |
33 | Some((try_merge_trees(&tree, &use_tree)?, use_item.clone())) | 33 | Some((try_merge_trees(&tree, &use_tree)?, use_item)) |
34 | })?; | 34 | })?; |
35 | 35 | ||
36 | rewriter.replace_ast(&tree, &merged); | 36 | rewriter.replace_ast(&tree, &merged); |
diff --git a/crates/ra_assists/src/handlers/split_import.rs b/crates/ra_assists/src/handlers/split_import.rs
index d9244f22d..f25826796 100644
--- a/crates/ra_assists/src/handlers/split_import.rs
+++ b/crates/ra_assists/src/handlers/split_import.rs
@@ -37,7 +37,7 @@ pub(crate) fn split_import(ctx: AssistCtx) -> Option<Assist> { | |||
37 | 37 | ||
38 | #[cfg(test)] | 38 | #[cfg(test)] |
39 | mod tests { | 39 | mod tests { |
40 | use crate::helpers::{check_assist, check_assist_target}; | 40 | use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target}; |
41 | 41 | ||
42 | use super::*; | 42 | use super::*; |
43 | 43 | ||
@@ -63,4 +63,9 @@ mod tests { | |||
63 | fn split_import_target() { | 63 | fn split_import_target() { |
64 | check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::"); | 64 | check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::"); |
65 | } | 65 | } |
66 | |||
67 | #[test] | ||
68 | fn issue4044() { | ||
69 | check_assist_not_applicable(split_import, "use crate::<|>:::self;") | ||
70 | } | ||
66 | } | 71 | } |
diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs
index 7777ce81e..8248684ee 100644
--- a/crates/ra_db/src/fixture.rs
+++ b/crates/ra_db/src/fixture.rs
@@ -235,7 +235,7 @@ fn parse_meta(meta: &str) -> ParsedMeta { | |||
235 | "env" => { | 235 | "env" => { |
236 | for key in value.split(',') { | 236 | for key in value.split(',') { |
237 | if let Some((k, v)) = split1(key, '=') { | 237 | if let Some((k, v)) = split1(key, '=') { |
238 | env.set(k.into(), v.into()); | 238 | env.set(k, v.into()); |
239 | } | 239 | } |
240 | } | 240 | } |
241 | } | 241 | } |
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index 5ddce98c6..ab14e2d5e 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -327,7 +327,7 @@ impl ExternSource { | |||
327 | self.extern_paths.iter().find_map(|(root_path, id)| { | 327 | self.extern_paths.iter().find_map(|(root_path, id)| { |
328 | if let Ok(rel_path) = path.strip_prefix(root_path) { | 328 | if let Ok(rel_path) = path.strip_prefix(root_path) { |
329 | let rel_path = RelativePathBuf::from_path(rel_path).ok()?; | 329 | let rel_path = RelativePathBuf::from_path(rel_path).ok()?; |
330 | Some((id.clone(), rel_path)) | 330 | Some((*id, rel_path)) |
331 | } else { | 331 | } else { |
332 | None | 332 | None |
333 | } | 333 | } |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 3801fce23..6e0d89466 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -759,6 +759,17 @@ impl MacroDef { | |||
759 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { | 759 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { |
760 | self.source(db).value.name().map(|it| it.as_name()) | 760 | self.source(db).value.name().map(|it| it.as_name()) |
761 | } | 761 | } |
762 | |||
763 | /// Indicate it is a proc-macro | ||
764 | pub fn is_proc_macro(&self) -> bool { | ||
765 | match self.id.kind { | ||
766 | hir_expand::MacroDefKind::Declarative => false, | ||
767 | hir_expand::MacroDefKind::BuiltIn(_) => false, | ||
768 | hir_expand::MacroDefKind::BuiltInDerive(_) => false, | ||
769 | hir_expand::MacroDefKind::BuiltInEager(_) => false, | ||
770 | hir_expand::MacroDefKind::CustomDerive(_) => true, | ||
771 | } | ||
772 | } | ||
762 | } | 773 | } |
763 | 774 | ||
764 | /// Invariant: `inner.as_assoc_item(db).is_some()` | 775 | /// Invariant: `inner.as_assoc_item(db).is_some()` |
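For context, a minimal sketch of how a caller might use the new `MacroDef::is_proc_macro` helper added above; the wrapper function and its policy are assumptions for illustration, not part of this commit:

    // Hypothetical caller (not part of this change): completion might skip
    // custom-derive proc macros, which cannot be handled like declarative macros.
    fn macro_label(db: &dyn HirDatabase, def: MacroDef) -> Option<Name> {
        if def.is_proc_macro() {
            return None; // assumed policy for this sketch
        }
        def.name(db)
    }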
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 0b477f0e9..5d6edc45c 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -195,6 +195,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
195 | self.analyze(field.syntax()).resolve_record_field(self.db, field) | 195 | self.analyze(field.syntax()).resolve_record_field(self.db, field) |
196 | } | 196 | } |
197 | 197 | ||
198 | pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<StructField> { | ||
199 | self.analyze(field.syntax()).resolve_record_field_pat(self.db, field) | ||
200 | } | ||
201 | |||
198 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { | 202 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { |
199 | let sa = self.analyze(macro_call.syntax()); | 203 | let sa = self.analyze(macro_call.syntax()); |
200 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | 204 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); |
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 23af400b8..0ed6d0958 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -95,6 +95,7 @@ impl SourceAnalyzer { | |||
95 | } | 95 | } |
96 | 96 | ||
97 | fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> { | 97 | fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> { |
98 | // FIXME: macros, see `expr_id` | ||
98 | let src = InFile { file_id: self.file_id, value: pat }; | 99 | let src = InFile { file_id: self.file_id, value: pat }; |
99 | self.body_source_map.as_ref()?.node_pat(src) | 100 | self.body_source_map.as_ref()?.node_pat(src) |
100 | } | 101 | } |
@@ -167,6 +168,16 @@ impl SourceAnalyzer { | |||
167 | Some((struct_field.into(), local)) | 168 | Some((struct_field.into(), local)) |
168 | } | 169 | } |
169 | 170 | ||
171 | pub(crate) fn resolve_record_field_pat( | ||
172 | &self, | ||
173 | _db: &dyn HirDatabase, | ||
174 | field: &ast::RecordFieldPat, | ||
175 | ) -> Option<StructField> { | ||
176 | let pat_id = self.pat_id(&field.pat()?)?; | ||
177 | let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?; | ||
178 | Some(struct_field.into()) | ||
179 | } | ||
180 | |||
170 | pub(crate) fn resolve_macro_call( | 181 | pub(crate) fn resolve_macro_call( |
171 | &self, | 182 | &self, |
172 | db: &dyn HirDatabase, | 183 | db: &dyn HirDatabase, |
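A short, hedged sketch of how an IDE feature might call the new `Semantics::resolve_record_field_pat` API introduced above; the wrapper function is illustrative only, everything else is taken from this diff:

    // Resolve the field named by a sub-pattern such as `foo` in `Foo { foo } => ...`
    // to its `StructField` definition.
    fn field_for_record_pat<DB: HirDatabase>(
        sema: &Semantics<'_, DB>,
        pat: &ast::RecordFieldPat,
    ) -> Option<StructField> {
        sema.resolve_record_field_pat(pat)
    }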
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index 82a52804d..0caedd8d8 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -473,16 +473,14 @@ impl ExprCollector<'_> { | |||
473 | self.collect_block_items(&block); | 473 | self.collect_block_items(&block); |
474 | let statements = block | 474 | let statements = block |
475 | .statements() | 475 | .statements() |
476 | .filter_map(|s| match s { | 476 | .map(|s| match s { |
477 | ast::Stmt::LetStmt(stmt) => { | 477 | ast::Stmt::LetStmt(stmt) => { |
478 | let pat = self.collect_pat_opt(stmt.pat()); | 478 | let pat = self.collect_pat_opt(stmt.pat()); |
479 | let type_ref = stmt.ascribed_type().map(TypeRef::from_ast); | 479 | let type_ref = stmt.ascribed_type().map(TypeRef::from_ast); |
480 | let initializer = stmt.initializer().map(|e| self.collect_expr(e)); | 480 | let initializer = stmt.initializer().map(|e| self.collect_expr(e)); |
481 | Some(Statement::Let { pat, type_ref, initializer }) | 481 | Statement::Let { pat, type_ref, initializer } |
482 | } | ||
483 | ast::Stmt::ExprStmt(stmt) => { | ||
484 | Some(Statement::Expr(self.collect_expr_opt(stmt.expr()))) | ||
485 | } | 482 | } |
483 | ast::Stmt::ExprStmt(stmt) => Statement::Expr(self.collect_expr_opt(stmt.expr())), | ||
486 | }) | 484 | }) |
487 | .collect(); | 485 | .collect(); |
488 | let tail = block.expr().map(|e| self.collect_expr(e)); | 486 | let tail = block.expr().map(|e| self.collect_expr(e)); |
diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs
index 4d489f692..fe4137176 100644
--- a/crates/ra_hir_def/src/body/scope.rs
+++ b/crates/ra_hir_def/src/body/scope.rs
@@ -157,6 +157,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope | |||
157 | for arm in arms { | 157 | for arm in arms { |
158 | let scope = scopes.new_scope(scope); | 158 | let scope = scopes.new_scope(scope); |
159 | scopes.add_bindings(body, scope, arm.pat); | 159 | scopes.add_bindings(body, scope, arm.pat); |
160 | if let Some(guard) = arm.guard { | ||
161 | scopes.set_scope(guard, scope); | ||
162 | compute_expr_scopes(guard, body, scopes, scope); | ||
163 | } | ||
160 | scopes.set_scope(arm.expr, scope); | 164 | scopes.set_scope(arm.expr, scope); |
161 | compute_expr_scopes(arm.expr, body, scopes, scope); | 165 | compute_expr_scopes(arm.expr, body, scopes, scope); |
162 | } | 166 | } |
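The guard handling above matters for code like the following (essentially the `infer_guard` test added later in this change): the binding introduced by the arm pattern must already be in scope inside the guard expression, not just in the arm body.

    struct S;
    impl S {
        fn foo(&self) -> bool { false }
    }

    fn main() {
        match S {
            // `s` is bound by the arm pattern; the new scope handling makes it
            // visible to the guard `s.foo()` as well as to the arm body.
            s if s.foo() => (),
            _ => (),
        }
    }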
diff --git a/crates/ra_hir_expand/src/ast_id_map.rs b/crates/ra_hir_expand/src/ast_id_map.rs
index a3ca302c2..d19569245 100644
--- a/crates/ra_hir_expand/src/ast_id_map.rs
+++ b/crates/ra_hir_expand/src/ast_id_map.rs
@@ -66,7 +66,7 @@ impl AstIdMap { | |||
66 | // change parent's id. This means that, say, adding a new function to a | 66 | // change parent's id. This means that, say, adding a new function to a |
67 | // trait does not change ids of top-level items, which helps caching. | 67 | // trait does not change ids of top-level items, which helps caching. |
68 | bfs(node, |it| { | 68 | bfs(node, |it| { |
69 | if let Some(module_item) = ast::ModuleItem::cast(it.clone()) { | 69 | if let Some(module_item) = ast::ModuleItem::cast(it) { |
70 | res.alloc(module_item.syntax()); | 70 | res.alloc(module_item.syntax()); |
71 | } | 71 | } |
72 | }); | 72 | }); |
diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs
index f9d3787f6..3da137f2e 100644
--- a/crates/ra_hir_expand/src/builtin_macro.rs
+++ b/crates/ra_hir_expand/src/builtin_macro.rs
@@ -301,7 +301,7 @@ fn relative_file(db: &dyn AstDatabase, call_id: MacroCallId, path: &str) -> Opti | |||
301 | } | 301 | } |
302 | 302 | ||
303 | // Extern paths ? | 303 | // Extern paths ? |
304 | let krate = db.relevant_crates(call_site).get(0)?.clone(); | 304 | let krate = *db.relevant_crates(call_site).get(0)?; |
305 | let (extern_source_id, relative_file) = | 305 | let (extern_source_id, relative_file) = |
306 | db.crate_graph()[krate].extern_source.extern_path(path)?; | 306 | db.crate_graph()[krate].extern_source.extern_path(path)?; |
307 | 307 | ||
@@ -329,7 +329,7 @@ fn include_expand( | |||
329 | 329 | ||
330 | // FIXME: | 330 | // FIXME: |
331 | // Handle include as expression | 331 | // Handle include as expression |
332 | let res = parse_to_token_tree(&db.file_text(file_id.into())) | 332 | let res = parse_to_token_tree(&db.file_text(file_id)) |
333 | .ok_or_else(|| mbe::ExpandError::ConversionError)? | 333 | .ok_or_else(|| mbe::ExpandError::ConversionError)? |
334 | .0; | 334 | .0; |
335 | 335 | ||
@@ -340,7 +340,7 @@ fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Optio | |||
340 | let call_id: MacroCallId = arg_id.into(); | 340 | let call_id: MacroCallId = arg_id.into(); |
341 | let original_file = call_id.as_file().original_file(db); | 341 | let original_file = call_id.as_file().original_file(db); |
342 | 342 | ||
343 | let krate = db.relevant_crates(original_file).get(0)?.clone(); | 343 | let krate = *db.relevant_crates(original_file).get(0)?; |
344 | db.crate_graph()[krate].env.get(key) | 344 | db.crate_graph()[krate].env.get(key) |
345 | } | 345 | } |
346 | 346 | ||
@@ -447,7 +447,7 @@ mod tests { | |||
447 | file_id: file_id.into(), | 447 | file_id: file_id.into(), |
448 | }; | 448 | }; |
449 | 449 | ||
450 | let id: MacroCallId = db.intern_eager_expansion(eager.into()).into(); | 450 | let id: MacroCallId = db.intern_eager_expansion(eager).into(); |
451 | id.as_file() | 451 | id.as_file() |
452 | } | 452 | } |
453 | }; | 453 | }; |
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml
index 177bdbcb0..04d3cd6a2 100644
--- a/crates/ra_hir_ty/Cargo.toml
+++ b/crates/ra_hir_ty/Cargo.toml
@@ -27,9 +27,9 @@ test_utils = { path = "../test_utils" } | |||
27 | 27 | ||
28 | scoped-tls = "1" | 28 | scoped-tls = "1" |
29 | 29 | ||
30 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } | 30 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" } |
31 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } | 31 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" } |
32 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } | 32 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" } |
33 | 33 | ||
34 | [dev-dependencies] | 34 | [dev-dependencies] |
35 | insta = "0.16.0" | 35 | insta = "0.16.0" |
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs
index 688026a04..779e78574 100644
--- a/crates/ra_hir_ty/src/_match.rs
+++ b/crates/ra_hir_ty/src/_match.rs
@@ -235,10 +235,19 @@ impl From<PatId> for PatIdOrWild { | |||
235 | } | 235 | } |
236 | } | 236 | } |
237 | 237 | ||
238 | impl From<&PatId> for PatIdOrWild { | ||
239 | fn from(pat_id: &PatId) -> Self { | ||
240 | Self::PatId(*pat_id) | ||
241 | } | ||
242 | } | ||
243 | |||
238 | #[derive(Debug, Clone, Copy, PartialEq)] | 244 | #[derive(Debug, Clone, Copy, PartialEq)] |
239 | pub enum MatchCheckErr { | 245 | pub enum MatchCheckErr { |
240 | NotImplemented, | 246 | NotImplemented, |
241 | MalformedMatchArm, | 247 | MalformedMatchArm, |
248 | /// Used when type inference cannot resolve the type of | ||
249 | /// a pattern or expression. | ||
250 | Unknown, | ||
242 | } | 251 | } |
243 | 252 | ||
244 | /// The return type of `is_useful` is either an indication of usefulness | 253 | /// The return type of `is_useful` is either an indication of usefulness |
@@ -290,10 +299,14 @@ impl PatStack { | |||
290 | Self::from_slice(&self.0[1..]) | 299 | Self::from_slice(&self.0[1..]) |
291 | } | 300 | } |
292 | 301 | ||
293 | fn replace_head_with<T: Into<PatIdOrWild> + Copy>(&self, pat_ids: &[T]) -> PatStack { | 302 | fn replace_head_with<I, T>(&self, pats: I) -> PatStack |
303 | where | ||
304 | I: Iterator<Item = T>, | ||
305 | T: Into<PatIdOrWild>, | ||
306 | { | ||
294 | let mut patterns: PatStackInner = smallvec![]; | 307 | let mut patterns: PatStackInner = smallvec![]; |
295 | for pat in pat_ids { | 308 | for pat in pats { |
296 | patterns.push((*pat).into()); | 309 | patterns.push(pat.into()); |
297 | } | 310 | } |
298 | for pat in &self.0[1..] { | 311 | for pat in &self.0[1..] { |
299 | patterns.push(*pat); | 312 | patterns.push(*pat); |
@@ -330,7 +343,7 @@ impl PatStack { | |||
330 | return Err(MatchCheckErr::NotImplemented); | 343 | return Err(MatchCheckErr::NotImplemented); |
331 | } | 344 | } |
332 | 345 | ||
333 | Some(self.replace_head_with(pat_ids)) | 346 | Some(self.replace_head_with(pat_ids.iter())) |
334 | } | 347 | } |
335 | (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { | 348 | (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { |
336 | match cx.body.exprs[lit_expr] { | 349 | match cx.body.exprs[lit_expr] { |
@@ -382,7 +395,7 @@ impl PatStack { | |||
382 | new_patterns.push((*pat_id).into()); | 395 | new_patterns.push((*pat_id).into()); |
383 | } | 396 | } |
384 | 397 | ||
385 | Some(self.replace_head_with(&new_patterns)) | 398 | Some(self.replace_head_with(new_patterns.into_iter())) |
386 | } else { | 399 | } else { |
387 | return Err(MatchCheckErr::MalformedMatchArm); | 400 | return Err(MatchCheckErr::MalformedMatchArm); |
388 | } | 401 | } |
@@ -390,13 +403,41 @@ impl PatStack { | |||
390 | // If there is no ellipsis in the tuple pattern, the number | 403 | // If there is no ellipsis in the tuple pattern, the number |
391 | // of patterns must equal the constructor arity. | 404 | // of patterns must equal the constructor arity. |
392 | if pat_ids.len() == constructor_arity { | 405 | if pat_ids.len() == constructor_arity { |
393 | Some(self.replace_head_with(pat_ids)) | 406 | Some(self.replace_head_with(pat_ids.into_iter())) |
394 | } else { | 407 | } else { |
395 | return Err(MatchCheckErr::MalformedMatchArm); | 408 | return Err(MatchCheckErr::MalformedMatchArm); |
396 | } | 409 | } |
397 | } | 410 | } |
398 | } | 411 | } |
399 | } | 412 | } |
413 | (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { | ||
414 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | ||
415 | if !enum_variant_matches(cx, pat_id, *e) { | ||
416 | None | ||
417 | } else { | ||
418 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | ||
419 | VariantData::Record(struct_field_arena) => { | ||
420 | // Here we treat any missing fields in the record as the wild pattern, as | ||
421 | // if the record has ellipsis. We want to do this here even if the | ||
422 | // record does not contain ellipsis, because it allows us to continue | ||
423 | // enforcing exhaustiveness for the rest of the match statement. | ||
424 | // | ||
425 | // Creating the diagnostic for the missing field in the pattern | ||
426 | // should be done in a different diagnostic. | ||
427 | let patterns = struct_field_arena.iter().map(|(_, struct_field)| { | ||
428 | arg_patterns | ||
429 | .iter() | ||
430 | .find(|pat| pat.name == struct_field.name) | ||
431 | .map(|pat| PatIdOrWild::from(pat.pat)) | ||
432 | .unwrap_or(PatIdOrWild::Wild) | ||
433 | }); | ||
434 | |||
435 | Some(self.replace_head_with(patterns)) | ||
436 | } | ||
437 | _ => return Err(MatchCheckErr::Unknown), | ||
438 | } | ||
439 | } | ||
440 | } | ||
400 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), | 441 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), |
401 | (_, _) => return Err(MatchCheckErr::NotImplemented), | 442 | (_, _) => return Err(MatchCheckErr::NotImplemented), |
402 | }; | 443 | }; |
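The comment in the new record-pattern arm is easiest to see with a fixture like the one below (mirrored by the `enum_record_missing_field_*` tests added further down in this file's diff). The pattern `Either::A { }` is deliberately ill-formed user code, missing its `foo` field; a separate diagnostic should report that, while exhaustiveness checking proceeds as if the pattern were `Either::A { foo: _ }`.

    enum Either {
        A { foo: bool },
        B,
    }
    fn test_fn(a: Either) {
        match a {
            // Missing `foo` on purpose; match-arm checking treats this arm
            // as `Either::A { foo: _ }` and can still analyze the rest.
            Either::A { } => (),
            Either::B => (),
        }
    }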
@@ -655,8 +696,8 @@ impl Constructor { | |||
655 | Constructor::Enum(e) => { | 696 | Constructor::Enum(e) => { |
656 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | 697 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { |
657 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), | 698 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), |
699 | VariantData::Record(struct_field_data) => struct_field_data.len(), | ||
658 | VariantData::Unit => 0, | 700 | VariantData::Unit => 0, |
659 | _ => return Err(MatchCheckErr::NotImplemented), | ||
660 | } | 701 | } |
661 | } | 702 | } |
662 | }; | 703 | }; |
@@ -695,10 +736,10 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt | |||
695 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), | 736 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), |
696 | _ => return Err(MatchCheckErr::NotImplemented), | 737 | _ => return Err(MatchCheckErr::NotImplemented), |
697 | }, | 738 | }, |
698 | Pat::TupleStruct { .. } | Pat::Path(_) => { | 739 | Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. } => { |
699 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); | 740 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); |
700 | let variant_id = | 741 | let variant_id = |
701 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::NotImplemented)?; | 742 | cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?; |
702 | match variant_id { | 743 | match variant_id { |
703 | VariantId::EnumVariantId(enum_variant_id) => { | 744 | VariantId::EnumVariantId(enum_variant_id) => { |
704 | Some(Constructor::Enum(enum_variant_id)) | 745 | Some(Constructor::Enum(enum_variant_id)) |
@@ -759,20 +800,22 @@ mod tests { | |||
759 | pub(super) use insta::assert_snapshot; | 800 | pub(super) use insta::assert_snapshot; |
760 | pub(super) use ra_db::fixture::WithFixture; | 801 | pub(super) use ra_db::fixture::WithFixture; |
761 | 802 | ||
762 | pub(super) use crate::test_db::TestDB; | 803 | pub(super) use crate::{diagnostics::MissingMatchArms, test_db::TestDB}; |
763 | 804 | ||
764 | pub(super) fn check_diagnostic_message(content: &str) -> String { | 805 | pub(super) fn check_diagnostic_message(content: &str) -> String { |
765 | TestDB::with_single_file(content).0.diagnostics().0 | 806 | TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().0 |
766 | } | 807 | } |
767 | 808 | ||
768 | pub(super) fn check_diagnostic(content: &str) { | 809 | pub(super) fn check_diagnostic(content: &str) { |
769 | let diagnostic_count = TestDB::with_single_file(content).0.diagnostics().1; | 810 | let diagnostic_count = |
811 | TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1; | ||
770 | 812 | ||
771 | assert_eq!(1, diagnostic_count, "no diagnostic reported"); | 813 | assert_eq!(1, diagnostic_count, "no diagnostic reported"); |
772 | } | 814 | } |
773 | 815 | ||
774 | pub(super) fn check_no_diagnostic(content: &str) { | 816 | pub(super) fn check_no_diagnostic(content: &str) { |
775 | let diagnostic_count = TestDB::with_single_file(content).0.diagnostics().1; | 817 | let diagnostic_count = |
818 | TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1; | ||
776 | 819 | ||
777 | assert_eq!(0, diagnostic_count, "expected no diagnostic, found one"); | 820 | assert_eq!(0, diagnostic_count, "expected no diagnostic, found one"); |
778 | } | 821 | } |
@@ -1532,6 +1575,236 @@ mod tests { | |||
1532 | } | 1575 | } |
1533 | 1576 | ||
1534 | #[test] | 1577 | #[test] |
1578 | fn enum_record_no_arms() { | ||
1579 | let content = r" | ||
1580 | enum Either { | ||
1581 | A { foo: bool }, | ||
1582 | B, | ||
1583 | } | ||
1584 | fn test_fn() { | ||
1585 | let a = Either::A { foo: true }; | ||
1586 | match a { | ||
1587 | } | ||
1588 | } | ||
1589 | "; | ||
1590 | |||
1591 | check_diagnostic(content); | ||
1592 | } | ||
1593 | |||
1594 | #[test] | ||
1595 | fn enum_record_missing_arms() { | ||
1596 | let content = r" | ||
1597 | enum Either { | ||
1598 | A { foo: bool }, | ||
1599 | B, | ||
1600 | } | ||
1601 | fn test_fn() { | ||
1602 | let a = Either::A { foo: true }; | ||
1603 | match a { | ||
1604 | Either::A { foo: true } => (), | ||
1605 | } | ||
1606 | } | ||
1607 | "; | ||
1608 | |||
1609 | check_diagnostic(content); | ||
1610 | } | ||
1611 | |||
1612 | #[test] | ||
1613 | fn enum_record_no_diagnostic() { | ||
1614 | let content = r" | ||
1615 | enum Either { | ||
1616 | A { foo: bool }, | ||
1617 | B, | ||
1618 | } | ||
1619 | fn test_fn() { | ||
1620 | let a = Either::A { foo: true }; | ||
1621 | match a { | ||
1622 | Either::A { foo: true } => (), | ||
1623 | Either::A { foo: false } => (), | ||
1624 | Either::B => (), | ||
1625 | } | ||
1626 | } | ||
1627 | "; | ||
1628 | |||
1629 | check_no_diagnostic(content); | ||
1630 | } | ||
1631 | |||
1632 | #[test] | ||
1633 | fn enum_record_missing_field_no_diagnostic() { | ||
1634 | let content = r" | ||
1635 | enum Either { | ||
1636 | A { foo: bool }, | ||
1637 | B, | ||
1638 | } | ||
1639 | fn test_fn() { | ||
1640 | let a = Either::B; | ||
1641 | match a { | ||
1642 | Either::A { } => (), | ||
1643 | Either::B => (), | ||
1644 | } | ||
1645 | } | ||
1646 | "; | ||
1647 | |||
1648 | // When `Either::A` is missing a struct member, we don't want | ||
1649 | // to fire the missing match arm diagnostic. This should fire | ||
1650 | // some other diagnostic. | ||
1651 | check_no_diagnostic(content); | ||
1652 | } | ||
1653 | |||
1654 | #[test] | ||
1655 | fn enum_record_missing_field_missing_match_arm() { | ||
1656 | let content = r" | ||
1657 | enum Either { | ||
1658 | A { foo: bool }, | ||
1659 | B, | ||
1660 | } | ||
1661 | fn test_fn() { | ||
1662 | let a = Either::B; | ||
1663 | match a { | ||
1664 | Either::A { } => (), | ||
1665 | } | ||
1666 | } | ||
1667 | "; | ||
1668 | |||
1669 | // Even though `Either::A` is missing fields, we still want to fire | ||
1670 | // the missing arm diagnostic here, since we know `Either::B` is missing. | ||
1671 | check_diagnostic(content); | ||
1672 | } | ||
1673 | |||
1674 | #[test] | ||
1675 | fn enum_record_no_diagnostic_wild() { | ||
1676 | let content = r" | ||
1677 | enum Either { | ||
1678 | A { foo: bool }, | ||
1679 | B, | ||
1680 | } | ||
1681 | fn test_fn() { | ||
1682 | let a = Either::A { foo: true }; | ||
1683 | match a { | ||
1684 | Either::A { foo: _ } => (), | ||
1685 | Either::B => (), | ||
1686 | } | ||
1687 | } | ||
1688 | "; | ||
1689 | |||
1690 | check_no_diagnostic(content); | ||
1691 | } | ||
1692 | |||
1693 | #[test] | ||
1694 | fn enum_record_fields_out_of_order_missing_arm() { | ||
1695 | let content = r" | ||
1696 | enum Either { | ||
1697 | A { foo: bool, bar: () }, | ||
1698 | B, | ||
1699 | } | ||
1700 | fn test_fn() { | ||
1701 | let a = Either::A { foo: true }; | ||
1702 | match a { | ||
1703 | Either::A { bar: (), foo: false } => (), | ||
1704 | Either::A { foo: true, bar: () } => (), | ||
1705 | } | ||
1706 | } | ||
1707 | "; | ||
1708 | |||
1709 | check_diagnostic(content); | ||
1710 | } | ||
1711 | |||
1712 | #[test] | ||
1713 | fn enum_record_fields_out_of_order_no_diagnostic() { | ||
1714 | let content = r" | ||
1715 | enum Either { | ||
1716 | A { foo: bool, bar: () }, | ||
1717 | B, | ||
1718 | } | ||
1719 | fn test_fn() { | ||
1720 | let a = Either::A { foo: true }; | ||
1721 | match a { | ||
1722 | Either::A { bar: (), foo: false } => (), | ||
1723 | Either::A { foo: true, bar: () } => (), | ||
1724 | Either::B => (), | ||
1725 | } | ||
1726 | } | ||
1727 | "; | ||
1728 | |||
1729 | check_no_diagnostic(content); | ||
1730 | } | ||
1731 | |||
1732 | #[test] | ||
1733 | fn enum_record_ellipsis_missing_arm() { | ||
1734 | let content = r" | ||
1735 | enum Either { | ||
1736 | A { foo: bool, bar: bool }, | ||
1737 | B, | ||
1738 | } | ||
1739 | fn test_fn() { | ||
1740 | match Either::B { | ||
1741 | Either::A { foo: true, .. } => (), | ||
1742 | Either::B => (), | ||
1743 | } | ||
1744 | } | ||
1745 | "; | ||
1746 | |||
1747 | check_diagnostic(content); | ||
1748 | } | ||
1749 | |||
1750 | #[test] | ||
1751 | fn enum_record_ellipsis_no_diagnostic() { | ||
1752 | let content = r" | ||
1753 | enum Either { | ||
1754 | A { foo: bool, bar: bool }, | ||
1755 | B, | ||
1756 | } | ||
1757 | fn test_fn() { | ||
1758 | let a = Either::A { foo: true }; | ||
1759 | match a { | ||
1760 | Either::A { foo: true, .. } => (), | ||
1761 | Either::A { foo: false, .. } => (), | ||
1762 | Either::B => (), | ||
1763 | } | ||
1764 | } | ||
1765 | "; | ||
1766 | |||
1767 | check_no_diagnostic(content); | ||
1768 | } | ||
1769 | |||
1770 | #[test] | ||
1771 | fn enum_record_ellipsis_all_fields_missing_arm() { | ||
1772 | let content = r" | ||
1773 | enum Either { | ||
1774 | A { foo: bool, bar: bool }, | ||
1775 | B, | ||
1776 | } | ||
1777 | fn test_fn() { | ||
1778 | let a = Either::B; | ||
1779 | match a { | ||
1780 | Either::A { .. } => (), | ||
1781 | } | ||
1782 | } | ||
1783 | "; | ||
1784 | |||
1785 | check_diagnostic(content); | ||
1786 | } | ||
1787 | |||
1788 | #[test] | ||
1789 | fn enum_record_ellipsis_all_fields_no_diagnostic() { | ||
1790 | let content = r" | ||
1791 | enum Either { | ||
1792 | A { foo: bool, bar: bool }, | ||
1793 | B, | ||
1794 | } | ||
1795 | fn test_fn() { | ||
1796 | let a = Either::B; | ||
1797 | match a { | ||
1798 | Either::A { .. } => (), | ||
1799 | Either::B => (), | ||
1800 | } | ||
1801 | } | ||
1802 | "; | ||
1803 | |||
1804 | check_no_diagnostic(content); | ||
1805 | } | ||
1806 | |||
1807 | #[test] | ||
1535 | fn enum_tuple_partial_ellipsis_no_diagnostic() { | 1808 | fn enum_tuple_partial_ellipsis_no_diagnostic() { |
1536 | let content = r" | 1809 | let content = r" |
1537 | enum Either { | 1810 | enum Either { |
@@ -1689,25 +1962,6 @@ mod false_negatives { | |||
1689 | } | 1962 | } |
1690 | 1963 | ||
1691 | #[test] | 1964 | #[test] |
1692 | fn enum_record() { | ||
1693 | let content = r" | ||
1694 | enum Either { | ||
1695 | A { foo: u32 }, | ||
1696 | B, | ||
1697 | } | ||
1698 | fn test_fn() { | ||
1699 | match Either::B { | ||
1700 | Either::A { foo: 5 } => (), | ||
1701 | } | ||
1702 | } | ||
1703 | "; | ||
1704 | |||
1705 | // This is a false negative. | ||
1706 | // We don't currently handle enum record types. | ||
1707 | check_no_diagnostic(content); | ||
1708 | } | ||
1709 | |||
1710 | #[test] | ||
1711 | fn internal_or() { | 1965 | fn internal_or() { |
1712 | let content = r" | 1966 | let content = r" |
1713 | fn test_fn() { | 1967 | fn test_fn() { |
@@ -1796,4 +2050,22 @@ mod false_negatives { | |||
1796 | // We don't currently handle tuple patterns with ellipsis. | 2050 | // We don't currently handle tuple patterns with ellipsis. |
1797 | check_no_diagnostic(content); | 2051 | check_no_diagnostic(content); |
1798 | } | 2052 | } |
2053 | |||
2054 | #[test] | ||
2055 | fn struct_missing_arm() { | ||
2056 | let content = r" | ||
2057 | struct Foo { | ||
2058 | a: bool, | ||
2059 | } | ||
2060 | fn test_fn(f: Foo) { | ||
2061 | match f { | ||
2062 | Foo { a: true } => {}, | ||
2063 | } | ||
2064 | } | ||
2065 | "; | ||
2066 | |||
2067 | // This is a false negative. | ||
2068 | // We don't currently handle structs. | ||
2069 | check_no_diagnostic(content); | ||
2070 | } | ||
1799 | } | 2071 | } |
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs
index 33da16b48..9e5dfeab3 100644
--- a/crates/ra_hir_ty/src/db.rs
+++ b/crates/ra_hir_ty/src/db.rs
@@ -107,6 +107,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { | |||
107 | krate: CrateId, | 107 | krate: CrateId, |
108 | goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>, | 108 | goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>, |
109 | ) -> Option<crate::traits::Solution>; | 109 | ) -> Option<crate::traits::Solution>; |
110 | |||
111 | #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)] | ||
112 | fn program_clauses_for_chalk_env( | ||
113 | &self, | ||
114 | krate: CrateId, | ||
115 | env: chalk_ir::Environment<chalk::Interner>, | ||
116 | ) -> chalk_ir::ProgramClauses<chalk::Interner>; | ||
110 | } | 117 | } |
111 | 118 | ||
112 | fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { | 119 | fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { |
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
index b6d9b3438..dfb6a435f 100644
--- a/crates/ra_hir_ty/src/infer.rs
+++ b/crates/ra_hir_ty/src/infer.rs
@@ -127,6 +127,7 @@ pub struct InferenceResult { | |||
127 | field_resolutions: FxHashMap<ExprId, StructFieldId>, | 127 | field_resolutions: FxHashMap<ExprId, StructFieldId>, |
128 | /// For each field in record literal, records the field it resolves to. | 128 | /// For each field in record literal, records the field it resolves to. |
129 | record_field_resolutions: FxHashMap<ExprId, StructFieldId>, | 129 | record_field_resolutions: FxHashMap<ExprId, StructFieldId>, |
130 | record_field_pat_resolutions: FxHashMap<PatId, StructFieldId>, | ||
130 | /// For each struct literal, records the variant it resolves to. | 131 | /// For each struct literal, records the variant it resolves to. |
131 | variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, | 132 | variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, |
132 | /// For each associated item record what it resolves to | 133 | /// For each associated item record what it resolves to |
@@ -147,6 +148,9 @@ impl InferenceResult { | |||
147 | pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> { | 148 | pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> { |
148 | self.record_field_resolutions.get(&expr).copied() | 149 | self.record_field_resolutions.get(&expr).copied() |
149 | } | 150 | } |
151 | pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<StructFieldId> { | ||
152 | self.record_field_pat_resolutions.get(&pat).copied() | ||
153 | } | ||
150 | pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> { | 154 | pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> { |
151 | self.variant_resolutions.get(&id.into()).copied() | 155 | self.variant_resolutions.get(&id.into()).copied() |
152 | } | 156 | } |
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs
index 8ec4d4ace..7c2ad4384 100644
--- a/crates/ra_hir_ty/src/infer/pat.rs
+++ b/crates/ra_hir_ty/src/infer/pat.rs
@@ -7,6 +7,7 @@ use hir_def::{ | |||
7 | expr::{BindingAnnotation, Pat, PatId, RecordFieldPat}, | 7 | expr::{BindingAnnotation, Pat, PatId, RecordFieldPat}, |
8 | path::Path, | 8 | path::Path, |
9 | type_ref::Mutability, | 9 | type_ref::Mutability, |
10 | StructFieldId, | ||
10 | }; | 11 | }; |
11 | use hir_expand::name::Name; | 12 | use hir_expand::name::Name; |
12 | use test_utils::tested_by; | 13 | use test_utils::tested_by; |
@@ -67,6 +68,11 @@ impl<'a> InferenceContext<'a> { | |||
67 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); | 68 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); |
68 | for subpat in subpats { | 69 | for subpat in subpats { |
69 | let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); | 70 | let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); |
71 | if let Some(local_id) = matching_field { | ||
72 | let field_def = StructFieldId { parent: def.unwrap(), local_id }; | ||
73 | self.result.record_field_pat_resolutions.insert(subpat.pat, field_def); | ||
74 | } | ||
75 | |||
70 | let expected_ty = | 76 | let expected_ty = |
71 | matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs)); | 77 | matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs)); |
72 | let expected_ty = self.normalize_associated_types_in(expected_ty); | 78 | let expected_ty = self.normalize_associated_types_in(expected_ty); |
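In user-code terms, the map populated above records which struct field each record sub-pattern names, which `resolve_record_field_pat` (added earlier in this diff) then exposes to IDE features. The snippet below is only an illustration of the kind of code that benefits:

    struct S { foo: u32 }

    fn f(s: S) -> u32 {
        // The `foo` inside the pattern now resolves to the field `S::foo`,
        // enabling features such as go-to-definition on the field name.
        let S { foo } = s;
        foo
    }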
diff --git a/crates/ra_hir_ty/src/test_db.rs b/crates/ra_hir_ty/src/test_db.rs
index 3a4d58bf9..8498d3d96 100644
--- a/crates/ra_hir_ty/src/test_db.rs
+++ b/crates/ra_hir_ty/src/test_db.rs
@@ -12,7 +12,7 @@ use ra_db::{ | |||
12 | }; | 12 | }; |
13 | use stdx::format_to; | 13 | use stdx::format_to; |
14 | 14 | ||
15 | use crate::{db::HirDatabase, expr::ExprValidator}; | 15 | use crate::{db::HirDatabase, diagnostics::Diagnostic, expr::ExprValidator}; |
16 | 16 | ||
17 | #[salsa::database( | 17 | #[salsa::database( |
18 | ra_db::SourceDatabaseExtStorage, | 18 | ra_db::SourceDatabaseExtStorage, |
@@ -104,10 +104,7 @@ impl TestDB { | |||
104 | panic!("Can't find module for file") | 104 | panic!("Can't find module for file") |
105 | } | 105 | } |
106 | 106 | ||
107 | // FIXME: don't duplicate this | 107 | fn diag<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) { |
108 | pub fn diagnostics(&self) -> (String, u32) { | ||
109 | let mut buf = String::new(); | ||
110 | let mut count = 0; | ||
111 | let crate_graph = self.crate_graph(); | 108 | let crate_graph = self.crate_graph(); |
112 | for krate in crate_graph.iter() { | 109 | for krate in crate_graph.iter() { |
113 | let crate_def_map = self.crate_def_map(krate); | 110 | let crate_def_map = self.crate_def_map(krate); |
@@ -132,15 +129,36 @@ impl TestDB { | |||
132 | 129 | ||
133 | for f in fns { | 130 | for f in fns { |
134 | let infer = self.infer(f.into()); | 131 | let infer = self.infer(f.into()); |
135 | let mut sink = DiagnosticSink::new(|d| { | 132 | let mut sink = DiagnosticSink::new(&mut cb); |
136 | format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message()); | ||
137 | count += 1; | ||
138 | }); | ||
139 | infer.add_diagnostics(self, f, &mut sink); | 133 | infer.add_diagnostics(self, f, &mut sink); |
140 | let mut validator = ExprValidator::new(f, infer, &mut sink); | 134 | let mut validator = ExprValidator::new(f, infer, &mut sink); |
141 | validator.validate_body(self); | 135 | validator.validate_body(self); |
142 | } | 136 | } |
143 | } | 137 | } |
138 | } | ||
139 | |||
140 | pub fn diagnostics(&self) -> (String, u32) { | ||
141 | let mut buf = String::new(); | ||
142 | let mut count = 0; | ||
143 | self.diag(|d| { | ||
144 | format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message()); | ||
145 | count += 1; | ||
146 | }); | ||
147 | (buf, count) | ||
148 | } | ||
149 | |||
150 | /// Like `diagnostics`, but filtered for a single diagnostic. | ||
151 | pub fn diagnostic<D: Diagnostic>(&self) -> (String, u32) { | ||
152 | let mut buf = String::new(); | ||
153 | let mut count = 0; | ||
154 | self.diag(|d| { | ||
155 | // We want to filter diagnostics by the particular one we are testing for, to | ||
156 | // avoid surprising results in tests. | ||
157 | if d.downcast_ref::<D>().is_some() { | ||
158 | format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message()); | ||
159 | count += 1; | ||
160 | }; | ||
161 | }); | ||
144 | (buf, count) | 162 | (buf, count) |
145 | } | 163 | } |
146 | } | 164 | } |
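Usage of the new filtered helper follows the pattern already visible in the `_match.rs` tests above; a compressed sketch, with the fixture text elided:

    // Inside a test: count only `MissingMatchArms` diagnostics for the fixture,
    // so unrelated diagnostics produced by the same code cannot skew the assertion.
    let (_text, count) = TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>();
    assert_eq!(1, count, "no diagnostic reported");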
diff --git a/crates/ra_hir_ty/src/tests/patterns.rs b/crates/ra_hir_ty/src/tests/patterns.rs
index 07cbc521a..6ea51d5d3 100644
--- a/crates/ra_hir_ty/src/tests/patterns.rs
+++ b/crates/ra_hir_ty/src/tests/patterns.rs
@@ -455,3 +455,29 @@ fn test() { | |||
455 | "### | 455 | "### |
456 | ); | 456 | ); |
457 | } | 457 | } |
458 | |||
459 | #[test] | ||
460 | fn infer_guard() { | ||
461 | assert_snapshot!( | ||
462 | infer(r#" | ||
463 | struct S; | ||
464 | impl S { fn foo(&self) -> bool { false } } | ||
465 | |||
466 | fn main() { | ||
467 | match S { | ||
468 | s if s.foo() => (), | ||
469 | } | ||
470 | } | ||
471 | "#), @" | ||
472 | [28; 32) 'self': &S | ||
473 | [42; 51) '{ false }': bool | ||
474 | [44; 49) 'false': bool | ||
475 | [65; 116) '{ ... } }': () | ||
476 | [71; 114) 'match ... }': () | ||
477 | [77; 78) 'S': S | ||
478 | [89; 90) 's': S | ||
479 | [94; 95) 's': S | ||
480 | [94; 101) 's.foo()': bool | ||
481 | [105; 107) '()': () | ||
482 | ") | ||
483 | } | ||
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 05791a848..6bc6d474c 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -225,7 +225,7 @@ fn solution_from_chalk( | |||
225 | None => unimplemented!(), | 225 | None => unimplemented!(), |
226 | }) | 226 | }) |
227 | .collect(); | 227 | .collect(); |
228 | let result = Canonical { value, num_vars: subst.binders.len() }; | 228 | let result = Canonical { value, num_vars: subst.binders.len(&Interner) }; |
229 | SolutionVariables(result) | 229 | SolutionVariables(result) |
230 | }; | 230 | }; |
231 | match solution { | 231 | match solution { |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs
index e00a82db2..1ccb7c3b4 100644
--- a/crates/ra_hir_ty/src/traits/chalk.rs
+++ b/crates/ra_hir_ty/src/traits/chalk.rs
@@ -4,8 +4,8 @@ use std::{fmt, sync::Arc}; | |||
4 | use log::debug; | 4 | use log::debug; |
5 | 5 | ||
6 | use chalk_ir::{ | 6 | use chalk_ir::{ |
7 | cast::Cast, fold::shift::Shift, Goal, GoalData, Parameter, PlaceholderIndex, TypeName, | 7 | cast::Cast, fold::shift::Shift, interner::HasInterner, Goal, GoalData, Parameter, |
8 | UniverseIndex, | 8 | PlaceholderIndex, TypeName, UniverseIndex, |
9 | }; | 9 | }; |
10 | 10 | ||
11 | use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; | 11 | use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; |
@@ -33,8 +33,10 @@ impl chalk_ir::interner::Interner for Interner { | |||
33 | type InternedGoals = Vec<Goal<Self>>; | 33 | type InternedGoals = Vec<Goal<Self>>; |
34 | type InternedSubstitution = Vec<Parameter<Self>>; | 34 | type InternedSubstitution = Vec<Parameter<Self>>; |
35 | type InternedProgramClause = chalk_ir::ProgramClauseData<Self>; | 35 | type InternedProgramClause = chalk_ir::ProgramClauseData<Self>; |
36 | type InternedProgramClauses = Vec<chalk_ir::ProgramClause<Self>>; | 36 | type InternedProgramClauses = Arc<[chalk_ir::ProgramClause<Self>]>; |
37 | type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>; | 37 | type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>; |
38 | type InternedParameterKinds = Vec<chalk_ir::ParameterKind<()>>; | ||
39 | type InternedCanonicalVarKinds = Vec<chalk_ir::ParameterKind<UniverseIndex>>; | ||
38 | type Identifier = TypeAliasId; | 40 | type Identifier = TypeAliasId; |
39 | type DefId = InternId; | 41 | type DefId = InternId; |
40 | 42 | ||
@@ -60,6 +62,27 @@ impl chalk_ir::interner::Interner for Interner { | |||
60 | tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt))) | 62 | tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt))) |
61 | } | 63 | } |
62 | 64 | ||
65 | fn debug_projection_ty( | ||
66 | proj: &chalk_ir::ProjectionTy<Interner>, | ||
67 | fmt: &mut fmt::Formatter<'_>, | ||
68 | ) -> Option<fmt::Result> { | ||
69 | tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) | ||
70 | } | ||
71 | |||
72 | fn debug_opaque_ty( | ||
73 | opaque_ty: &chalk_ir::OpaqueTy<Interner>, | ||
74 | fmt: &mut fmt::Formatter<'_>, | ||
75 | ) -> Option<fmt::Result> { | ||
76 | tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt))) | ||
77 | } | ||
78 | |||
79 | fn debug_opaque_ty_id( | ||
80 | opaque_ty_id: chalk_ir::OpaqueTyId<Self>, | ||
81 | fmt: &mut fmt::Formatter<'_>, | ||
82 | ) -> Option<fmt::Result> { | ||
83 | tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt))) | ||
84 | } | ||
85 | |||
63 | fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> { | 86 | fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> { |
64 | tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt))) | 87 | tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt))) |
65 | } | 88 | } |
@@ -202,15 +225,15 @@ impl chalk_ir::interner::Interner for Interner { | |||
202 | fn intern_program_clauses( | 225 | fn intern_program_clauses( |
203 | &self, | 226 | &self, |
204 | data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>, | 227 | data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>, |
205 | ) -> Vec<chalk_ir::ProgramClause<Self>> { | 228 | ) -> Arc<[chalk_ir::ProgramClause<Self>]> { |
206 | data.into_iter().collect() | 229 | data.into_iter().collect() |
207 | } | 230 | } |
208 | 231 | ||
209 | fn program_clauses_data<'a>( | 232 | fn program_clauses_data<'a>( |
210 | &self, | 233 | &self, |
211 | clauses: &'a Vec<chalk_ir::ProgramClause<Self>>, | 234 | clauses: &'a Arc<[chalk_ir::ProgramClause<Self>]>, |
212 | ) -> &'a [chalk_ir::ProgramClause<Self>] { | 235 | ) -> &'a [chalk_ir::ProgramClause<Self>] { |
213 | clauses | 236 | &clauses |
214 | } | 237 | } |
215 | 238 | ||
216 | fn intern_quantified_where_clauses( | 239 | fn intern_quantified_where_clauses( |
@@ -226,6 +249,34 @@ impl chalk_ir::interner::Interner for Interner { | |||
226 | ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] { | 249 | ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] { |
227 | clauses | 250 | clauses |
228 | } | 251 | } |
252 | |||
253 | fn intern_parameter_kinds( | ||
254 | &self, | ||
255 | data: impl IntoIterator<Item = chalk_ir::ParameterKind<()>>, | ||
256 | ) -> Self::InternedParameterKinds { | ||
257 | data.into_iter().collect() | ||
258 | } | ||
259 | |||
260 | fn parameter_kinds_data<'a>( | ||
261 | &self, | ||
262 | parameter_kinds: &'a Self::InternedParameterKinds, | ||
263 | ) -> &'a [chalk_ir::ParameterKind<()>] { | ||
264 | &parameter_kinds | ||
265 | } | ||
266 | |||
267 | fn intern_canonical_var_kinds( | ||
268 | &self, | ||
269 | data: impl IntoIterator<Item = chalk_ir::ParameterKind<UniverseIndex>>, | ||
270 | ) -> Self::InternedCanonicalVarKinds { | ||
271 | data.into_iter().collect() | ||
272 | } | ||
273 | |||
274 | fn canonical_var_kinds_data<'a>( | ||
275 | &self, | ||
276 | canonical_var_kinds: &'a Self::InternedCanonicalVarKinds, | ||
277 | ) -> &'a [chalk_ir::ParameterKind<UniverseIndex>] { | ||
278 | &canonical_var_kinds | ||
279 | } | ||
229 | } | 280 | } |
230 | 281 | ||
231 | impl chalk_ir::interner::HasInterner for Interner { | 282 | impl chalk_ir::interner::HasInterner for Interner { |
@@ -268,9 +319,12 @@ impl ToChalk for Ty { | |||
268 | Ty::Projection(proj_ty) => { | 319 | Ty::Projection(proj_ty) => { |
269 | let associated_ty_id = proj_ty.associated_ty.to_chalk(db); | 320 | let associated_ty_id = proj_ty.associated_ty.to_chalk(db); |
270 | let substitution = proj_ty.parameters.to_chalk(db); | 321 | let substitution = proj_ty.parameters.to_chalk(db); |
271 | chalk_ir::AliasTy { associated_ty_id, substitution } | 322 | chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { |
272 | .cast(&Interner) | 323 | associated_ty_id, |
273 | .intern(&Interner) | 324 | substitution, |
325 | }) | ||
326 | .cast(&Interner) | ||
327 | .intern(&Interner) | ||
274 | } | 328 | } |
275 | Ty::Placeholder(id) => { | 329 | Ty::Placeholder(id) => { |
276 | let interned_id = db.intern_type_param_id(id); | 330 | let interned_id = db.intern_type_param_id(id); |
@@ -314,16 +368,17 @@ impl ToChalk for Ty { | |||
314 | ); | 368 | ); |
315 | Ty::Placeholder(db.lookup_intern_type_param_id(interned_id)) | 369 | Ty::Placeholder(db.lookup_intern_type_param_id(interned_id)) |
316 | } | 370 | } |
317 | chalk_ir::TyData::Alias(proj) => { | 371 | chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => { |
318 | let associated_ty = from_chalk(db, proj.associated_ty_id); | 372 | let associated_ty = from_chalk(db, proj.associated_ty_id); |
319 | let parameters = from_chalk(db, proj.substitution); | 373 | let parameters = from_chalk(db, proj.substitution); |
320 | Ty::Projection(ProjectionTy { associated_ty, parameters }) | 374 | Ty::Projection(ProjectionTy { associated_ty, parameters }) |
321 | } | 375 | } |
376 | chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(_)) => unimplemented!(), | ||
322 | chalk_ir::TyData::Function(_) => unimplemented!(), | 377 | chalk_ir::TyData::Function(_) => unimplemented!(), |
323 | chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx), | 378 | chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx), |
324 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, | 379 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, |
325 | chalk_ir::TyData::Dyn(where_clauses) => { | 380 | chalk_ir::TyData::Dyn(where_clauses) => { |
326 | assert_eq!(where_clauses.bounds.binders.len(), 1); | 381 | assert_eq!(where_clauses.bounds.binders.len(&Interner), 1); |
327 | let predicates = where_clauses | 382 | let predicates = where_clauses |
328 | .bounds | 383 | .bounds |
329 | .skip_binders() | 384 | .skip_binders() |
@@ -404,6 +459,7 @@ impl ToChalk for TypeCtor { | |||
404 | match type_name { | 459 | match type_name { |
405 | TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()), | 460 | TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()), |
406 | TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)), | 461 | TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)), |
462 | TypeName::OpaqueType(_) => unreachable!(), | ||
407 | TypeName::Error => { | 463 | TypeName::Error => { |
408 | // this should not be reached, since we don't represent TypeName::Error with TypeCtor | 464 | // this should not be reached, since we don't represent TypeName::Error with TypeCtor |
409 | unreachable!() | 465 | unreachable!() |
@@ -460,7 +516,8 @@ impl ToChalk for GenericPredicate { | |||
460 | } | 516 | } |
461 | GenericPredicate::Projection(projection_pred) => { | 517 | GenericPredicate::Projection(projection_pred) => { |
462 | let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner); | 518 | let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner); |
463 | let alias = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner); | 519 | let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner); |
520 | let alias = chalk_ir::AliasTy::Projection(projection); | ||
464 | make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0) | 521 | make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0) |
465 | } | 522 | } |
466 | GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"), | 523 | GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"), |
@@ -481,7 +538,13 @@ impl ToChalk for GenericPredicate { | |||
481 | GenericPredicate::Implemented(from_chalk(db, tr)) | 538 | GenericPredicate::Implemented(from_chalk(db, tr)) |
482 | } | 539 | } |
483 | chalk_ir::WhereClause::AliasEq(projection_eq) => { | 540 | chalk_ir::WhereClause::AliasEq(projection_eq) => { |
484 | let projection_ty = from_chalk(db, projection_eq.alias); | 541 | let projection_ty = from_chalk( |
542 | db, | ||
543 | match projection_eq.alias { | ||
544 | chalk_ir::AliasTy::Projection(p) => p, | ||
545 | _ => unimplemented!(), | ||
546 | }, | ||
547 | ); | ||
485 | let ty = from_chalk(db, projection_eq.ty); | 548 | let ty = from_chalk(db, projection_eq.ty); |
486 | GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) | 549 | GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) |
487 | } | 550 | } |
@@ -490,10 +553,10 @@ impl ToChalk for GenericPredicate { | |||
490 | } | 553 | } |
491 | 554 | ||
492 | impl ToChalk for ProjectionTy { | 555 | impl ToChalk for ProjectionTy { |
493 | type Chalk = chalk_ir::AliasTy<Interner>; | 556 | type Chalk = chalk_ir::ProjectionTy<Interner>; |
494 | 557 | ||
495 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasTy<Interner> { | 558 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy<Interner> { |
496 | chalk_ir::AliasTy { | 559 | chalk_ir::ProjectionTy { |
497 | associated_ty_id: self.associated_ty.to_chalk(db), | 560 | associated_ty_id: self.associated_ty.to_chalk(db), |
498 | substitution: self.parameters.to_chalk(db), | 561 | substitution: self.parameters.to_chalk(db), |
499 | } | 562 | } |
@@ -501,7 +564,7 @@ impl ToChalk for ProjectionTy { | |||
501 | 564 | ||
502 | fn from_chalk( | 565 | fn from_chalk( |
503 | db: &dyn HirDatabase, | 566 | db: &dyn HirDatabase, |
504 | projection_ty: chalk_ir::AliasTy<Interner>, | 567 | projection_ty: chalk_ir::ProjectionTy<Interner>, |
505 | ) -> ProjectionTy { | 568 | ) -> ProjectionTy { |
506 | ProjectionTy { | 569 | ProjectionTy { |
507 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), | 570 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), |
@@ -514,7 +577,10 @@ impl ToChalk for super::ProjectionPredicate { | |||
514 | type Chalk = chalk_ir::AliasEq<Interner>; | 577 | type Chalk = chalk_ir::AliasEq<Interner>; |
515 | 578 | ||
516 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> { | 579 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> { |
517 | chalk_ir::AliasEq { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } | 580 | chalk_ir::AliasEq { |
581 | alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)), | ||
582 | ty: self.ty.to_chalk(db), | ||
583 | } | ||
518 | } | 584 | } |
519 | 585 | ||
520 | fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self { | 586 | fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self { |
@@ -540,17 +606,24 @@ impl ToChalk for Obligation { | |||
540 | impl<T> ToChalk for Canonical<T> | 606 | impl<T> ToChalk for Canonical<T> |
541 | where | 607 | where |
542 | T: ToChalk, | 608 | T: ToChalk, |
609 | T::Chalk: HasInterner<Interner = Interner>, | ||
543 | { | 610 | { |
544 | type Chalk = chalk_ir::Canonical<T::Chalk>; | 611 | type Chalk = chalk_ir::Canonical<T::Chalk>; |
545 | 612 | ||
546 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> { | 613 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> { |
547 | let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); | 614 | let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); |
548 | let value = self.value.to_chalk(db); | 615 | let value = self.value.to_chalk(db); |
549 | chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] } | 616 | chalk_ir::Canonical { |
617 | value, | ||
618 | binders: chalk_ir::CanonicalVarKinds::from(&Interner, vec![parameter; self.num_vars]), | ||
619 | } | ||
550 | } | 620 | } |
551 | 621 | ||
552 | fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { | 622 | fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { |
553 | Canonical { num_vars: canonical.binders.len(), value: from_chalk(db, canonical.value) } | 623 | Canonical { |
624 | num_vars: canonical.binders.len(&Interner), | ||
625 | value: from_chalk(db, canonical.value), | ||
626 | } | ||
554 | } | 627 | } |
555 | } | 628 | } |
556 | 629 | ||
@@ -649,9 +722,15 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData { | |||
649 | } | 722 | } |
650 | } | 723 | } |
651 | 724 | ||
652 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { | 725 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> |
726 | where | ||
727 | T: HasInterner<Interner = Interner>, | ||
728 | { | ||
653 | chalk_ir::Binders::new( | 729 | chalk_ir::Binders::new( |
654 | std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(), | 730 | chalk_ir::ParameterKinds::from( |
731 | &Interner, | ||
732 | std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars), | ||
733 | ), | ||
655 | value, | 734 | value, |
656 | ) | 735 | ) |
657 | } | 736 | } |
@@ -799,6 +878,28 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
799 | // FIXME tell Chalk about well-known traits (here and in trait_datum) | 878 | // FIXME tell Chalk about well-known traits (here and in trait_datum) |
800 | None | 879 | None |
801 | } | 880 | } |
881 | |||
882 | fn program_clauses_for_env( | ||
883 | &self, | ||
884 | environment: &chalk_ir::Environment<Interner>, | ||
885 | ) -> chalk_ir::ProgramClauses<Interner> { | ||
886 | self.db.program_clauses_for_chalk_env(self.krate, environment.clone()) | ||
887 | } | ||
888 | |||
889 | fn opaque_ty_data( | ||
890 | &self, | ||
891 | _id: chalk_ir::OpaqueTyId<Interner>, | ||
892 | ) -> Arc<chalk_rust_ir::OpaqueTyDatum<Interner>> { | ||
893 | unimplemented!() | ||
894 | } | ||
895 | } | ||
896 | |||
897 | pub(crate) fn program_clauses_for_chalk_env_query( | ||
898 | db: &dyn HirDatabase, | ||
899 | krate: CrateId, | ||
900 | environment: chalk_ir::Environment<Interner>, | ||
901 | ) -> chalk_ir::ProgramClauses<Interner> { | ||
902 | chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment) | ||
802 | } | 903 | } |
803 | 904 | ||
804 | pub(crate) fn associated_ty_data_query( | 905 | pub(crate) fn associated_ty_data_query( |
diff --git a/crates/ra_hir_ty/src/traits/chalk/tls.rs b/crates/ra_hir_ty/src/traits/chalk/tls.rs index fa8e4d1ad..4867cb17e 100644 --- a/crates/ra_hir_ty/src/traits/chalk/tls.rs +++ b/crates/ra_hir_ty/src/traits/chalk/tls.rs | |||
@@ -121,19 +121,38 @@ impl DebugContext<'_> { | |||
121 | write!(fmt, "{}::{}", trait_data.name, type_alias_data.name) | 121 | write!(fmt, "{}::{}", trait_data.name, type_alias_data.name) |
122 | } | 122 | } |
123 | 123 | ||
124 | pub fn debug_opaque_ty_id( | ||
125 | &self, | ||
126 | opaque_ty_id: chalk_ir::OpaqueTyId<Interner>, | ||
127 | fmt: &mut fmt::Formatter<'_>, | ||
128 | ) -> Result<(), fmt::Error> { | ||
129 | fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish() | ||
130 | } | ||
131 | |||
124 | pub fn debug_alias( | 132 | pub fn debug_alias( |
125 | &self, | 133 | &self, |
126 | alias: &AliasTy<Interner>, | 134 | alias_ty: &AliasTy<Interner>, |
135 | fmt: &mut fmt::Formatter<'_>, | ||
136 | ) -> Result<(), fmt::Error> { | ||
137 | match alias_ty { | ||
138 | AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt), | ||
139 | AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt), | ||
140 | } | ||
141 | } | ||
142 | |||
143 | pub fn debug_projection_ty( | ||
144 | &self, | ||
145 | projection_ty: &chalk_ir::ProjectionTy<Interner>, | ||
127 | fmt: &mut fmt::Formatter<'_>, | 146 | fmt: &mut fmt::Formatter<'_>, |
128 | ) -> Result<(), fmt::Error> { | 147 | ) -> Result<(), fmt::Error> { |
129 | let type_alias: TypeAliasId = from_chalk(self.0, alias.associated_ty_id); | 148 | let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id); |
130 | let type_alias_data = self.0.type_alias_data(type_alias); | 149 | let type_alias_data = self.0.type_alias_data(type_alias); |
131 | let trait_ = match type_alias.lookup(self.0.upcast()).container { | 150 | let trait_ = match type_alias.lookup(self.0.upcast()).container { |
132 | AssocContainerId::TraitId(t) => t, | 151 | AssocContainerId::TraitId(t) => t, |
133 | _ => panic!("associated type not in trait"), | 152 | _ => panic!("associated type not in trait"), |
134 | }; | 153 | }; |
135 | let trait_data = self.0.trait_data(trait_); | 154 | let trait_data = self.0.trait_data(trait_); |
136 | let params = alias.substitution.parameters(&Interner); | 155 | let params = projection_ty.substitution.parameters(&Interner); |
137 | write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?; | 156 | write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?; |
138 | if params.len() > 1 { | 157 | if params.len() > 1 { |
139 | write!( | 158 | write!( |
@@ -145,6 +164,14 @@ impl DebugContext<'_> { | |||
145 | write!(fmt, ">::{}", type_alias_data.name) | 164 | write!(fmt, ">::{}", type_alias_data.name) |
146 | } | 165 | } |
147 | 166 | ||
167 | pub fn debug_opaque_ty( | ||
168 | &self, | ||
169 | opaque_ty: &chalk_ir::OpaqueTy<Interner>, | ||
170 | fmt: &mut fmt::Formatter<'_>, | ||
171 | ) -> Result<(), fmt::Error> { | ||
172 | write!(fmt, "{:?}", opaque_ty.opaque_ty_id) | ||
173 | } | ||
174 | |||
148 | pub fn debug_ty( | 175 | pub fn debug_ty( |
149 | &self, | 176 | &self, |
150 | ty: &chalk_ir::Ty<Interner>, | 177 | ty: &chalk_ir::Ty<Interner>, |
diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs index bb12a1bdc..f8dac1d54 100644 --- a/crates/ra_ide/src/completion/presentation.rs +++ b/crates/ra_ide/src/completion/presentation.rs | |||
@@ -161,6 +161,12 @@ impl Completions { | |||
161 | name: Option<String>, | 161 | name: Option<String>, |
162 | macro_: hir::MacroDef, | 162 | macro_: hir::MacroDef, |
163 | ) { | 163 | ) { |
164 | // FIXME: Currently proc-macro do not have ast-node, | ||
165 | // such that it does not have source | ||
166 | if macro_.is_proc_macro() { | ||
167 | return; | ||
168 | } | ||
169 | |||
164 | let name = match name { | 170 | let name = match name { |
165 | Some(it) => it, | 171 | Some(it) => it, |
166 | None => return, | 172 | None => return, |
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index f5a063351..753d2ef6a 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs | |||
@@ -96,7 +96,7 @@ fn try_extend_selection( | |||
96 | return Some(node.text_range()); | 96 | return Some(node.text_range()); |
97 | } | 97 | } |
98 | 98 | ||
99 | let node = shallowest_node(&node.into()); | 99 | let node = shallowest_node(&node); |
100 | 100 | ||
101 | if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { | 101 | if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { |
102 | if let Some(range) = extend_list_item(&node) { | 102 | if let Some(range) = extend_list_item(&node) { |
diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs index 8aed94d16..9998ca5a3 100644 --- a/crates/ra_ide/src/goto_definition.rs +++ b/crates/ra_ide/src/goto_definition.rs | |||
@@ -62,10 +62,9 @@ pub(crate) enum ReferenceResult { | |||
62 | 62 | ||
63 | impl ReferenceResult { | 63 | impl ReferenceResult { |
64 | fn to_vec(self) -> Vec<NavigationTarget> { | 64 | fn to_vec(self) -> Vec<NavigationTarget> { |
65 | use self::ReferenceResult::*; | ||
66 | match self { | 65 | match self { |
67 | Exact(target) => vec![target], | 66 | ReferenceResult::Exact(target) => vec![target], |
68 | Approximate(vec) => vec, | 67 | ReferenceResult::Approximate(vec) => vec, |
69 | } | 68 | } |
70 | } | 69 | } |
71 | } | 70 | } |
@@ -74,8 +73,6 @@ pub(crate) fn reference_definition( | |||
74 | sema: &Semantics<RootDatabase>, | 73 | sema: &Semantics<RootDatabase>, |
75 | name_ref: &ast::NameRef, | 74 | name_ref: &ast::NameRef, |
76 | ) -> ReferenceResult { | 75 | ) -> ReferenceResult { |
77 | use self::ReferenceResult::*; | ||
78 | |||
79 | let name_kind = classify_name_ref(sema, name_ref); | 76 | let name_kind = classify_name_ref(sema, name_ref); |
80 | if let Some(def) = name_kind { | 77 | if let Some(def) = name_kind { |
81 | let def = def.definition(); | 78 | let def = def.definition(); |
@@ -91,7 +88,7 @@ pub(crate) fn reference_definition( | |||
91 | .into_iter() | 88 | .into_iter() |
92 | .map(|s| s.to_nav(sema.db)) | 89 | .map(|s| s.to_nav(sema.db)) |
93 | .collect(); | 90 | .collect(); |
94 | Approximate(navs) | 91 | ReferenceResult::Approximate(navs) |
95 | } | 92 | } |
96 | 93 | ||
97 | #[cfg(test)] | 94 | #[cfg(test)] |
@@ -399,6 +396,25 @@ mod tests { | |||
399 | } | 396 | } |
400 | 397 | ||
401 | #[test] | 398 | #[test] |
399 | fn goto_def_for_record_pat_fields() { | ||
400 | covers!(ra_ide_db::goto_def_for_record_field_pats); | ||
401 | check_goto( | ||
402 | r" | ||
403 | //- /lib.rs | ||
404 | struct Foo { | ||
405 | spam: u32, | ||
406 | } | ||
407 | |||
408 | fn bar(foo: Foo) -> Foo { | ||
409 | let Foo { spam<|>: _, } = foo | ||
410 | } | ||
411 | ", | ||
412 | "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", | ||
413 | "spam: u32|spam", | ||
414 | ); | ||
415 | } | ||
416 | |||
417 | #[test] | ||
402 | fn goto_def_for_record_fields_macros() { | 418 | fn goto_def_for_record_fields_macros() { |
403 | check_goto( | 419 | check_goto( |
404 | r" | 420 | r" |
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html index 214dcbb62..ccb1fc751 100644 --- a/crates/ra_ide/src/snapshots/highlighting.html +++ b/crates/ra_ide/src/snapshots/highlighting.html | |||
@@ -50,12 +50,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
50 | <span class="keyword">fn</span> <span class="function declaration">main</span>() { | 50 | <span class="keyword">fn</span> <span class="function declaration">main</span>() { |
51 | <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>); | 51 | <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>); |
52 | 52 | ||
53 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new(); | 53 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = <span class="unresolved_reference">Vec</span>::<span class="unresolved_reference">new</span>(); |
54 | <span class="keyword control">if</span> <span class="keyword">true</span> { | 54 | <span class="keyword control">if</span> <span class="keyword">true</span> { |
55 | <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>; | 55 | <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>; |
56 | <span class="variable mutable">vec</span>.push(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> }); | 56 | <span class="variable mutable">vec</span>.<span class="unresolved_reference">push</span>(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> }); |
57 | } | 57 | } |
58 | <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.set_len(<span class="numeric_literal">0</span>); } | 58 | <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.<span class="unresolved_reference">set_len</span>(<span class="numeric_literal">0</span>); } |
59 | 59 | ||
60 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>; | 60 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>; |
61 | <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>; | 61 | <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>; |
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html index dddbfc0dd..3df82c45f 100644 --- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html | |||
@@ -28,11 +28,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
28 | </style> | 28 | </style> |
29 | <pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() { | 29 | <pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() { |
30 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>; | 30 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>; |
31 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string(); | 31 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.<span class="unresolved_reference">to_string</span>(); |
32 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string(); | 32 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.<span class="unresolved_reference">to_string</span>(); |
33 | 33 | ||
34 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>; | 34 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>; |
35 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string(); | 35 | <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.<span class="unresolved_reference">to_string</span>(); |
36 | } | 36 | } |
37 | 37 | ||
38 | <span class="keyword">fn</span> <span class="function declaration">bar</span>() { | 38 | <span class="keyword">fn</span> <span class="function declaration">bar</span>() { |
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index 7b15b82bd..93d502875 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs | |||
@@ -239,20 +239,21 @@ fn highlight_element( | |||
239 | NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None, | 239 | NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None, |
240 | NAME_REF => { | 240 | NAME_REF => { |
241 | let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); | 241 | let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); |
242 | let name_kind = classify_name_ref(sema, &name_ref)?; | 242 | match classify_name_ref(sema, &name_ref) { |
243 | 243 | Some(name_kind) => match name_kind { | |
244 | match name_kind { | 244 | NameRefClass::Definition(def) => { |
245 | NameRefClass::Definition(def) => { | 245 | if let Definition::Local(local) = &def { |
246 | if let Definition::Local(local) = &def { | 246 | if let Some(name) = local.name(db) { |
247 | if let Some(name) = local.name(db) { | 247 | let shadow_count = |
248 | let shadow_count = | 248 | bindings_shadow_count.entry(name.clone()).or_default(); |
249 | bindings_shadow_count.entry(name.clone()).or_default(); | 249 | binding_hash = Some(calc_binding_hash(&name, *shadow_count)) |
250 | binding_hash = Some(calc_binding_hash(&name, *shadow_count)) | 250 | } |
251 | } | 251 | }; |
252 | }; | 252 | highlight_name(db, def) |
253 | highlight_name(db, def) | 253 | } |
254 | } | 254 | NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(), |
255 | NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(), | 255 | }, |
256 | None => HighlightTag::UnresolvedReference.into(), | ||
256 | } | 257 | } |
257 | } | 258 | } |
258 | 259 | ||
diff --git a/crates/ra_ide/src/syntax_highlighting/tags.rs b/crates/ra_ide/src/syntax_highlighting/tags.rs index e8b138e1a..f2c421654 100644 --- a/crates/ra_ide/src/syntax_highlighting/tags.rs +++ b/crates/ra_ide/src/syntax_highlighting/tags.rs | |||
@@ -38,6 +38,7 @@ pub enum HighlightTag { | |||
38 | TypeParam, | 38 | TypeParam, |
39 | Union, | 39 | Union, |
40 | Local, | 40 | Local, |
41 | UnresolvedReference, | ||
41 | } | 42 | } |
42 | 43 | ||
43 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] | 44 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] |
@@ -79,6 +80,7 @@ impl HighlightTag { | |||
79 | HighlightTag::TypeParam => "type_param", | 80 | HighlightTag::TypeParam => "type_param", |
80 | HighlightTag::Union => "union", | 81 | HighlightTag::Union => "union", |
81 | HighlightTag::Local => "variable", | 82 | HighlightTag::Local => "variable", |
83 | HighlightTag::UnresolvedReference => "unresolved_reference", | ||
82 | } | 84 | } |
83 | } | 85 | } |
84 | } | 86 | } |
diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs index 49a8c74fb..785613b82 100644 --- a/crates/ra_ide_db/src/defs.rs +++ b/crates/ra_ide_db/src/defs.rs | |||
@@ -180,6 +180,7 @@ fn classify_name_inner(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Opti | |||
180 | } | 180 | } |
181 | } | 181 | } |
182 | 182 | ||
183 | #[derive(Debug)] | ||
183 | pub enum NameRefClass { | 184 | pub enum NameRefClass { |
184 | Definition(Definition), | 185 | Definition(Definition), |
185 | FieldShorthand { local: Local, field: Definition }, | 186 | FieldShorthand { local: Local, field: Definition }, |
@@ -229,6 +230,14 @@ pub fn classify_name_ref( | |||
229 | } | 230 | } |
230 | } | 231 | } |
231 | 232 | ||
233 | if let Some(record_field_pat) = ast::RecordFieldPat::cast(parent.clone()) { | ||
234 | tested_by!(goto_def_for_record_field_pats; force); | ||
235 | if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { | ||
236 | let field = Definition::StructField(field); | ||
237 | return Some(NameRefClass::Definition(field)); | ||
238 | } | ||
239 | } | ||
240 | |||
232 | if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { | 241 | if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { |
233 | tested_by!(goto_def_for_macros; force); | 242 | tested_by!(goto_def_for_macros; force); |
234 | if let Some(macro_def) = sema.resolve_macro_call(&macro_call) { | 243 | if let Some(macro_def) = sema.resolve_macro_call(&macro_call) { |
diff --git a/crates/ra_ide_db/src/marks.rs b/crates/ra_ide_db/src/marks.rs index 4f0a22af0..03b4be21c 100644 --- a/crates/ra_ide_db/src/marks.rs +++ b/crates/ra_ide_db/src/marks.rs | |||
@@ -6,5 +6,6 @@ test_utils::marks![ | |||
6 | goto_def_for_fields | 6 | goto_def_for_fields |
7 | goto_def_for_record_fields | 7 | goto_def_for_record_fields |
8 | goto_def_for_field_init_shorthand | 8 | goto_def_for_field_init_shorthand |
9 | goto_def_for_record_field_pats | ||
9 | search_filters_by_range | 10 | search_filters_by_range |
10 | ]; | 11 | ]; |
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs index 2579382da..78f9efa1b 100644 --- a/crates/ra_mbe/src/mbe_expander/matcher.rs +++ b/crates/ra_mbe/src/mbe_expander/matcher.rs | |||
@@ -187,7 +187,11 @@ impl<'a> TtIter<'a> { | |||
187 | _ => false, | 187 | _ => false, |
188 | }, | 188 | }, |
189 | Separator::Literal(lhs) => match fork.expect_literal() { | 189 | Separator::Literal(lhs) => match fork.expect_literal() { |
190 | Ok(rhs) => rhs.text == lhs.text, | 190 | Ok(rhs) => match rhs { |
191 | tt::Leaf::Literal(rhs) => rhs.text == lhs.text, | ||
192 | tt::Leaf::Ident(rhs) => rhs.text == lhs.text, | ||
193 | tt::Leaf::Punct(_) => false, | ||
194 | }, | ||
191 | _ => false, | 195 | _ => false, |
192 | }, | 196 | }, |
193 | Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { | 197 | Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { |
@@ -202,6 +206,13 @@ impl<'a> TtIter<'a> { | |||
202 | } | 206 | } |
203 | 207 | ||
204 | pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> { | 208 | pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> { |
209 | match self.peek_n(0) { | ||
210 | Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => { | ||
211 | return self.expect_lifetime(); | ||
212 | } | ||
213 | _ => (), | ||
214 | } | ||
215 | |||
205 | let tt = self.next().ok_or_else(|| ())?.clone(); | 216 | let tt = self.next().ok_or_else(|| ())?.clone(); |
206 | let punct = match tt { | 217 | let punct = match tt { |
207 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { | 218 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { |
@@ -255,13 +266,21 @@ impl<'a> TtIter<'a> { | |||
255 | } | 266 | } |
256 | } | 267 | } |
257 | 268 | ||
258 | pub(crate) fn expect_lifetime(&mut self) -> Result<&tt::Ident, ()> { | 269 | pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> { |
259 | let ident = self.expect_ident()?; | 270 | let punct = self.expect_punct()?; |
260 | // check if it start from "`" | 271 | if punct.char != '\'' { |
261 | if !ident.text.starts_with('\'') { | ||
262 | return Err(()); | 272 | return Err(()); |
263 | } | 273 | } |
264 | Ok(ident) | 274 | let ident = self.expect_ident()?; |
275 | |||
276 | Ok(tt::Subtree { | ||
277 | delimiter: None, | ||
278 | token_trees: vec![ | ||
279 | tt::Leaf::Punct(punct.clone()).into(), | ||
280 | tt::Leaf::Ident(ident.clone()).into(), | ||
281 | ], | ||
282 | } | ||
283 | .into()) | ||
265 | } | 284 | } |
266 | 285 | ||
267 | pub(crate) fn expect_fragment( | 286 | pub(crate) fn expect_fragment( |
@@ -274,7 +293,10 @@ impl<'a> TtIter<'a> { | |||
274 | } | 293 | } |
275 | 294 | ||
276 | impl<'a> TreeSink for OffsetTokenSink<'a> { | 295 | impl<'a> TreeSink for OffsetTokenSink<'a> { |
277 | fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) { | 296 | fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { |
297 | if kind == SyntaxKind::LIFETIME { | ||
298 | n_tokens = 2; | ||
299 | } | ||
278 | for _ in 0..n_tokens { | 300 | for _ in 0..n_tokens { |
279 | self.cursor = self.cursor.bump_subtree(); | 301 | self.cursor = self.cursor.bump_subtree(); |
280 | } | 302 | } |
@@ -286,7 +308,7 @@ impl<'a> TtIter<'a> { | |||
286 | } | 308 | } |
287 | } | 309 | } |
288 | 310 | ||
289 | let buffer = TokenBuffer::new(self.inner.as_slice()); | 311 | let buffer = TokenBuffer::new(&self.inner.as_slice()); |
290 | let mut src = SubtreeTokenSource::new(&buffer); | 312 | let mut src = SubtreeTokenSource::new(&buffer); |
291 | let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; | 313 | let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; |
292 | 314 | ||
@@ -422,7 +444,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen | |||
422 | "tt" => input.expect_tt().map(Some).map_err(|()| err!()), | 444 | "tt" => input.expect_tt().map(Some).map_err(|()| err!()), |
423 | "lifetime" => input | 445 | "lifetime" => input |
424 | .expect_lifetime() | 446 | .expect_lifetime() |
425 | .map(|ident| Some(tt::Leaf::Ident(ident.clone()).into())) | 447 | .map(|tt| Some(tt)) |
426 | .map_err(|()| err!("expected lifetime")), | 448 | .map_err(|()| err!("expected lifetime")), |
427 | "literal" => input | 449 | "literal" => input |
428 | .expect_literal() | 450 | .expect_literal() |
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 91e324db9..d7866452d 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -50,6 +50,26 @@ impl<'a> SubtreeTokenSource<'a> { | |||
50 | } | 50 | } |
51 | 51 | ||
52 | fn get(&self, pos: usize) -> Ref<Option<TtToken>> { | 52 | fn get(&self, pos: usize) -> Ref<Option<TtToken>> { |
53 | fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> { | ||
54 | let tkn = c.token_tree(); | ||
55 | |||
56 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn { | ||
57 | if punct.char == '\'' { | ||
58 | let next = c.bump(); | ||
59 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() { | ||
60 | let res_cursor = next.bump(); | ||
61 | let text = SmolStr::new("'".to_string() + &ident.to_string()); | ||
62 | |||
63 | return Some((res_cursor, text)); | ||
64 | } else { | ||
65 | panic!("Next token must be ident : {:#?}", next.token_tree()); | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | |||
70 | None | ||
71 | } | ||
72 | |||
53 | if pos < self.cached.borrow().len() { | 73 | if pos < self.cached.borrow().len() { |
54 | return Ref::map(self.cached.borrow(), |c| &c[pos]); | 74 | return Ref::map(self.cached.borrow(), |c| &c[pos]); |
55 | } | 75 | } |
@@ -63,6 +83,12 @@ impl<'a> SubtreeTokenSource<'a> { | |||
63 | continue; | 83 | continue; |
64 | } | 84 | } |
65 | 85 | ||
86 | if let Some((curr, text)) = is_lifetime(cursor) { | ||
87 | cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text })); | ||
88 | self.cached_cursor.set(curr); | ||
89 | continue; | ||
90 | } | ||
91 | |||
66 | match cursor.token_tree() { | 92 | match cursor.token_tree() { |
67 | Some(tt::TokenTree::Leaf(leaf)) => { | 93 | Some(tt::TokenTree::Leaf(leaf)) => { |
68 | cached.push(Some(convert_leaf(&leaf))); | 94 | cached.push(Some(convert_leaf(&leaf))); |
@@ -132,27 +158,28 @@ fn convert_literal(l: &tt::Literal) -> TtToken { | |||
132 | let kind = lex_single_syntax_kind(&l.text) | 158 | let kind = lex_single_syntax_kind(&l.text) |
133 | .map(|(kind, _error)| kind) | 159 | .map(|(kind, _error)| kind) |
134 | .filter(|kind| kind.is_literal()) | 160 | .filter(|kind| kind.is_literal()) |
135 | .unwrap_or_else(|| match l.text.as_ref() { | 161 | .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l)); |
136 | "true" => T![true], | ||
137 | "false" => T![false], | ||
138 | _ => panic!("Fail to convert given literal {:#?}", &l), | ||
139 | }); | ||
140 | 162 | ||
141 | TtToken { kind, is_joint_to_next: false, text: l.text.clone() } | 163 | TtToken { kind, is_joint_to_next: false, text: l.text.clone() } |
142 | } | 164 | } |
143 | 165 | ||
144 | fn convert_ident(ident: &tt::Ident) -> TtToken { | 166 | fn convert_ident(ident: &tt::Ident) -> TtToken { |
145 | let kind = if ident.text.starts_with('\'') { | 167 | let kind = match ident.text.as_ref() { |
146 | LIFETIME | 168 | "true" => T![true], |
147 | } else { | 169 | "false" => T![false], |
148 | SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT) | 170 | i if i.starts_with('\'') => LIFETIME, |
171 | _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT), | ||
149 | }; | 172 | }; |
150 | 173 | ||
151 | TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } | 174 | TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } |
152 | } | 175 | } |
153 | 176 | ||
154 | fn convert_punct(p: tt::Punct) -> TtToken { | 177 | fn convert_punct(p: tt::Punct) -> TtToken { |
155 | let kind = SyntaxKind::from_char(p.char).unwrap(); | 178 | let kind = match SyntaxKind::from_char(p.char) { |
179 | None => panic!("{:#?} is not a valid punct", p), | ||
180 | Some(kind) => kind, | ||
181 | }; | ||
182 | |||
156 | let text = { | 183 | let text = { |
157 | let mut buf = [0u8; 4]; | 184 | let mut buf = [0u8; 4]; |
158 | let s: &str = p.char.encode_utf8(&mut buf); | 185 | let s: &str = p.char.encode_utf8(&mut buf); |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 31e9b22e7..2b4390eb2 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -271,7 +271,7 @@ struct RawConvertor<'a> { | |||
271 | inner: std::slice::Iter<'a, RawToken>, | 271 | inner: std::slice::Iter<'a, RawToken>, |
272 | } | 272 | } |
273 | 273 | ||
274 | trait SrcToken { | 274 | trait SrcToken: std::fmt::Debug { |
275 | fn kind(&self) -> SyntaxKind; | 275 | fn kind(&self) -> SyntaxKind; |
276 | 276 | ||
277 | fn to_char(&self) -> Option<char>; | 277 | fn to_char(&self) -> Option<char>; |
@@ -361,8 +361,12 @@ trait TokenConvertor { | |||
361 | Some(next) if next.kind().is_punct() => tt::Spacing::Joint, | 361 | Some(next) if next.kind().is_punct() => tt::Spacing::Joint, |
362 | _ => tt::Spacing::Alone, | 362 | _ => tt::Spacing::Alone, |
363 | }; | 363 | }; |
364 | let char = token.to_char().expect("Token from lexer must be single char"); | 364 | let char = match token.to_char() { |
365 | 365 | Some(c) => c, | |
366 | None => { | ||
367 | panic!("Token from lexer must be single char: token = {:#?}", token); | ||
368 | } | ||
369 | }; | ||
366 | tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() | 370 | tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() |
367 | } | 371 | } |
368 | } else { | 372 | } else { |
@@ -372,10 +376,29 @@ trait TokenConvertor { | |||
372 | }; | 376 | }; |
373 | } | 377 | } |
374 | let leaf: tt::Leaf = match k { | 378 | let leaf: tt::Leaf = match k { |
375 | T![true] | T![false] => make_leaf!(Literal), | 379 | T![true] | T![false] => make_leaf!(Ident), |
376 | IDENT | LIFETIME => make_leaf!(Ident), | 380 | IDENT => make_leaf!(Ident), |
377 | k if k.is_keyword() => make_leaf!(Ident), | 381 | k if k.is_keyword() => make_leaf!(Ident), |
378 | k if k.is_literal() => make_leaf!(Literal), | 382 | k if k.is_literal() => make_leaf!(Literal), |
383 | LIFETIME => { | ||
384 | let char_unit = TextUnit::from_usize(1); | ||
385 | let r = TextRange::offset_len(range.start(), char_unit); | ||
386 | let apostrophe = tt::Leaf::from(tt::Punct { | ||
387 | char: '\'', | ||
388 | spacing: tt::Spacing::Joint, | ||
389 | id: self.id_alloc().alloc(r), | ||
390 | }); | ||
391 | result.push(apostrophe.into()); | ||
392 | |||
393 | let r = | ||
394 | TextRange::offset_len(range.start() + char_unit, range.len() - char_unit); | ||
395 | let ident = tt::Leaf::from(tt::Ident { | ||
396 | text: SmolStr::new(&token.to_text()[1..]), | ||
397 | id: self.id_alloc().alloc(r), | ||
398 | }); | ||
399 | result.push(ident.into()); | ||
400 | return; | ||
401 | } | ||
379 | _ => return, | 402 | _ => return, |
380 | }; | 403 | }; |
381 | 404 | ||
@@ -455,6 +478,7 @@ impl Convertor { | |||
455 | } | 478 | } |
456 | } | 479 | } |
457 | 480 | ||
481 | #[derive(Debug)] | ||
458 | enum SynToken { | 482 | enum SynToken { |
459 | Ordiniary(SyntaxToken), | 483 | Ordiniary(SyntaxToken), |
460 | Punch(SyntaxToken, TextUnit), | 484 | Punch(SyntaxToken, TextUnit), |
@@ -592,11 +616,14 @@ fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr { | |||
592 | } | 616 | } |
593 | 617 | ||
594 | impl<'a> TreeSink for TtTreeSink<'a> { | 618 | impl<'a> TreeSink for TtTreeSink<'a> { |
595 | fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { | 619 | fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { |
596 | if kind == L_DOLLAR || kind == R_DOLLAR { | 620 | if kind == L_DOLLAR || kind == R_DOLLAR { |
597 | self.cursor = self.cursor.bump_subtree(); | 621 | self.cursor = self.cursor.bump_subtree(); |
598 | return; | 622 | return; |
599 | } | 623 | } |
624 | if kind == LIFETIME { | ||
625 | n_tokens = 2; | ||
626 | } | ||
600 | 627 | ||
601 | let mut last = self.cursor; | 628 | let mut last = self.cursor; |
602 | for _ in 0..n_tokens { | 629 | for _ in 0..n_tokens { |
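The lifetime changes in matcher.rs, subtree_source.rs and syntax_bridge.rs above all follow one idea: a lifetime such as `'static` is no longer a single ident leaf but a joint `'` punct followed by an ident, so token sinks count one LIFETIME token as two leaves. Below is a minimal, self-contained sketch of that split and re-join; `Leaf` is a hypothetical stand-in for the real `tt` leaf types, not the actual API.

```rust
#[derive(Debug, Clone, PartialEq)]
enum Leaf {
    Punct(char),
    Ident(String),
}

// Splitting, as the syntax bridge now does: "'static" -> [Punct('\''), Ident("static")]
fn split_lifetime(text: &str) -> Vec<Leaf> {
    assert!(text.starts_with('\''));
    vec![Leaf::Punct('\''), Leaf::Ident(text[1..].to_string())]
}

// Re-joining, in the spirit of the matcher's expect_lifetime: a '\'' punct
// followed by an ident is glued back into a single lifetime.
fn join_lifetime(leaves: &[Leaf]) -> Option<(String, &[Leaf])> {
    match leaves {
        [Leaf::Punct('\''), Leaf::Ident(name), rest @ ..] => Some((format!("'{}", name), rest)),
        _ => None,
    }
}

fn main() {
    let leaves = split_lifetime("'static");
    assert_eq!(leaves, vec![Leaf::Punct('\''), Leaf::Ident("static".to_string())]);

    let (lifetime, rest) = join_lifetime(&leaves).unwrap();
    assert_eq!(lifetime, "'static");
    assert!(rest.is_empty()); // one LIFETIME consumed two leaves
}
```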
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs index 5d1274d21..100ed41f2 100644 --- a/crates/ra_mbe/src/tests.rs +++ b/crates/ra_mbe/src/tests.rs | |||
@@ -215,6 +215,33 @@ SUBTREE $ | |||
215 | } | 215 | } |
216 | 216 | ||
217 | #[test] | 217 | #[test] |
218 | fn test_lifetime_split() { | ||
219 | parse_macro( | ||
220 | r#" | ||
221 | macro_rules! foo { | ||
222 | ($($t:tt)*) => { $($t)*} | ||
223 | } | ||
224 | "#, | ||
225 | ) | ||
226 | .assert_expand( | ||
227 | r#"foo!(static bar: &'static str = "hello";);"#, | ||
228 | r#" | ||
229 | SUBTREE $ | ||
230 | IDENT static 17 | ||
231 | IDENT bar 18 | ||
232 | PUNCH : [alone] 19 | ||
233 | PUNCH & [alone] 20 | ||
234 | PUNCH ' [joint] 21 | ||
235 | IDENT static 22 | ||
236 | IDENT str 23 | ||
237 | PUNCH = [alone] 24 | ||
238 | LITERAL "hello" 25 | ||
239 | PUNCH ; [joint] 26 | ||
240 | "#, | ||
241 | ); | ||
242 | } | ||
243 | |||
244 | #[test] | ||
218 | fn test_expr_order() { | 245 | fn test_expr_order() { |
219 | let expanded = parse_macro( | 246 | let expanded = parse_macro( |
220 | r#" | 247 | r#" |
@@ -989,6 +1016,36 @@ fn test_literal() { | |||
989 | } | 1016 | } |
990 | 1017 | ||
991 | #[test] | 1018 | #[test] |
1019 | fn test_boolean_is_ident() { | ||
1020 | parse_macro( | ||
1021 | r#" | ||
1022 | macro_rules! foo { | ||
1023 | ($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); }; | ||
1024 | } | ||
1025 | "#, | ||
1026 | ) | ||
1027 | .assert_expand( | ||
1028 | r#"foo!(true,false);"#, | ||
1029 | r#" | ||
1030 | SUBTREE $ | ||
1031 | IDENT const 14 | ||
1032 | IDENT VALUE 15 | ||
1033 | PUNCH : [alone] 16 | ||
1034 | SUBTREE () 17 | ||
1035 | IDENT bool 18 | ||
1036 | PUNCH , [alone] 19 | ||
1037 | IDENT bool 20 | ||
1038 | PUNCH = [alone] 21 | ||
1039 | SUBTREE () 22 | ||
1040 | IDENT true 29 | ||
1041 | PUNCH , [joint] 25 | ||
1042 | IDENT false 31 | ||
1043 | PUNCH ; [alone] 28 | ||
1044 | "#, | ||
1045 | ); | ||
1046 | } | ||
1047 | |||
1048 | #[test] | ||
992 | fn test_vis() { | 1049 | fn test_vis() { |
993 | parse_macro( | 1050 | parse_macro( |
994 | r#" | 1051 | r#" |
diff --git a/crates/ra_mbe/src/tt_iter.rs b/crates/ra_mbe/src/tt_iter.rs index 100184e66..46c420718 100644 --- a/crates/ra_mbe/src/tt_iter.rs +++ b/crates/ra_mbe/src/tt_iter.rs | |||
@@ -40,9 +40,11 @@ impl<'a> TtIter<'a> { | |||
40 | } | 40 | } |
41 | } | 41 | } |
42 | 42 | ||
43 | pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Literal, ()> { | 43 | pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> { |
44 | match self.expect_leaf()? { | 44 | let it = self.expect_leaf()?; |
45 | tt::Leaf::Literal(it) => Ok(it), | 45 | match it { |
46 | tt::Leaf::Literal(_) => Ok(it), | ||
47 | tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), | ||
46 | _ => Err(()), | 48 | _ => Err(()), |
47 | } | 49 | } |
48 | } | 50 | } |
diff --git a/crates/ra_proc_macro/src/lib.rs b/crates/ra_proc_macro/src/lib.rs index b200fd126..004943b9e 100644 --- a/crates/ra_proc_macro/src/lib.rs +++ b/crates/ra_proc_macro/src/lib.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | //! | 2 | //! |
3 | //! We separate proc-macro expanding logic to an extern program to allow | 3 | //! We separate proc-macro expanding logic to an extern program to allow |
4 | //! different implementations (e.g. wasm or dylib loading). And this crate | 4 | //! different implementations (e.g. wasm or dylib loading). And this crate |
5 | //! is used to provide basic infrastructure for communication between two | 5 | //! is used to provide basic infrastructure for communication between two |
6 | //! processes: Client (RA itself), Server (the external program) | 6 | //! processes: Client (RA itself), Server (the external program) |
7 | 7 | ||
8 | mod rpc; | 8 | mod rpc; |
@@ -13,6 +13,7 @@ use process::{ProcMacroProcessSrv, ProcMacroProcessThread}; | |||
13 | use ra_tt::{SmolStr, Subtree}; | 13 | use ra_tt::{SmolStr, Subtree}; |
14 | use std::{ | 14 | use std::{ |
15 | ffi::OsStr, | 15 | ffi::OsStr, |
16 | io, | ||
16 | path::{Path, PathBuf}, | 17 | path::{Path, PathBuf}, |
17 | sync::Arc, | 18 | sync::Arc, |
18 | }; | 19 | }; |
@@ -57,14 +58,10 @@ pub struct ProcMacroClient { | |||
57 | } | 58 | } |
58 | 59 | ||
59 | impl ProcMacroClient { | 60 | impl ProcMacroClient { |
60 | pub fn extern_process<I, S>( | 61 | pub fn extern_process( |
61 | process_path: &Path, | 62 | process_path: PathBuf, |
62 | args: I, | 63 | args: impl IntoIterator<Item = impl AsRef<OsStr>>, |
63 | ) -> Result<ProcMacroClient, std::io::Error> | 64 | ) -> io::Result<ProcMacroClient> { |
64 | where | ||
65 | I: IntoIterator<Item = S>, | ||
66 | S: AsRef<OsStr>, | ||
67 | { | ||
68 | let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; | 65 | let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; |
69 | Ok(ProcMacroClient { | 66 | Ok(ProcMacroClient { |
70 | kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, | 67 | kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, |
@@ -84,7 +81,7 @@ impl ProcMacroClient { | |||
84 | ProcMacroClientKind::Process { process, .. } => { | 81 | ProcMacroClientKind::Process { process, .. } => { |
85 | let macros = match process.find_proc_macros(dylib_path) { | 82 | let macros = match process.find_proc_macros(dylib_path) { |
86 | Err(err) => { | 83 | Err(err) => { |
87 | eprintln!("Fail to find proc macro. Error: {:#?}", err); | 84 | eprintln!("Failed to find proc macros. Error: {:#?}", err); |
88 | return vec![]; | 85 | return vec![]; |
89 | } | 86 | } |
90 | Ok(macros) => macros, | 87 | Ok(macros) => macros, |
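For comparison, a small stand-alone sketch of the `extern_process` signature change: the explicit `I`/`S` type parameters with a where-clause are replaced by the equivalent `impl IntoIterator<Item = impl AsRef<OsStr>>` form, and the path is now taken by value. `run_server_old` and `run_server_new` are hypothetical stand-ins, not the real API.

```rust
use std::ffi::OsStr;
use std::io;
use std::path::{Path, PathBuf};

// Old style: explicit type parameters and a where-clause.
fn run_server_old<I, S>(process_path: &Path, args: I) -> io::Result<()>
where
    I: IntoIterator<Item = S>,
    S: AsRef<OsStr>,
{
    let _ = (process_path, args.into_iter().count());
    Ok(())
}

// New style: the same bounds spelled with `impl Trait`, path taken by value.
fn run_server_new(
    process_path: PathBuf,
    args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> io::Result<()> {
    let _ = (process_path, args.into_iter().count());
    Ok(())
}

fn main() -> io::Result<()> {
    run_server_old(Path::new("srv"), &["--flag"])?;
    run_server_new(PathBuf::from("srv"), &["--flag"])
}
```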
diff --git a/crates/ra_proc_macro/src/msg.rs b/crates/ra_proc_macro/src/msg.rs index aa95bcc8f..95d9b8804 100644 --- a/crates/ra_proc_macro/src/msg.rs +++ b/crates/ra_proc_macro/src/msg.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! Defines messages for cross-process message based on `ndjson` wire protocol | 1 | //! Defines messages for cross-process message passing based on `ndjson` wire protocol |
2 | 2 | ||
3 | use std::{ | 3 | use std::{ |
4 | convert::TryFrom, | 4 | convert::TryFrom, |
@@ -31,7 +31,7 @@ macro_rules! impl_try_from_response { | |||
31 | fn try_from(value: Response) -> Result<Self, Self::Error> { | 31 | fn try_from(value: Response) -> Result<Self, Self::Error> { |
32 | match value { | 32 | match value { |
33 | Response::$tag(res) => Ok(res), | 33 | Response::$tag(res) => Ok(res), |
34 | _ => Err("Fail to convert from response"), | 34 | _ => Err(concat!("Failed to convert response to ", stringify!($tag))), |
35 | } | 35 | } |
36 | } | 36 | } |
37 | } | 37 | } |
@@ -53,18 +53,16 @@ pub enum ErrorCode { | |||
53 | ExpansionError, | 53 | ExpansionError, |
54 | } | 54 | } |
55 | 55 | ||
56 | pub trait Message: Sized + Serialize + DeserializeOwned { | 56 | pub trait Message: Serialize + DeserializeOwned { |
57 | fn read(r: &mut impl BufRead) -> io::Result<Option<Self>> { | 57 | fn read(inp: &mut impl BufRead) -> io::Result<Option<Self>> { |
58 | let text = match read_json(r)? { | 58 | Ok(match read_json(inp)? { |
59 | None => return Ok(None), | 59 | None => None, |
60 | Some(text) => text, | 60 | Some(text) => Some(serde_json::from_str(&text)?), |
61 | }; | 61 | }) |
62 | let msg = serde_json::from_str(&text)?; | ||
63 | Ok(Some(msg)) | ||
64 | } | 62 | } |
65 | fn write(self, w: &mut impl Write) -> io::Result<()> { | 63 | fn write(self, out: &mut impl Write) -> io::Result<()> { |
66 | let text = serde_json::to_string(&self)?; | 64 | let text = serde_json::to_string(&self)?; |
67 | write_json(w, &text) | 65 | write_json(out, &text) |
68 | } | 66 | } |
69 | } | 67 | } |
70 | 68 | ||
@@ -73,15 +71,12 @@ impl Message for Response {} | |||
73 | 71 | ||
74 | fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> { | 72 | fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> { |
75 | let mut buf = String::new(); | 73 | let mut buf = String::new(); |
76 | if inp.read_line(&mut buf)? == 0 { | 74 | inp.read_line(&mut buf)?; |
77 | return Ok(None); | 75 | buf.pop(); // Remove traling '\n' |
78 | } | 76 | Ok(match buf.len() { |
79 | // Remove ending '\n' | 77 | 0 => None, |
80 | let buf = &buf[..buf.len() - 1]; | 78 | _ => Some(buf), |
81 | if buf.is_empty() { | 79 | }) |
82 | return Ok(None); | ||
83 | } | ||
84 | Ok(Some(buf.to_string())) | ||
85 | } | 80 | } |
86 | 81 | ||
87 | fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { | 82 | fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { |
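The `Message` trait above frames requests and responses as newline-delimited JSON: one serialized document per line. A minimal sketch of that framing over an in-memory buffer, with a literal JSON string standing in for the real serialized `Request`/`Response` types:

```rust
use std::io::{self, BufRead, Write};

// One JSON document per line on the wire.
fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
    out.write_all(msg.as_bytes())?;
    out.write_all(b"\n")?;
    out.flush()
}

fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> {
    let mut buf = String::new();
    inp.read_line(&mut buf)?;
    buf.pop(); // drop the trailing '\n', as the code above does
    Ok(if buf.is_empty() { None } else { Some(buf) })
}

fn main() -> io::Result<()> {
    // Round-trip through an in-memory buffer instead of a child's stdin/stdout.
    let mut wire = Vec::new();
    write_json(&mut wire, r#"{"kind":"ListMacro"}"#)?;

    let mut reader = io::BufReader::new(&wire[..]);
    let line = read_json(&mut reader)?;
    assert_eq!(line.as_deref(), Some(r#"{"kind":"ListMacro"}"#));
    Ok(())
}
```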
diff --git a/crates/ra_proc_macro/src/process.rs b/crates/ra_proc_macro/src/process.rs index f851570bc..673f80a7a 100644 --- a/crates/ra_proc_macro/src/process.rs +++ b/crates/ra_proc_macro/src/process.rs | |||
@@ -9,7 +9,7 @@ use crate::rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTas | |||
9 | use io::{BufRead, BufReader}; | 9 | use io::{BufRead, BufReader}; |
10 | use std::{ | 10 | use std::{ |
11 | convert::{TryFrom, TryInto}, | 11 | convert::{TryFrom, TryInto}, |
12 | ffi::OsStr, | 12 | ffi::{OsStr, OsString}, |
13 | io::{self, Write}, | 13 | io::{self, Write}, |
14 | path::{Path, PathBuf}, | 14 | path::{Path, PathBuf}, |
15 | process::{Child, Command, Stdio}, | 15 | process::{Child, Command, Stdio}, |
@@ -28,66 +28,11 @@ pub(crate) struct ProcMacroProcessThread { | |||
28 | handle: jod_thread::JoinHandle<()>, | 28 | handle: jod_thread::JoinHandle<()>, |
29 | } | 29 | } |
30 | 30 | ||
31 | struct Task { | ||
32 | req: Request, | ||
33 | result_tx: Sender<Option<Response>>, | ||
34 | } | ||
35 | |||
36 | struct Process { | ||
37 | path: PathBuf, | ||
38 | child: Child, | ||
39 | } | ||
40 | |||
41 | impl Drop for Process { | ||
42 | fn drop(&mut self) { | ||
43 | let _ = self.child.kill(); | ||
44 | } | ||
45 | } | ||
46 | |||
47 | impl Process { | ||
48 | fn run<I, S>(process_path: &Path, args: I) -> Result<Process, io::Error> | ||
49 | where | ||
50 | I: IntoIterator<Item = S>, | ||
51 | S: AsRef<OsStr>, | ||
52 | { | ||
53 | let child = Command::new(process_path.clone()) | ||
54 | .args(args) | ||
55 | .stdin(Stdio::piped()) | ||
56 | .stdout(Stdio::piped()) | ||
57 | .stderr(Stdio::null()) | ||
58 | .spawn()?; | ||
59 | |||
60 | Ok(Process { path: process_path.into(), child }) | ||
61 | } | ||
62 | |||
63 | fn restart(&mut self) -> Result<(), io::Error> { | ||
64 | let _ = self.child.kill(); | ||
65 | self.child = Command::new(self.path.clone()) | ||
66 | .stdin(Stdio::piped()) | ||
67 | .stdout(Stdio::piped()) | ||
68 | .stderr(Stdio::null()) | ||
69 | .spawn()?; | ||
70 | Ok(()) | ||
71 | } | ||
72 | |||
73 | fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> { | ||
74 | let stdin = self.child.stdin.take()?; | ||
75 | let stdout = self.child.stdout.take()?; | ||
76 | let read = BufReader::new(stdout); | ||
77 | |||
78 | Some((stdin, read)) | ||
79 | } | ||
80 | } | ||
81 | |||
82 | impl ProcMacroProcessSrv { | 31 | impl ProcMacroProcessSrv { |
83 | pub fn run<I, S>( | 32 | pub fn run( |
84 | process_path: &Path, | 33 | process_path: PathBuf, |
85 | args: I, | 34 | args: impl IntoIterator<Item = impl AsRef<OsStr>>, |
86 | ) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error> | 35 | ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> { |
87 | where | ||
88 | I: IntoIterator<Item = S>, | ||
89 | S: AsRef<OsStr>, | ||
90 | { | ||
91 | let process = Process::run(process_path, args)?; | 36 | let process = Process::run(process_path, args)?; |
92 | 37 | ||
93 | let (task_tx, task_rx) = bounded(0); | 38 | let (task_tx, task_rx) = bounded(0); |
@@ -197,11 +142,62 @@ fn client_loop(task_rx: Receiver<Task>, mut process: Process) { | |||
197 | } | 142 | } |
198 | } | 143 | } |
199 | 144 | ||
145 | struct Task { | ||
146 | req: Request, | ||
147 | result_tx: Sender<Option<Response>>, | ||
148 | } | ||
149 | |||
150 | struct Process { | ||
151 | path: PathBuf, | ||
152 | args: Vec<OsString>, | ||
153 | child: Child, | ||
154 | } | ||
155 | |||
156 | impl Drop for Process { | ||
157 | fn drop(&mut self) { | ||
158 | let _ = self.child.kill(); | ||
159 | } | ||
160 | } | ||
161 | |||
162 | impl Process { | ||
163 | fn run( | ||
164 | path: PathBuf, | ||
165 | args: impl IntoIterator<Item = impl AsRef<OsStr>>, | ||
166 | ) -> io::Result<Process> { | ||
167 | let args = args.into_iter().map(|s| s.as_ref().into()).collect(); | ||
168 | let child = mk_child(&path, &args)?; | ||
169 | Ok(Process { path, args, child }) | ||
170 | } | ||
171 | |||
172 | fn restart(&mut self) -> io::Result<()> { | ||
173 | let _ = self.child.kill(); | ||
174 | self.child = mk_child(&self.path, &self.args)?; | ||
175 | Ok(()) | ||
176 | } | ||
177 | |||
178 | fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> { | ||
179 | let stdin = self.child.stdin.take()?; | ||
180 | let stdout = self.child.stdout.take()?; | ||
181 | let read = BufReader::new(stdout); | ||
182 | |||
183 | Some((stdin, read)) | ||
184 | } | ||
185 | } | ||
186 | |||
187 | fn mk_child(path: &Path, args: impl IntoIterator<Item = impl AsRef<OsStr>>) -> io::Result<Child> { | ||
188 | Command::new(&path) | ||
189 | .args(args) | ||
190 | .stdin(Stdio::piped()) | ||
191 | .stdout(Stdio::piped()) | ||
192 | .stderr(Stdio::null()) | ||
193 | .spawn() | ||
194 | } | ||
195 | |||
200 | fn send_request( | 196 | fn send_request( |
201 | mut writer: &mut impl Write, | 197 | mut writer: &mut impl Write, |
202 | mut reader: &mut impl BufRead, | 198 | mut reader: &mut impl BufRead, |
203 | req: Request, | 199 | req: Request, |
204 | ) -> Result<Option<Response>, io::Error> { | 200 | ) -> io::Result<Option<Response>> { |
205 | req.write(&mut writer)?; | 201 | req.write(&mut writer)?; |
206 | Ok(Response::read(&mut reader)?) | 202 | Ok(Response::read(&mut reader)?) |
207 | } | 203 | } |
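The reorganized `Process` keeps the spawn arguments as owned `OsString`s so that `restart` can replay the same command line through one `mk_child`-style helper. A self-contained sketch of that pattern follows; `Server` is a hypothetical stand-in, and the binary name used in `main` is made up, so spawning will normally fail.

```rust
use std::ffi::{OsStr, OsString};
use std::io;
use std::path::{Path, PathBuf};
use std::process::{Child, Command, Stdio};

struct Server {
    path: PathBuf,
    args: Vec<OsString>,
    child: Child,
}

fn spawn(path: &Path, args: &[OsString]) -> io::Result<Child> {
    Command::new(path)
        .args(args)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .spawn()
}

impl Server {
    fn run(path: PathBuf, args: impl IntoIterator<Item = impl AsRef<OsStr>>) -> io::Result<Server> {
        // Keep the args as owned OsStrings so restart() can replay them.
        let args: Vec<OsString> = args.into_iter().map(|s| s.as_ref().to_owned()).collect();
        let child = spawn(&path, &args)?;
        Ok(Server { path, args, child })
    }

    fn restart(&mut self) -> io::Result<()> {
        let _ = self.child.kill();
        self.child = spawn(&self.path, &self.args)?;
        Ok(())
    }
}

impl Drop for Server {
    fn drop(&mut self) {
        let _ = self.child.kill();
    }
}

fn main() {
    // "ra-proc-macro-srv" is a made-up binary name; failing to spawn is fine here.
    match Server::run(PathBuf::from("ra-proc-macro-srv"), &["--help"]) {
        Ok(mut srv) => {
            let _ = srv.restart();
        }
        Err(err) => eprintln!("failed to spawn: {}", err),
    }
}
```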
diff --git a/crates/ra_proc_macro/src/rpc.rs b/crates/ra_proc_macro/src/rpc.rs index 66b3f55db..4ce485926 100644 --- a/crates/ra_proc_macro/src/rpc.rs +++ b/crates/ra_proc_macro/src/rpc.rs | |||
@@ -1,9 +1,9 @@ | |||
1 | //! Data struture serialization related stuffs for RPC | 1 | //! Data struture serialization related stuff for RPC |
2 | //! | 2 | //! |
3 | //! Define all necessary rpc serialization data structure, | 3 | //! Defines all necessary rpc serialization data structures, |
4 | //! which include ra_tt related data and some task messages. | 4 | //! which includes `ra_tt` related data and some task messages. |
5 | //! Although adding Serialize and Deserialize trait to ra_tt directly seem to be much easier, | 5 | //! Although adding `Serialize` and `Deserialize` traits to `ra_tt` directly seems |
6 | //! we deliberately duplicate the ra_tt struct with #[serde(with = "XXDef")] | 6 | //! to be much easier, we deliberately duplicate `ra_tt` structs with `#[serde(with = "XXDef")]` |
7 | //! for separation of code responsibility. | 7 | //! for separation of code responsibility. |
8 | 8 | ||
9 | use ra_tt::{ | 9 | use ra_tt::{ |
@@ -34,15 +34,15 @@ pub struct ListMacrosResult { | |||
34 | pub struct ExpansionTask { | 34 | pub struct ExpansionTask { |
35 | /// Argument of macro call. | 35 | /// Argument of macro call. |
36 | /// | 36 | /// |
37 | /// In custom derive that would be a struct or enum; in attribute-like macro - underlying | 37 | /// In custom derive this will be a struct or enum; in attribute-like macro - underlying |
38 | /// item; in function-like macro - the macro body. | 38 | /// item; in function-like macro - the macro body. |
39 | #[serde(with = "SubtreeDef")] | 39 | #[serde(with = "SubtreeDef")] |
40 | pub macro_body: Subtree, | 40 | pub macro_body: Subtree, |
41 | 41 | ||
42 | /// Names of macros to expand. | 42 | /// Name of macro to expand. |
43 | /// | 43 | /// |
44 | /// In custom derive those are names of derived traits (`Serialize`, `Getters`, etc.). In | 44 | /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.). |
45 | /// attribute-like and functiona-like macros - single name of macro itself (`show_streams`). | 45 | /// In attribute-like and function-like macros - single name of macro itself (`show_streams`). |
46 | pub macro_name: String, | 46 | pub macro_name: String, |
47 | 47 | ||
48 | /// Possible attributes for the attribute-like macros. | 48 | /// Possible attributes for the attribute-like macros. |
diff --git a/crates/ra_proc_macro_srv/src/cli.rs b/crates/ra_proc_macro_srv/src/cli.rs index c771f2b38..5f1f3ba3c 100644 --- a/crates/ra_proc_macro_srv/src/cli.rs +++ b/crates/ra_proc_macro_srv/src/cli.rs | |||
@@ -2,55 +2,43 @@ | |||
2 | 2 | ||
3 | use crate::{expand_task, list_macros}; | 3 | use crate::{expand_task, list_macros}; |
4 | use ra_proc_macro::msg::{self, Message}; | 4 | use ra_proc_macro::msg::{self, Message}; |
5 | |||
6 | use std::io; | 5 | use std::io; |
7 | 6 | ||
8 | fn read_request() -> Result<Option<msg::Request>, io::Error> { | ||
9 | let stdin = io::stdin(); | ||
10 | let mut stdin = stdin.lock(); | ||
11 | msg::Request::read(&mut stdin) | ||
12 | } | ||
13 | |||
14 | fn write_response(res: Result<msg::Response, String>) -> Result<(), io::Error> { | ||
15 | let msg: msg::Response = match res { | ||
16 | Ok(res) => res, | ||
17 | Err(err) => msg::Response::Error(msg::ResponseError { | ||
18 | code: msg::ErrorCode::ExpansionError, | ||
19 | message: err, | ||
20 | }), | ||
21 | }; | ||
22 | |||
23 | let stdout = io::stdout(); | ||
24 | let mut stdout = stdout.lock(); | ||
25 | msg.write(&mut stdout) | ||
26 | } | ||
27 | |||
28 | pub fn run() { | 7 | pub fn run() { |
29 | loop { | 8 | loop { |
30 | let req = match read_request() { | 9 | let req = match read_request() { |
31 | Err(err) => { | 10 | Err(err) => { |
32 | eprintln!("Read message error on ra_proc_macro_srv: {}", err.to_string()); | 11 | eprintln!("Read message error on ra_proc_macro_srv: {}", err); |
33 | continue; | 12 | continue; |
34 | } | 13 | } |
35 | Ok(None) => continue, | 14 | Ok(None) => continue, |
36 | Ok(Some(req)) => req, | 15 | Ok(Some(req)) => req, |
37 | }; | 16 | }; |
38 | 17 | ||
39 | match req { | 18 | let res = match req { |
40 | msg::Request::ListMacro(task) => { | 19 | msg::Request::ListMacro(task) => Ok(msg::Response::ListMacro(list_macros(&task))), |
41 | if let Err(err) = | ||
42 | write_response(list_macros(&task).map(|it| msg::Response::ListMacro(it))) | ||
43 | { | ||
44 | eprintln!("Write message error on list macro: {}", err); | ||
45 | } | ||
46 | } | ||
47 | msg::Request::ExpansionMacro(task) => { | 20 | msg::Request::ExpansionMacro(task) => { |
48 | if let Err(err) = | 21 | expand_task(&task).map(msg::Response::ExpansionMacro) |
49 | write_response(expand_task(&task).map(|it| msg::Response::ExpansionMacro(it))) | ||
50 | { | ||
51 | eprintln!("Write message error on expansion macro: {}", err); | ||
52 | } | ||
53 | } | 22 | } |
23 | }; | ||
24 | |||
25 | let msg = res.unwrap_or_else(|err| { | ||
26 | msg::Response::Error(msg::ResponseError { | ||
27 | code: msg::ErrorCode::ExpansionError, | ||
28 | message: err, | ||
29 | }) | ||
30 | }); | ||
31 | |||
32 | if let Err(err) = write_response(msg) { | ||
33 | eprintln!("Write message error: {}", err); | ||
54 | } | 34 | } |
55 | } | 35 | } |
56 | } | 36 | } |
37 | |||
38 | fn read_request() -> io::Result<Option<msg::Request>> { | ||
39 | msg::Request::read(&mut io::stdin().lock()) | ||
40 | } | ||
41 | |||
42 | fn write_response(msg: msg::Response) -> io::Result<()> { | ||
43 | msg.write(&mut io::stdout().lock()) | ||
44 | } | ||
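The rewritten loop in cli.rs makes every request branch produce a `Result`, converts an `Err` into a single error response, and writes the reply in exactly one place. A compact model of that flow, using hypothetical `Request`/`Response` stand-ins rather than the real msg types:

```rust
#[derive(Debug)]
enum Request {
    ListMacro,
    Expand,
}

#[derive(Debug)]
enum Response {
    ListMacro(Vec<String>),
    Error(String),
}

fn handle(req: Request) -> Response {
    // Every branch yields a Result; one unwrap_or_else turns an Err into the
    // error response, so the reply is produced in exactly one place.
    let res: Result<Response, String> = match req {
        Request::ListMacro => Ok(Response::ListMacro(vec!["Serialize".to_string()])),
        Request::Expand => Err("Nothing to expand".to_string()),
    };
    res.unwrap_or_else(Response::Error)
}

fn main() {
    println!("{:?}", handle(Request::ListMacro)); // ListMacro(["Serialize"])
    println!("{:?}", handle(Request::Expand)); // Error("Nothing to expand")
}
```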
diff --git a/crates/ra_proc_macro_srv/src/dylib.rs b/crates/ra_proc_macro_srv/src/dylib.rs index 16bd7466e..d202eb0fd 100644 --- a/crates/ra_proc_macro_srv/src/dylib.rs +++ b/crates/ra_proc_macro_srv/src/dylib.rs | |||
@@ -9,43 +9,37 @@ use libloading::Library; | |||
9 | use memmap::Mmap; | 9 | use memmap::Mmap; |
10 | use ra_proc_macro::ProcMacroKind; | 10 | use ra_proc_macro::ProcMacroKind; |
11 | 11 | ||
12 | use std::io::Error as IoError; | 12 | use std::io; |
13 | use std::io::ErrorKind as IoErrorKind; | ||
14 | 13 | ||
15 | const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; | 14 | const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; |
16 | 15 | ||
17 | fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> IoError { | 16 | fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error { |
18 | IoError::new(IoErrorKind::InvalidData, e) | 17 | io::Error::new(io::ErrorKind::InvalidData, e) |
19 | } | 18 | } |
20 | 19 | ||
21 | fn is_derive_registrar_symbol(symbol: &str) -> bool { | 20 | fn is_derive_registrar_symbol(symbol: &str) -> bool { |
22 | symbol.contains(NEW_REGISTRAR_SYMBOL) | 21 | symbol.contains(NEW_REGISTRAR_SYMBOL) |
23 | } | 22 | } |
24 | 23 | ||
25 | fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> { | 24 | fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> { |
26 | let file = File::open(file)?; | 25 | let file = File::open(file)?; |
27 | let buffer = unsafe { Mmap::map(&file)? }; | 26 | let buffer = unsafe { Mmap::map(&file)? }; |
28 | let object = Object::parse(&buffer).map_err(invalid_data_err)?; | 27 | let object = Object::parse(&buffer).map_err(invalid_data_err)?; |
29 | 28 | ||
30 | match object { | 29 | let name = match object { |
31 | Object::Elf(elf) => { | 30 | Object::Elf(elf) => { |
32 | let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?; | 31 | let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?; |
33 | let name = | 32 | symbols.into_iter().find(|s| is_derive_registrar_symbol(s)).map(&str::to_owned) |
34 | symbols.iter().find(|s| is_derive_registrar_symbol(s)).map(|s| s.to_string()); | ||
35 | Ok(name) | ||
36 | } | ||
37 | Object::PE(pe) => { | ||
38 | let name = pe | ||
39 | .exports | ||
40 | .iter() | ||
41 | .flat_map(|s| s.name) | ||
42 | .find(|s| is_derive_registrar_symbol(s)) | ||
43 | .map(|s| s.to_string()); | ||
44 | Ok(name) | ||
45 | } | 33 | } |
34 | Object::PE(pe) => pe | ||
35 | .exports | ||
36 | .iter() | ||
37 | .flat_map(|s| s.name) | ||
38 | .find(|s| is_derive_registrar_symbol(s)) | ||
39 | .map(&str::to_owned), | ||
46 | Object::Mach(Mach::Binary(binary)) => { | 40 | Object::Mach(Mach::Binary(binary)) => { |
47 | let exports = binary.exports().map_err(invalid_data_err)?; | 41 | let exports = binary.exports().map_err(invalid_data_err)?; |
48 | let name = exports | 42 | exports |
49 | .iter() | 43 | .iter() |
50 | .map(|s| { | 44 | .map(|s| { |
51 | // In macos doc: | 45 | // In macos doc: |
@@ -59,11 +53,11 @@ fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> { | |||
59 | } | 53 | } |
60 | }) | 54 | }) |
61 | .find(|s| is_derive_registrar_symbol(s)) | 55 | .find(|s| is_derive_registrar_symbol(s)) |
62 | .map(|s| s.to_string()); | 56 | .map(&str::to_owned) |
63 | Ok(name) | ||
64 | } | 57 | } |
65 | _ => Ok(None), | 58 | _ => return Ok(None), |
66 | } | 59 | }; |
60 | return Ok(name); | ||
67 | } | 61 | } |
68 | 62 | ||
69 | /// Loads dynamic library in platform dependent manner. | 63 | /// Loads dynamic library in platform dependent manner. |
@@ -93,15 +87,16 @@ fn load_library(file: &Path) -> Result<Library, libloading::Error> { | |||
93 | } | 87 | } |
94 | 88 | ||
95 | struct ProcMacroLibraryLibloading { | 89 | struct ProcMacroLibraryLibloading { |
96 | // Hold the dylib to prevent it for unloadeding | 90 | // Hold the dylib to prevent it from unloading |
97 | _lib: Library, | 91 | _lib: Library, |
98 | exported_macros: Vec<bridge::client::ProcMacro>, | 92 | exported_macros: Vec<bridge::client::ProcMacro>, |
99 | } | 93 | } |
100 | 94 | ||
101 | impl ProcMacroLibraryLibloading { | 95 | impl ProcMacroLibraryLibloading { |
102 | fn open(file: &Path) -> Result<Self, IoError> { | 96 | fn open(file: &Path) -> io::Result<Self> { |
103 | let symbol_name = find_registrar_symbol(file)? | 97 | let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| { |
104 | .ok_or(invalid_data_err(format!("Cannot find registrar symbol in file {:?}", file)))?; | 98 | invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display())) |
99 | })?; | ||
105 | 100 | ||
106 | let lib = load_library(file).map_err(invalid_data_err)?; | 101 | let lib = load_library(file).map_err(invalid_data_err)?; |
107 | let exported_macros = { | 102 | let exported_macros = { |
@@ -121,18 +116,16 @@ pub struct Expander { | |||
121 | } | 116 | } |
122 | 117 | ||
123 | impl Expander { | 118 | impl Expander { |
124 | pub fn new<P: AsRef<Path>>(lib: &P) -> Result<Expander, String> { | 119 | pub fn new(lib: &Path) -> Result<Expander, String> { |
125 | let mut libs = vec![]; | 120 | // Some libraries for dynamic loading require canonicalized path even when it is |
126 | /* Some libraries for dynamic loading require canonicalized path (even when it is | 121 | // already absolute |
127 | already absolute | 122 | let lib = lib |
128 | */ | 123 | .canonicalize() |
129 | let lib = | 124 | .unwrap_or_else(|err| panic!("Cannot canonicalize {}: {:?}", lib.display(), err)); |
130 | lib.as_ref().canonicalize().expect(&format!("Cannot canonicalize {:?}", lib.as_ref())); | ||
131 | 125 | ||
132 | let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?; | 126 | let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?; |
133 | libs.push(library); | ||
134 | 127 | ||
135 | Ok(Expander { libs }) | 128 | Ok(Expander { libs: vec![library] }) |
136 | } | 129 | } |
137 | 130 | ||
138 | pub fn expand( | 131 | pub fn expand( |
@@ -176,7 +169,6 @@ impl Expander { | |||
176 | parsed_attributes, | 169 | parsed_attributes, |
177 | parsed_body, | 170 | parsed_body, |
178 | ); | 171 | ); |
179 | |||
180 | return res.map(|it| it.subtree); | 172 | return res.map(|it| it.subtree); |
181 | } | 173 | } |
182 | _ => continue, | 174 | _ => continue, |
@@ -187,26 +179,21 @@ impl Expander { | |||
187 | Err(bridge::PanicMessage::String("Nothing to expand".to_string())) | 179 | Err(bridge::PanicMessage::String("Nothing to expand".to_string())) |
188 | } | 180 | } |
189 | 181 | ||
190 | pub fn list_macros(&self) -> Result<Vec<(String, ProcMacroKind)>, bridge::PanicMessage> { | 182 | pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { |
191 | let mut result = vec![]; | 183 | self.libs |
192 | 184 | .iter() | |
193 | for lib in &self.libs { | 185 | .flat_map(|it| &it.exported_macros) |
194 | for proc_macro in &lib.exported_macros { | 186 | .map(|proc_macro| match proc_macro { |
195 | let res = match proc_macro { | 187 | bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { |
196 | bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { | 188 | (trait_name.to_string(), ProcMacroKind::CustomDerive) |
197 | (trait_name.to_string(), ProcMacroKind::CustomDerive) | 189 | } |
198 | } | 190 | bridge::client::ProcMacro::Bang { name, .. } => { |
199 | bridge::client::ProcMacro::Bang { name, .. } => { | 191 | (name.to_string(), ProcMacroKind::FuncLike) |
200 | (name.to_string(), ProcMacroKind::FuncLike) | 192 | } |
201 | } | 193 | bridge::client::ProcMacro::Attr { name, .. } => { |
202 | bridge::client::ProcMacro::Attr { name, .. } => { | 194 | (name.to_string(), ProcMacroKind::Attr) |
203 | (name.to_string(), ProcMacroKind::Attr) | 195 | } |
204 | } | 196 | }) |
205 | }; | 197 | .collect() |
206 | result.push(res); | ||
207 | } | ||
208 | } | ||
209 | |||
210 | Ok(result) | ||
211 | } | 198 | } |
212 | } | 199 | } |
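
The registrar lookup above walks the symbol tables of the compiled proc-macro dylib per object format (ELF dynamic strings, PE exports, Mach-O exports) and returns the first symbol matching the registrar predicate. Below is a minimal sketch of the ELF branch only, assuming goblin's `Object::parse` entry point; the predicate here is a placeholder, since the body of the real `is_derive_registrar_symbol` is not shown in this hunk.

use std::{fs, io, path::Path};
use goblin::Object;

// Placeholder predicate: the real `is_derive_registrar_symbol` in this file
// matches the rustc-generated registrar symbol name.
fn is_derive_registrar_symbol(symbol: &str) -> bool {
    symbol.contains("proc_macro")
}

fn find_registrar_symbol_elf(file: &Path) -> io::Result<Option<String>> {
    let invalid_data =
        |e: goblin::error::Error| io::Error::new(io::ErrorKind::InvalidData, e.to_string());
    let bytes = fs::read(file)?;
    match Object::parse(&bytes).map_err(invalid_data)? {
        // Same shape as the ELF arm above: scan the dynamic string table and
        // return the first matching symbol as an owned String.
        Object::Elf(elf) => {
            let symbols = elf.dynstrtab.to_vec().map_err(invalid_data)?;
            Ok(symbols.into_iter().find(|s| is_derive_registrar_symbol(s)).map(str::to_owned))
        }
        _ => Ok(None),
    }
}
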
diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs index c62b0ed89..3aca859db 100644 --- a/crates/ra_proc_macro_srv/src/lib.rs +++ b/crates/ra_proc_macro_srv/src/lib.rs | |||
@@ -3,10 +3,10 @@ | |||
3 | //! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code. | 3 | //! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code. |
4 | //! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander. | 4 | //! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander. |
5 | //! | 5 | //! |
6 | //! But we change some several design for fitting RA needs: | 6 | //! But we adapt it to better fit RA needs: |
7 | //! | 7 | //! |
8 | //! * We use `ra_tt` for proc-macro `TokenStream` server, it is easy to manipute and interact with | 8 | //! * We use `ra_tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with |
9 | //! RA then proc-macro2 token stream. | 9 | //! RA than `proc-macro2` token stream. |
10 | //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` | 10 | //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` |
11 | //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue) | 11 | //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue) |
12 | 12 | ||
@@ -21,36 +21,28 @@ mod dylib; | |||
21 | 21 | ||
22 | use proc_macro::bridge::client::TokenStream; | 22 | use proc_macro::bridge::client::TokenStream; |
23 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; | 23 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; |
24 | use std::path::Path; | ||
24 | 25 | ||
25 | pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { | 26 | pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { |
26 | let expander = dylib::Expander::new(&task.lib) | 27 | let expander = create_expander(&task.lib); |
27 | .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib)); | ||
28 | 28 | ||
29 | match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { | 29 | match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { |
30 | Ok(expansion) => Ok(ExpansionResult { expansion }), | 30 | Ok(expansion) => Ok(ExpansionResult { expansion }), |
31 | Err(msg) => { | 31 | Err(msg) => { |
32 | let reason = format!( | 32 | Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) |
33 | "Cannot perform expansion for {}: error {:?}!", | ||
34 | &task.macro_name, | ||
35 | msg.as_str() | ||
36 | ); | ||
37 | Err(reason) | ||
38 | } | 33 | } |
39 | } | 34 | } |
40 | } | 35 | } |
41 | 36 | ||
42 | pub(crate) fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> { | 37 | pub(crate) fn list_macros(task: &ListMacrosTask) -> ListMacrosResult { |
43 | let expander = dylib::Expander::new(&task.lib) | 38 | let expander = create_expander(&task.lib); |
44 | .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib)); | ||
45 | 39 | ||
46 | match expander.list_macros() { | 40 | ListMacrosResult { macros: expander.list_macros() } |
47 | Ok(macros) => Ok(ListMacrosResult { macros }), | 41 | } |
48 | Err(msg) => { | 42 | |
49 | let reason = | 43 | fn create_expander(lib: &Path) -> dylib::Expander { |
50 | format!("Cannot perform expansion for {:?}: error {:?}!", &task.lib, msg.as_str()); | 44 | dylib::Expander::new(lib) |
51 | Err(reason) | 45 | .unwrap_or_else(|err| panic!("Cannot create expander for {}: {:?}", lib.display(), err)) |
52 | } | ||
53 | } | ||
54 | } | 46 | } |
55 | 47 | ||
56 | pub mod cli; | 48 | pub mod cli; |
diff --git a/crates/ra_proc_macro_srv/src/rustc_server.rs b/crates/ra_proc_macro_srv/src/rustc_server.rs index ec0d35692..f481d70b2 100644 --- a/crates/ra_proc_macro_srv/src/rustc_server.rs +++ b/crates/ra_proc_macro_srv/src/rustc_server.rs | |||
@@ -6,7 +6,7 @@ | |||
6 | //! The original idea from fedochet is using proc-macro2 as backend, | 6 | //! The original idea from fedochet is using proc-macro2 as backend, |
7 | //! we use ra_tt instead for better integration with RA. | 7 | //! we use ra_tt instead for better integration with RA. |
8 | //! | 8 | //! |
9 | //! FIXME: No span and source file informatin is implemented yet | 9 | //! FIXME: No span and source file information is implemented yet |
10 | 10 | ||
11 | use crate::proc_macro::bridge::{self, server}; | 11 | use crate::proc_macro::bridge::{self, server}; |
12 | use ra_tt as tt; | 12 | use ra_tt as tt; |
@@ -76,7 +76,16 @@ impl Extend<TokenTree> for TokenStream { | |||
76 | impl Extend<TokenStream> for TokenStream { | 76 | impl Extend<TokenStream> for TokenStream { |
77 | fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { | 77 | fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { |
78 | for item in streams { | 78 | for item in streams { |
79 | self.subtree.token_trees.extend(&mut item.into_iter()) | 79 | for tkn in item { |
80 | match tkn { | ||
81 | tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { | ||
82 | self.subtree.token_trees.extend(subtree.token_trees); | ||
83 | } | ||
84 | _ => { | ||
85 | self.subtree.token_trees.push(tkn); | ||
86 | } | ||
87 | } | ||
88 | } | ||
80 | } | 89 | } |
81 | } | 90 | } |
82 | } | 91 | } |
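
The new `Extend` impl splices delimiter-less subtrees directly into the parent stream instead of nesting them, which is why the extra `SUBTREE $` wrapper disappears from the fixture below. Here is a self-contained sketch of the same flattening logic on a simplified token-tree type (not the real `ra_tt` types):

// Illustrative only: a subtree without a delimiter carries no grouping
// information, so its children are spliced into the parent stream.
#[derive(Debug)]
enum Tree {
    Leaf(&'static str),
    Subtree { delimiter: Option<char>, children: Vec<Tree> },
}

fn extend_flattening(target: &mut Vec<Tree>, items: Vec<Tree>) {
    for item in items {
        match item {
            Tree::Subtree { delimiter: None, children } => target.extend(children),
            other => target.push(other),
        }
    }
}

fn main() {
    let mut stream = vec![Tree::Leaf("feature"), Tree::Leaf("=")];
    // Before this change the literal stayed wrapped in an extra subtree;
    // now it is spliced in as a sibling token.
    extend_flattening(
        &mut stream,
        vec![Tree::Subtree { delimiter: None, children: vec![Tree::Leaf("\"cargo-clippy\"")] }],
    );
    println!("{:?}", stream);
}
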
diff --git a/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt b/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt index 24507d98d..1f5d940fa 100644 --- a/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt +++ b/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt | |||
@@ -25,8 +25,7 @@ SUBTREE $ | |||
25 | SUBTREE () 4294967295 | 25 | SUBTREE () 4294967295 |
26 | IDENT feature 4294967295 | 26 | IDENT feature 4294967295 |
27 | PUNCH = [alone] 4294967295 | 27 | PUNCH = [alone] 4294967295 |
28 | SUBTREE $ | 28 | LITERAL "cargo-clippy" 0 |
29 | LITERAL "cargo-clippy" 0 | ||
30 | PUNCH , [alone] 4294967295 | 29 | PUNCH , [alone] 4294967295 |
31 | IDENT allow 4294967295 | 30 | IDENT allow 4294967295 |
32 | SUBTREE () 4294967295 | 31 | SUBTREE () 4294967295 |
diff --git a/crates/ra_proc_macro_srv/src/tests/utils.rs b/crates/ra_proc_macro_srv/src/tests/utils.rs index 1ee409449..2139ec7a4 100644 --- a/crates/ra_proc_macro_srv/src/tests/utils.rs +++ b/crates/ra_proc_macro_srv/src/tests/utils.rs | |||
@@ -60,6 +60,6 @@ pub fn list(crate_name: &str, version: &str) -> Vec<String> { | |||
60 | let path = fixtures::dylib_path(crate_name, version); | 60 | let path = fixtures::dylib_path(crate_name, version); |
61 | let task = ListMacrosTask { lib: path }; | 61 | let task = ListMacrosTask { lib: path }; |
62 | 62 | ||
63 | let res = list_macros(&task).unwrap(); | 63 | let res = list_macros(&task); |
64 | res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() | 64 | res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() |
65 | } | 65 | } |
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs index b50cda06f..84008b2e3 100644 --- a/crates/ra_project_model/src/cargo_workspace.rs +++ b/crates/ra_project_model/src/cargo_workspace.rs | |||
@@ -303,8 +303,7 @@ pub fn load_extern_resources( | |||
303 | if message.target.kind.contains(&"proc-macro".to_string()) { | 303 | if message.target.kind.contains(&"proc-macro".to_string()) { |
304 | let package_id = message.package_id; | 304 | let package_id = message.package_id; |
305 | // Skip rmeta file | 305 | // Skip rmeta file |
306 | if let Some(filename) = | 306 | if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) |
307 | message.filenames.iter().filter(|name| is_dylib(name)).next() | ||
308 | { | 307 | { |
309 | res.proc_dylib_paths.insert(package_id, filename.clone()); | 308 | res.proc_dylib_paths.insert(package_id, filename.clone()); |
310 | } | 309 | } |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index ea41bf85d..06df8495c 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -10,8 +10,8 @@ use ra_text_edit::TextEditBuilder; | |||
10 | use rustc_hash::FxHashMap; | 10 | use rustc_hash::FxHashMap; |
11 | 11 | ||
12 | use crate::{ | 12 | use crate::{ |
13 | AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, | 13 | AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, |
14 | TextRange, TextUnit, | 14 | SyntaxToken, TextRange, TextUnit, |
15 | }; | 15 | }; |
16 | 16 | ||
17 | /// Returns ancestors of the node at the offset, sorted by length. This should | 17 | /// Returns ancestors of the node at the offset, sorted by length. This should |
@@ -90,6 +90,10 @@ pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> { | |||
90 | me.syntax().siblings(direction).skip(1).find_map(T::cast) | 90 | me.syntax().siblings(direction).skip(1).find_map(T::cast) |
91 | } | 91 | } |
92 | 92 | ||
93 | pub fn has_errors(node: &SyntaxNode) -> bool { | ||
94 | node.children().any(|it| it.kind() == SyntaxKind::ERROR) | ||
95 | } | ||
96 | |||
93 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | 97 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] |
94 | pub enum InsertPosition<T> { | 98 | pub enum InsertPosition<T> { |
95 | First, | 99 | First, |
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs index 9e5411ee5..26e4576ff 100644 --- a/crates/ra_syntax/src/ast/edit.rs +++ b/crates/ra_syntax/src/ast/edit.rs | |||
@@ -307,7 +307,11 @@ impl ast::UseTree { | |||
307 | 307 | ||
308 | fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> { | 308 | fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> { |
309 | let parent = prefix.parent_path()?; | 309 | let parent = prefix.parent_path()?; |
310 | let mut res = make::path_unqualified(parent.segment()?); | 310 | let segment = parent.segment()?; |
311 | if algo::has_errors(segment.syntax()) { | ||
312 | return None; | ||
313 | } | ||
314 | let mut res = make::path_unqualified(segment); | ||
311 | for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { | 315 | for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { |
312 | res = make::path_qualified(res, p.segment()?); | 316 | res = make::path_qualified(res, p.segment()?); |
313 | } | 317 | } |
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs index 5e19253a6..b14409c39 100644 --- a/crates/rust-analyzer/src/bin/args.rs +++ b/crates/rust-analyzer/src/bin/args.rs | |||
@@ -84,7 +84,7 @@ impl Args { | |||
84 | if matches.contains(["-h", "--help"]) { | 84 | if matches.contains(["-h", "--help"]) { |
85 | eprintln!( | 85 | eprintln!( |
86 | "\ | 86 | "\ |
87 | ra-cli-parse | 87 | rust-analyzer parse |
88 | 88 | ||
89 | USAGE: | 89 | USAGE: |
90 | rust-analyzer parse [FLAGS] | 90 | rust-analyzer parse [FLAGS] |
@@ -104,7 +104,7 @@ FLAGS: | |||
104 | if matches.contains(["-h", "--help"]) { | 104 | if matches.contains(["-h", "--help"]) { |
105 | eprintln!( | 105 | eprintln!( |
106 | "\ | 106 | "\ |
107 | ra-cli-symbols | 107 | rust-analyzer symbols |
108 | 108 | ||
109 | USAGE: | 109 | USAGE: |
110 | rust-analyzer highlight [FLAGS] | 110 | rust-analyzer highlight [FLAGS] |
@@ -123,7 +123,7 @@ FLAGS: | |||
123 | if matches.contains(["-h", "--help"]) { | 123 | if matches.contains(["-h", "--help"]) { |
124 | eprintln!( | 124 | eprintln!( |
125 | "\ | 125 | "\ |
126 | ra-cli-highlight | 126 | rust-analyzer highlight |
127 | 127 | ||
128 | USAGE: | 128 | USAGE: |
129 | rust-analyzer highlight [FLAGS] | 129 | rust-analyzer highlight [FLAGS] |
@@ -143,7 +143,7 @@ FLAGS: | |||
143 | if matches.contains(["-h", "--help"]) { | 143 | if matches.contains(["-h", "--help"]) { |
144 | eprintln!( | 144 | eprintln!( |
145 | "\ | 145 | "\ |
146 | ra-cli-analysis-stats | 146 | rust-analyzer analysis-stats |
147 | 147 | ||
148 | USAGE: | 148 | USAGE: |
149 | rust-analyzer analysis-stats [FLAGS] [OPTIONS] [PATH] | 149 | rust-analyzer analysis-stats [FLAGS] [OPTIONS] [PATH] |
@@ -193,7 +193,7 @@ ARGS: | |||
193 | if matches.contains(["-h", "--help"]) { | 193 | if matches.contains(["-h", "--help"]) { |
194 | eprintln!( | 194 | eprintln!( |
195 | "\ | 195 | "\ |
196 | rust-analyzer-analysis-bench | 196 | rust-analyzer analysis-bench |
197 | 197 | ||
198 | USAGE: | 198 | USAGE: |
199 | rust-analyzer analysis-bench [FLAGS] [OPTIONS] | 199 | rust-analyzer analysis-bench [FLAGS] [OPTIONS] |
@@ -236,7 +236,7 @@ ARGS: | |||
236 | if matches.contains(["-h", "--help"]) { | 236 | if matches.contains(["-h", "--help"]) { |
237 | eprintln!( | 237 | eprintln!( |
238 | "\ | 238 | "\ |
239 | ra-cli-diagnostics | 239 | rust-analyzer diagnostics |
240 | 240 | ||
241 | USAGE: | 241 | USAGE: |
242 | rust-analyzer diagnostics [FLAGS] [PATH] | 242 | rust-analyzer diagnostics [FLAGS] [PATH] |
@@ -269,7 +269,7 @@ ARGS: | |||
269 | _ => { | 269 | _ => { |
270 | eprintln!( | 270 | eprintln!( |
271 | "\ | 271 | "\ |
272 | ra-cli | 272 | rust-analyzer |
273 | 273 | ||
274 | USAGE: | 274 | USAGE: |
275 | rust-analyzer <SUBCOMMAND> | 275 | rust-analyzer <SUBCOMMAND> |
@@ -281,6 +281,8 @@ SUBCOMMANDS: | |||
281 | analysis-bench | 281 | analysis-bench |
282 | analysis-stats | 282 | analysis-stats |
283 | highlight | 283 | highlight |
284 | diagnostics | ||
285 | proc-macro | ||
284 | parse | 286 | parse |
285 | symbols" | 287 | symbols" |
286 | ); | 288 | ); |
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 28b67cfe2..e8d5dad65 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs | |||
@@ -51,7 +51,7 @@ fn main() -> Result<()> { | |||
51 | cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)? | 51 | cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)? |
52 | } | 52 | } |
53 | 53 | ||
54 | args::Command::ProcMacro => run_proc_macro_sv()?, | 54 | args::Command::ProcMacro => run_proc_macro_srv()?, |
55 | args::Command::RunServer => run_server()?, | 55 | args::Command::RunServer => run_server()?, |
56 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), | 56 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), |
57 | } | 57 | } |
@@ -65,7 +65,7 @@ fn setup_logging() -> Result<()> { | |||
65 | Ok(()) | 65 | Ok(()) |
66 | } | 66 | } |
67 | 67 | ||
68 | fn run_proc_macro_sv() -> Result<()> { | 68 | fn run_proc_macro_srv() -> Result<()> { |
69 | ra_proc_macro_srv::cli::run(); | 69 | ra_proc_macro_srv::cli::run(); |
70 | Ok(()) | 70 | Ok(()) |
71 | } | 71 | } |
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 762f776fe..d0a71120a 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs | |||
@@ -76,7 +76,7 @@ pub(crate) fn load_cargo( | |||
76 | ProcMacroClient::dummy() | 76 | ProcMacroClient::dummy() |
77 | } else { | 77 | } else { |
78 | let path = std::env::current_exe()?; | 78 | let path = std::env::current_exe()?; |
79 | ProcMacroClient::extern_process(&path, &["proc-macro"]).unwrap() | 79 | ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap() |
80 | }; | 80 | }; |
81 | let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); | 81 | let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); |
82 | Ok((host, source_roots)) | 82 | Ok((host, source_roots)) |
diff --git a/crates/rust-analyzer/src/conv.rs b/crates/rust-analyzer/src/conv.rs index f47d931fd..d0218dcbf 100644 --- a/crates/rust-analyzer/src/conv.rs +++ b/crates/rust-analyzer/src/conv.rs | |||
@@ -24,7 +24,9 @@ use crate::{ | |||
24 | world::WorldSnapshot, | 24 | world::WorldSnapshot, |
25 | Result, | 25 | Result, |
26 | }; | 26 | }; |
27 | use semantic_tokens::{ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION}; | 27 | use semantic_tokens::{ |
28 | ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION, UNRESOLVED_REFERENCE, | ||
29 | }; | ||
28 | 30 | ||
29 | pub trait Conv { | 31 | pub trait Conv { |
30 | type Output; | 32 | type Output; |
@@ -381,6 +383,7 @@ impl Conv for Highlight { | |||
381 | HighlightTag::Comment => SemanticTokenType::COMMENT, | 383 | HighlightTag::Comment => SemanticTokenType::COMMENT, |
382 | HighlightTag::Attribute => ATTRIBUTE, | 384 | HighlightTag::Attribute => ATTRIBUTE, |
383 | HighlightTag::Keyword => SemanticTokenType::KEYWORD, | 385 | HighlightTag::Keyword => SemanticTokenType::KEYWORD, |
386 | HighlightTag::UnresolvedReference => UNRESOLVED_REFERENCE, | ||
384 | }; | 387 | }; |
385 | 388 | ||
386 | for modifier in self.modifiers.iter() { | 389 | for modifier in self.modifiers.iter() { |
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index 865fa3b1c..10fe696f6 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs | |||
@@ -10,6 +10,8 @@ pub(crate) const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMe | |||
10 | pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime"); | 10 | pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime"); |
11 | pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias"); | 11 | pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias"); |
12 | pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union"); | 12 | pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union"); |
13 | pub(crate) const UNRESOLVED_REFERENCE: SemanticTokenType = | ||
14 | SemanticTokenType::new("unresolvedReference"); | ||
13 | 15 | ||
14 | pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant"); | 16 | pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant"); |
15 | pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow"); | 17 | pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow"); |
@@ -43,6 +45,7 @@ pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[ | |||
43 | LIFETIME, | 45 | LIFETIME, |
44 | TYPE_ALIAS, | 46 | TYPE_ALIAS, |
45 | UNION, | 47 | UNION, |
48 | UNRESOLVED_REFERENCE, | ||
46 | ]; | 49 | ]; |
47 | 50 | ||
48 | pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ | 51 | pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ |
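
`unresolvedReference` is a custom token type, so the client can only decode it if it appears in the legend advertised at initialization; that is why it is added to SUPPORTED_TYPES as well. A rough sketch of how these tables would feed the legend, assuming lsp-types' `SemanticTokensLegend` with `token_types`/`token_modifiers` fields (the actual wiring lives elsewhere in rust-analyzer):

use lsp_types::{SemanticTokenModifier, SemanticTokenType, SemanticTokensLegend};

// Hypothetical helper: the protocol encodes each token as an index into these
// arrays, so every custom type such as `unresolvedReference` must be listed.
fn semantic_tokens_legend(
    types: &[SemanticTokenType],
    modifiers: &[SemanticTokenModifier],
) -> SemanticTokensLegend {
    SemanticTokensLegend { token_types: types.to_vec(), token_modifiers: modifiers.to_vec() }
}
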
diff --git a/crates/rust-analyzer/src/world.rs b/crates/rust-analyzer/src/world.rs index f2ad453fa..8e1744bf9 100644 --- a/crates/rust-analyzer/src/world.rs +++ b/crates/rust-analyzer/src/world.rs | |||
@@ -148,20 +148,17 @@ impl WorldState { | |||
148 | 148 | ||
149 | let proc_macro_client = match &config.proc_macro_srv { | 149 | let proc_macro_client = match &config.proc_macro_srv { |
150 | None => ProcMacroClient::dummy(), | 150 | None => ProcMacroClient::dummy(), |
151 | Some((path, args)) => { | 151 | Some((path, args)) => match ProcMacroClient::extern_process(path.into(), args) { |
152 | let path = std::path::Path::new(path); | 152 | Ok(it) => it, |
153 | match ProcMacroClient::extern_process(path, args) { | 153 | Err(err) => { |
154 | Ok(it) => it, | 154 | log::error!( |
155 | Err(err) => { | 155 | "Fail to run ra_proc_macro_srv from path {}, error: {:?}", |
156 | log::error!( | 156 | path, |
157 | "Fail to run ra_proc_macro_srv from path {}, error : {}", | 157 | err |
158 | path.to_string_lossy(), | 158 | ); |
159 | err | 159 | ProcMacroClient::dummy() |
160 | ); | ||
161 | ProcMacroClient::dummy() | ||
162 | } | ||
163 | } | 160 | } |
164 | } | 161 | }, |
165 | }; | 162 | }; |
166 | 163 | ||
167 | workspaces | 164 | workspaces |
@@ -184,7 +181,7 @@ impl WorldState { | |||
184 | let mut analysis_host = AnalysisHost::new(lru_capacity); | 181 | let mut analysis_host = AnalysisHost::new(lru_capacity); |
185 | analysis_host.apply_change(change); | 182 | analysis_host.apply_change(change); |
186 | WorldState { | 183 | WorldState { |
187 | config: config, | 184 | config, |
188 | roots: folder_roots, | 185 | roots: folder_roots, |
189 | workspaces: Arc::new(workspaces), | 186 | workspaces: Arc::new(workspaces), |
190 | analysis_host, | 187 | analysis_host, |