Diffstat (limited to 'crates')
24 files changed, 552 insertions, 140 deletions
diff --git a/crates/ra_assists/src/doc_tests/generated.rs b/crates/ra_assists/src/doc_tests/generated.rs index b63b4d81a..b39e60870 100644 --- a/crates/ra_assists/src/doc_tests/generated.rs +++ b/crates/ra_assists/src/doc_tests/generated.rs | |||
@@ -78,7 +78,7 @@ fn foo() { | |||
78 | } | 78 | } |
79 | 79 | ||
80 | fn bar(arg: &str, baz: Baz) { | 80 | fn bar(arg: &str, baz: Baz) { |
81 | unimplemented!() | 81 | todo!() |
82 | } | 82 | } |
83 | 83 | ||
84 | "#####, | 84 | "#####, |
diff --git a/crates/ra_assists/src/handlers/add_function.rs b/crates/ra_assists/src/handlers/add_function.rs index 488bae08f..ad4ab66ed 100644 --- a/crates/ra_assists/src/handlers/add_function.rs +++ b/crates/ra_assists/src/handlers/add_function.rs | |||
@@ -29,7 +29,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; | |||
29 | // } | 29 | // } |
30 | // | 30 | // |
31 | // fn bar(arg: &str, baz: Baz) { | 31 | // fn bar(arg: &str, baz: Baz) { |
32 | // unimplemented!() | 32 | // todo!() |
33 | // } | 33 | // } |
34 | // | 34 | // |
35 | // ``` | 35 | // ``` |
@@ -80,7 +80,7 @@ impl FunctionBuilder { | |||
80 | Some(Self { append_fn_at, fn_name, type_params, params }) | 80 | Some(Self { append_fn_at, fn_name, type_params, params }) |
81 | } | 81 | } |
82 | fn render(self) -> Option<FunctionTemplate> { | 82 | fn render(self) -> Option<FunctionTemplate> { |
83 | let placeholder_expr = ast::make::expr_unimplemented(); | 83 | let placeholder_expr = ast::make::expr_todo(); |
84 | let fn_body = ast::make::block_expr(vec![], Some(placeholder_expr)); | 84 | let fn_body = ast::make::block_expr(vec![], Some(placeholder_expr)); |
85 | let fn_def = ast::make::fn_def(self.fn_name, self.type_params, self.params, fn_body); | 85 | let fn_def = ast::make::fn_def(self.fn_name, self.type_params, self.params, fn_body); |
86 | let fn_def = ast::make::add_newlines(2, fn_def); | 86 | let fn_def = ast::make::add_newlines(2, fn_def); |
@@ -225,7 +225,7 @@ fn foo() { | |||
225 | } | 225 | } |
226 | 226 | ||
227 | fn bar() { | 227 | fn bar() { |
228 | <|>unimplemented!() | 228 | <|>todo!() |
229 | } | 229 | } |
230 | ", | 230 | ", |
231 | ) | 231 | ) |
@@ -252,7 +252,7 @@ impl Foo { | |||
252 | } | 252 | } |
253 | 253 | ||
254 | fn bar() { | 254 | fn bar() { |
255 | <|>unimplemented!() | 255 | <|>todo!() |
256 | } | 256 | } |
257 | ", | 257 | ", |
258 | ) | 258 | ) |
@@ -276,7 +276,7 @@ fn foo1() { | |||
276 | } | 276 | } |
277 | 277 | ||
278 | fn bar() { | 278 | fn bar() { |
279 | <|>unimplemented!() | 279 | <|>todo!() |
280 | } | 280 | } |
281 | 281 | ||
282 | fn foo2() {} | 282 | fn foo2() {} |
@@ -302,7 +302,7 @@ mod baz { | |||
302 | } | 302 | } |
303 | 303 | ||
304 | fn bar() { | 304 | fn bar() { |
305 | <|>unimplemented!() | 305 | <|>todo!() |
306 | } | 306 | } |
307 | } | 307 | } |
308 | ", | 308 | ", |
@@ -315,20 +315,20 @@ mod baz { | |||
315 | add_function, | 315 | add_function, |
316 | r" | 316 | r" |
317 | struct Baz; | 317 | struct Baz; |
318 | fn baz() -> Baz { unimplemented!() } | 318 | fn baz() -> Baz { todo!() } |
319 | fn foo() { | 319 | fn foo() { |
320 | bar<|>(baz()); | 320 | bar<|>(baz()); |
321 | } | 321 | } |
322 | ", | 322 | ", |
323 | r" | 323 | r" |
324 | struct Baz; | 324 | struct Baz; |
325 | fn baz() -> Baz { unimplemented!() } | 325 | fn baz() -> Baz { todo!() } |
326 | fn foo() { | 326 | fn foo() { |
327 | bar(baz()); | 327 | bar(baz()); |
328 | } | 328 | } |
329 | 329 | ||
330 | fn bar(baz: Baz) { | 330 | fn bar(baz: Baz) { |
331 | <|>unimplemented!() | 331 | <|>todo!() |
332 | } | 332 | } |
333 | ", | 333 | ", |
334 | ); | 334 | ); |
@@ -361,7 +361,7 @@ impl Baz { | |||
361 | } | 361 | } |
362 | 362 | ||
363 | fn bar(baz: Baz) { | 363 | fn bar(baz: Baz) { |
364 | <|>unimplemented!() | 364 | <|>todo!() |
365 | } | 365 | } |
366 | ", | 366 | ", |
367 | ) | 367 | ) |
@@ -382,7 +382,7 @@ fn foo() { | |||
382 | } | 382 | } |
383 | 383 | ||
384 | fn bar(arg: &str) { | 384 | fn bar(arg: &str) { |
385 | <|>unimplemented!() | 385 | <|>todo!() |
386 | } | 386 | } |
387 | "#, | 387 | "#, |
388 | ) | 388 | ) |
@@ -403,7 +403,7 @@ fn foo() { | |||
403 | } | 403 | } |
404 | 404 | ||
405 | fn bar(arg: char) { | 405 | fn bar(arg: char) { |
406 | <|>unimplemented!() | 406 | <|>todo!() |
407 | } | 407 | } |
408 | "#, | 408 | "#, |
409 | ) | 409 | ) |
@@ -424,7 +424,7 @@ fn foo() { | |||
424 | } | 424 | } |
425 | 425 | ||
426 | fn bar(arg: i32) { | 426 | fn bar(arg: i32) { |
427 | <|>unimplemented!() | 427 | <|>todo!() |
428 | } | 428 | } |
429 | ", | 429 | ", |
430 | ) | 430 | ) |
@@ -445,7 +445,7 @@ fn foo() { | |||
445 | } | 445 | } |
446 | 446 | ||
447 | fn bar(arg: u8) { | 447 | fn bar(arg: u8) { |
448 | <|>unimplemented!() | 448 | <|>todo!() |
449 | } | 449 | } |
450 | ", | 450 | ", |
451 | ) | 451 | ) |
@@ -470,7 +470,7 @@ fn foo() { | |||
470 | } | 470 | } |
471 | 471 | ||
472 | fn bar(x: u8) { | 472 | fn bar(x: u8) { |
473 | <|>unimplemented!() | 473 | <|>todo!() |
474 | } | 474 | } |
475 | ", | 475 | ", |
476 | ) | 476 | ) |
@@ -493,7 +493,7 @@ fn foo() { | |||
493 | } | 493 | } |
494 | 494 | ||
495 | fn bar(worble: ()) { | 495 | fn bar(worble: ()) { |
496 | <|>unimplemented!() | 496 | <|>todo!() |
497 | } | 497 | } |
498 | ", | 498 | ", |
499 | ) | 499 | ) |
@@ -506,7 +506,7 @@ fn bar(worble: ()) { | |||
506 | r" | 506 | r" |
507 | trait Foo {} | 507 | trait Foo {} |
508 | fn foo() -> impl Foo { | 508 | fn foo() -> impl Foo { |
509 | unimplemented!() | 509 | todo!() |
510 | } | 510 | } |
511 | fn baz() { | 511 | fn baz() { |
512 | <|>bar(foo()) | 512 | <|>bar(foo()) |
@@ -515,14 +515,14 @@ fn baz() { | |||
515 | r" | 515 | r" |
516 | trait Foo {} | 516 | trait Foo {} |
517 | fn foo() -> impl Foo { | 517 | fn foo() -> impl Foo { |
518 | unimplemented!() | 518 | todo!() |
519 | } | 519 | } |
520 | fn baz() { | 520 | fn baz() { |
521 | bar(foo()) | 521 | bar(foo()) |
522 | } | 522 | } |
523 | 523 | ||
524 | fn bar(foo: impl Foo) { | 524 | fn bar(foo: impl Foo) { |
525 | <|>unimplemented!() | 525 | <|>todo!() |
526 | } | 526 | } |
527 | ", | 527 | ", |
528 | ) | 528 | ) |
@@ -556,7 +556,7 @@ mod Foo { | |||
556 | } | 556 | } |
557 | 557 | ||
558 | fn bar(baz: super::Baz::Bof) { | 558 | fn bar(baz: super::Baz::Bof) { |
559 | <|>unimplemented!() | 559 | <|>todo!() |
560 | } | 560 | } |
561 | } | 561 | } |
562 | ", | 562 | ", |
@@ -580,7 +580,7 @@ fn foo<T>(t: T) { | |||
580 | } | 580 | } |
581 | 581 | ||
582 | fn bar<T>(t: T) { | 582 | fn bar<T>(t: T) { |
583 | <|>unimplemented!() | 583 | <|>todo!() |
584 | } | 584 | } |
585 | ", | 585 | ", |
586 | ) | 586 | ) |
@@ -611,7 +611,7 @@ fn foo() { | |||
611 | } | 611 | } |
612 | 612 | ||
613 | fn bar(arg: fn() -> Baz) { | 613 | fn bar(arg: fn() -> Baz) { |
614 | <|>unimplemented!() | 614 | <|>todo!() |
615 | } | 615 | } |
616 | ", | 616 | ", |
617 | ) | 617 | ) |
@@ -636,7 +636,7 @@ fn foo() { | |||
636 | } | 636 | } |
637 | 637 | ||
638 | fn bar(closure: impl Fn(i64) -> i64) { | 638 | fn bar(closure: impl Fn(i64) -> i64) { |
639 | <|>unimplemented!() | 639 | <|>todo!() |
640 | } | 640 | } |
641 | ", | 641 | ", |
642 | ) | 642 | ) |
@@ -657,7 +657,7 @@ fn foo() { | |||
657 | } | 657 | } |
658 | 658 | ||
659 | fn bar(baz: ()) { | 659 | fn bar(baz: ()) { |
660 | <|>unimplemented!() | 660 | <|>todo!() |
661 | } | 661 | } |
662 | ", | 662 | ", |
663 | ) | 663 | ) |
@@ -682,7 +682,7 @@ fn foo() { | |||
682 | } | 682 | } |
683 | 683 | ||
684 | fn bar(baz_1: Baz, baz_2: Baz) { | 684 | fn bar(baz_1: Baz, baz_2: Baz) { |
685 | <|>unimplemented!() | 685 | <|>todo!() |
686 | } | 686 | } |
687 | ", | 687 | ", |
688 | ) | 688 | ) |
@@ -707,7 +707,7 @@ fn foo() { | |||
707 | } | 707 | } |
708 | 708 | ||
709 | fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { | 709 | fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { |
710 | <|>unimplemented!() | 710 | <|>todo!() |
711 | } | 711 | } |
712 | "#, | 712 | "#, |
713 | ) | 713 | ) |
@@ -779,7 +779,7 @@ impl Foo { | |||
779 | self.bar(); | 779 | self.bar(); |
780 | } | 780 | } |
781 | fn bar(&self) { | 781 | fn bar(&self) { |
782 | unimplemented!(); | 782 | todo!(); |
783 | } | 783 | } |
784 | } | 784 | } |
785 | ", | 785 | ", |
diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs index 0958f52f1..ef0ce0586 100644 --- a/crates/ra_assists/src/handlers/merge_imports.rs +++ b/crates/ra_assists/src/handlers/merge_imports.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use std::iter::successors; | 1 | use std::iter::successors; |
2 | 2 | ||
3 | use ra_syntax::{ | 3 | use ra_syntax::{ |
4 | algo::{neighbor, SyntaxRewriter}, | 4 | algo::{neighbor, skip_trivia_token, SyntaxRewriter}, |
5 | ast::{self, edit::AstNodeEdit, make}, | 5 | ast::{self, edit::AstNodeEdit, make}, |
6 | AstNode, Direction, InsertPosition, SyntaxElement, T, | 6 | AstNode, Direction, InsertPosition, SyntaxElement, T, |
7 | }; | 7 | }; |
@@ -72,9 +72,18 @@ fn try_merge_trees(old: &ast::UseTree, new: &ast::UseTree) -> Option<ast::UseTre | |||
72 | let lhs = old.split_prefix(&lhs_prefix); | 72 | let lhs = old.split_prefix(&lhs_prefix); |
73 | let rhs = new.split_prefix(&rhs_prefix); | 73 | let rhs = new.split_prefix(&rhs_prefix); |
74 | 74 | ||
75 | let should_insert_comma = lhs | ||
76 | .use_tree_list()? | ||
77 | .r_curly_token() | ||
78 | .and_then(|it| skip_trivia_token(it.prev_token()?, Direction::Prev)) | ||
79 | .map(|it| it.kind() != T![,]) | ||
80 | .unwrap_or(true); | ||
81 | |||
75 | let mut to_insert: Vec<SyntaxElement> = Vec::new(); | 82 | let mut to_insert: Vec<SyntaxElement> = Vec::new(); |
76 | to_insert.push(make::token(T![,]).into()); | 83 | if should_insert_comma { |
77 | to_insert.push(make::tokens::single_space().into()); | 84 | to_insert.push(make::token(T![,]).into()); |
85 | to_insert.push(make::tokens::single_space().into()); | ||
86 | } | ||
78 | to_insert.extend( | 87 | to_insert.extend( |
79 | rhs.use_tree_list()? | 88 | rhs.use_tree_list()? |
80 | .syntax() | 89 | .syntax() |
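The `should_insert_comma` check added above looks at the last non-trivia token before the left-hand use-tree list's closing `}` and only inserts a ", " separator when that token is not already a comma, so an existing trailing comma is reused instead of doubled. A standalone sketch of the same check over a plain token-kind list (the `Tok` type here is illustrative, not ra_syntax's API):

    #[derive(PartialEq)]
    enum Tok { Ident, Comma, Whitespace }

    // Walk backwards over trivia; a new ", " is needed unless the last
    // significant token before the `}` is already a comma.
    fn needs_separator(tokens_before_r_curly: &[Tok]) -> bool {
        match tokens_before_r_curly.iter().rev().find(|t| !matches!(t, Tok::Whitespace)) {
            Some(last) => *last != Tok::Comma,
            None => true,
        }
    }

    fn main() {
        // `use foo::{ FooBar, };` -- trailing comma already present, nothing to add.
        assert!(!needs_separator(&[Tok::Ident, Tok::Comma, Tok::Whitespace]));
        // `use foo::{ FooBar };` -- a separator must be inserted before merging.
        assert!(needs_separator(&[Tok::Ident]));
    }

The `test_double_comma` test added further down in this file pins the regression this check fixes.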
@@ -247,4 +256,22 @@ use { | |||
247 | ", | 256 | ", |
248 | ); | 257 | ); |
249 | } | 258 | } |
259 | |||
260 | #[test] | ||
261 | fn test_double_comma() { | ||
262 | check_assist( | ||
263 | merge_imports, | ||
264 | r" | ||
265 | use foo::bar::baz; | ||
266 | use foo::<|>{ | ||
267 | FooBar, | ||
268 | }; | ||
269 | ", | ||
270 | r" | ||
271 | use foo::{<|> | ||
272 | FooBar, | ||
273 | bar::baz}; | ||
274 | ", | ||
275 | ) | ||
276 | } | ||
250 | } | 277 | } |
diff --git a/crates/ra_flycheck/Cargo.toml b/crates/ra_flycheck/Cargo.toml index c9a9ddc12..76e5cada4 100644 --- a/crates/ra_flycheck/Cargo.toml +++ b/crates/ra_flycheck/Cargo.toml | |||
@@ -13,4 +13,4 @@ serde_json = "1.0.48" | |||
13 | jod-thread = "0.1.1" | 13 | jod-thread = "0.1.1" |
14 | 14 | ||
15 | [dev-dependencies] | 15 | [dev-dependencies] |
16 | insta = "0.15.0" | 16 | insta = "0.16.0" |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 9baebf643..3801fce23 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -25,7 +25,7 @@ use hir_ty::{ | |||
25 | autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, | 25 | autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, |
26 | Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, | 26 | Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, |
27 | }; | 27 | }; |
28 | use ra_db::{CrateId, Edition, FileId}; | 28 | use ra_db::{CrateId, CrateName, Edition, FileId}; |
29 | use ra_prof::profile; | 29 | use ra_prof::profile; |
30 | use ra_syntax::{ | 30 | use ra_syntax::{ |
31 | ast::{self, AttrsOwner, NameOwner}, | 31 | ast::{self, AttrsOwner, NameOwner}, |
@@ -91,6 +91,10 @@ impl Crate { | |||
91 | db.crate_graph()[self.id].edition | 91 | db.crate_graph()[self.id].edition |
92 | } | 92 | } |
93 | 93 | ||
94 | pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateName> { | ||
95 | db.crate_graph()[self.id].display_name.as_ref().cloned() | ||
96 | } | ||
97 | |||
94 | pub fn all(db: &dyn HirDatabase) -> Vec<Crate> { | 98 | pub fn all(db: &dyn HirDatabase) -> Vec<Crate> { |
95 | db.crate_graph().iter().map(|id| Crate { id }).collect() | 99 | db.crate_graph().iter().map(|id| Crate { id }).collect() |
96 | } | 100 | } |
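The new accessor simply clones the optional `display_name` already stored in the crate graph; the `diagnostics` CLI added later in this change uses it (via `module.krate().display_name(db)`) to label each processed module with its crate name, falling back to "unknown" when the crate has no display name.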
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml index 56e791e3e..b85358308 100644 --- a/crates/ra_hir_def/Cargo.toml +++ b/crates/ra_hir_def/Cargo.toml | |||
@@ -28,4 +28,4 @@ ra_cfg = { path = "../ra_cfg" } | |||
28 | tt = { path = "../ra_tt", package = "ra_tt" } | 28 | tt = { path = "../ra_tt", package = "ra_tt" } |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | insta = "0.15.0" | 31 | insta = "0.16.0" |
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs index 6caa87db4..79abe55ce 100644 --- a/crates/ra_hir_def/src/body/lower.rs +++ b/crates/ra_hir_def/src/body/lower.rs | |||
@@ -33,6 +33,7 @@ use crate::{ | |||
33 | }; | 33 | }; |
34 | 34 | ||
35 | use super::{ExprSource, PatSource}; | 35 | use super::{ExprSource, PatSource}; |
36 | use ast::AstChildren; | ||
36 | 37 | ||
37 | pub(super) fn lower( | 38 | pub(super) fn lower( |
38 | db: &dyn DefDatabase, | 39 | db: &dyn DefDatabase, |
@@ -598,8 +599,8 @@ impl ExprCollector<'_> { | |||
598 | } | 599 | } |
599 | ast::Pat::TupleStructPat(p) => { | 600 | ast::Pat::TupleStructPat(p) => { |
600 | let path = p.path().and_then(|path| self.expander.parse_path(path)); | 601 | let path = p.path().and_then(|path| self.expander.parse_path(path)); |
601 | let args = p.args().map(|p| self.collect_pat(p)).collect(); | 602 | let (args, ellipsis) = self.collect_tuple_pat(p.args()); |
602 | Pat::TupleStruct { path, args } | 603 | Pat::TupleStruct { path, args, ellipsis } |
603 | } | 604 | } |
604 | ast::Pat::RefPat(p) => { | 605 | ast::Pat::RefPat(p) => { |
605 | let pat = self.collect_pat_opt(p.pat()); | 606 | let pat = self.collect_pat_opt(p.pat()); |
@@ -616,10 +617,10 @@ impl ExprCollector<'_> { | |||
616 | } | 617 | } |
617 | ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()), | 618 | ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()), |
618 | ast::Pat::TuplePat(p) => { | 619 | ast::Pat::TuplePat(p) => { |
619 | let args = p.args().map(|p| self.collect_pat(p)).collect(); | 620 | let (args, ellipsis) = self.collect_tuple_pat(p.args()); |
620 | Pat::Tuple(args) | 621 | Pat::Tuple { args, ellipsis } |
621 | } | 622 | } |
622 | ast::Pat::PlaceholderPat(_) | ast::Pat::DotDotPat(_) => Pat::Wild, | 623 | ast::Pat::PlaceholderPat(_) => Pat::Wild, |
623 | ast::Pat::RecordPat(p) => { | 624 | ast::Pat::RecordPat(p) => { |
624 | let path = p.path().and_then(|path| self.expander.parse_path(path)); | 625 | let path = p.path().and_then(|path| self.expander.parse_path(path)); |
625 | let record_field_pat_list = | 626 | let record_field_pat_list = |
@@ -665,6 +666,9 @@ impl ExprCollector<'_> { | |||
665 | Pat::Missing | 666 | Pat::Missing |
666 | } | 667 | } |
667 | } | 668 | } |
669 | ast::Pat::DotDotPat(_) => unreachable!( | ||
670 | "`DotDotPat` requires special handling and should not be mapped to a Pat." | ||
671 | ), | ||
668 | // FIXME: implement | 672 | // FIXME: implement |
669 | ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing, | 673 | ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing, |
670 | }; | 674 | }; |
@@ -679,6 +683,19 @@ impl ExprCollector<'_> { | |||
679 | self.missing_pat() | 683 | self.missing_pat() |
680 | } | 684 | } |
681 | } | 685 | } |
686 | |||
687 | fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Vec<PatId>, Option<usize>) { | ||
688 | // Find the location of the `..`, if there is one. Note that we do not | ||
689 | // consider the possibility of there being multiple `..` here. | ||
690 | let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::DotDotPat(_))); | ||
691 | // We want to skip the `..` pattern here, since we account for it above. | ||
692 | let args = args | ||
693 | .filter(|p| !matches!(p, ast::Pat::DotDotPat(_))) | ||
694 | .map(|p| self.collect_pat(p)) | ||
695 | .collect(); | ||
696 | |||
697 | (args, ellipsis) | ||
698 | } | ||
682 | } | 699 | } |
683 | 700 | ||
684 | impl From<ast::BinOp> for BinaryOp { | 701 | impl From<ast::BinOp> for BinaryOp { |
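The `collect_tuple_pat` helper above records where the `..` sat in the original pattern list and drops it from the collected sub-patterns, so the lowered pattern keeps both pieces of information. A standalone sketch of that split using plain strings in place of `ast::Pat` nodes (purely illustrative):

    fn split_ellipsis<'a>(pats: &[&'a str]) -> (Vec<&'a str>, Option<usize>) {
        // Position of the `..`, if any; the collected sub-patterns keep everything else.
        let ellipsis = pats.iter().position(|p| *p == "..");
        let args = pats.iter().copied().filter(|p| *p != "..").collect();
        (args, ellipsis)
    }

    fn main() {
        // `let (a, .., z) = ...;` keeps `a` and `z` and remembers that the
        // `..` sat at index 1 of the written pattern.
        assert_eq!(split_ellipsis(&["a", "..", "z"]), (vec!["a", "z"], Some(1)));
    }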
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs index 56a20c5bd..b3c91fea2 100644 --- a/crates/ra_hir_def/src/data.rs +++ b/crates/ra_hir_def/src/data.rs | |||
@@ -15,7 +15,7 @@ use ra_syntax::ast::{ | |||
15 | use crate::{ | 15 | use crate::{ |
16 | attr::Attrs, | 16 | attr::Attrs, |
17 | db::DefDatabase, | 17 | db::DefDatabase, |
18 | path::{path, GenericArgs, Path}, | 18 | path::{path, AssociatedTypeBinding, GenericArgs, Path}, |
19 | src::HasSource, | 19 | src::HasSource, |
20 | type_ref::{Mutability, TypeBound, TypeRef}, | 20 | type_ref::{Mutability, TypeBound, TypeRef}, |
21 | visibility::RawVisibility, | 21 | visibility::RawVisibility, |
@@ -95,7 +95,11 @@ fn desugar_future_path(orig: TypeRef) -> Path { | |||
95 | let path = path![std::future::Future]; | 95 | let path = path![std::future::Future]; |
96 | let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect(); | 96 | let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect(); |
97 | let mut last = GenericArgs::empty(); | 97 | let mut last = GenericArgs::empty(); |
98 | last.bindings.push((name![Output], orig)); | 98 | last.bindings.push(AssociatedTypeBinding { |
99 | name: name![Output], | ||
100 | type_ref: Some(orig), | ||
101 | bounds: Vec::new(), | ||
102 | }); | ||
99 | generic_args.push(Some(Arc::new(last))); | 103 | generic_args.push(Some(Arc::new(last))); |
100 | 104 | ||
101 | Path::from_known_path(path, generic_args) | 105 | Path::from_known_path(path, generic_args) |
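`desugar_future_path` builds the `std::future::Future` path with a single `Output = orig` binding, now expressed through the new `AssociatedTypeBinding` struct; the surface shape it models is the familiar `impl Future<Output = T>`. A compilable reminder of that form (standalone, not rust-analyzer code):

    use std::future::Future;

    // An `async fn` returning `u32` is exposed as `impl Future<Output = u32>`,
    // i.e. the `std::future::Future` path with an `Output = u32` binding.
    async fn get() -> u32 {
        7
    }

    fn assert_models<F: Future<Output = u32>>(_fut: F) {}

    fn main() {
        // Constructing the future (without polling it) is enough to check the bound.
        assert_models(get());
    }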
diff --git a/crates/ra_hir_def/src/expr.rs b/crates/ra_hir_def/src/expr.rs index e11bdf3ec..a0cdad529 100644 --- a/crates/ra_hir_def/src/expr.rs +++ b/crates/ra_hir_def/src/expr.rs | |||
@@ -374,7 +374,7 @@ pub struct RecordFieldPat { | |||
374 | pub enum Pat { | 374 | pub enum Pat { |
375 | Missing, | 375 | Missing, |
376 | Wild, | 376 | Wild, |
377 | Tuple(Vec<PatId>), | 377 | Tuple { args: Vec<PatId>, ellipsis: Option<usize> }, |
378 | Or(Vec<PatId>), | 378 | Or(Vec<PatId>), |
379 | Record { path: Option<Path>, args: Vec<RecordFieldPat>, ellipsis: bool }, | 379 | Record { path: Option<Path>, args: Vec<RecordFieldPat>, ellipsis: bool }, |
380 | Range { start: ExprId, end: ExprId }, | 380 | Range { start: ExprId, end: ExprId }, |
@@ -382,7 +382,7 @@ pub enum Pat { | |||
382 | Path(Path), | 382 | Path(Path), |
383 | Lit(ExprId), | 383 | Lit(ExprId), |
384 | Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> }, | 384 | Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> }, |
385 | TupleStruct { path: Option<Path>, args: Vec<PatId> }, | 385 | TupleStruct { path: Option<Path>, args: Vec<PatId>, ellipsis: Option<usize> }, |
386 | Ref { pat: PatId, mutability: Mutability }, | 386 | Ref { pat: PatId, mutability: Mutability }, |
387 | } | 387 | } |
388 | 388 | ||
@@ -393,7 +393,7 @@ impl Pat { | |||
393 | Pat::Bind { subpat, .. } => { | 393 | Pat::Bind { subpat, .. } => { |
394 | subpat.iter().copied().for_each(f); | 394 | subpat.iter().copied().for_each(f); |
395 | } | 395 | } |
396 | Pat::Or(args) | Pat::Tuple(args) | Pat::TupleStruct { args, .. } => { | 396 | Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => { |
397 | args.iter().copied().for_each(f); | 397 | args.iter().copied().for_each(f); |
398 | } | 398 | } |
399 | Pat::Ref { pat, .. } => f(*pat), | 399 | Pat::Ref { pat, .. } => f(*pat), |
diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs index 91c7b3e09..162b3c8c7 100644 --- a/crates/ra_hir_def/src/path.rs +++ b/crates/ra_hir_def/src/path.rs | |||
@@ -14,7 +14,10 @@ use hir_expand::{ | |||
14 | use ra_db::CrateId; | 14 | use ra_db::CrateId; |
15 | use ra_syntax::ast; | 15 | use ra_syntax::ast; |
16 | 16 | ||
17 | use crate::{type_ref::TypeRef, InFile}; | 17 | use crate::{ |
18 | type_ref::{TypeBound, TypeRef}, | ||
19 | InFile, | ||
20 | }; | ||
18 | 21 | ||
19 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] | 22 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
20 | pub struct ModPath { | 23 | pub struct ModPath { |
@@ -111,7 +114,21 @@ pub struct GenericArgs { | |||
111 | /// is left out. | 114 | /// is left out. |
112 | pub has_self_type: bool, | 115 | pub has_self_type: bool, |
113 | /// Associated type bindings like in `Iterator<Item = T>`. | 116 | /// Associated type bindings like in `Iterator<Item = T>`. |
114 | pub bindings: Vec<(Name, TypeRef)>, | 117 | pub bindings: Vec<AssociatedTypeBinding>, |
118 | } | ||
119 | |||
120 | /// An associated type binding like in `Iterator<Item = T>`. | ||
121 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
122 | pub struct AssociatedTypeBinding { | ||
123 | /// The name of the associated type. | ||
124 | pub name: Name, | ||
125 | /// The type bound to this associated type (in `Item = T`, this would be the | ||
126 | /// `T`). This can be `None` if there are bounds instead. | ||
127 | pub type_ref: Option<TypeRef>, | ||
128 | /// Bounds for the associated type, like in `Iterator<Item: | ||
129 | /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds` | ||
130 | /// feature.) | ||
131 | pub bounds: Vec<TypeBound>, | ||
115 | } | 132 | } |
116 | 133 | ||
117 | /// A single generic argument. | 134 | /// A single generic argument. |
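An `AssociatedTypeBinding` thus captures either an equality constraint (`Item = T`) or, with the unstable `associated_type_bounds` feature, direct bounds on the associated type (`Item: SomeTrait`). A small compilable illustration of the first form, with comments describing the fields it should lower to under this reading of the struct:

    // `Iterator<Item = u32>` corresponds to roughly
    // `AssociatedTypeBinding { name: Item, type_ref: Some(u32), bounds: [] }`.
    fn sum_all(it: impl Iterator<Item = u32>) -> u32 {
        it.sum()
    }

    fn main() {
        assert_eq!(sum_all(vec![1u32, 2, 3].into_iter()), 6);
        // `Iterator<Item: Clone>` (unstable `associated_type_bounds`) would instead
        // yield `type_ref: None` with `bounds: [Clone]`.
    }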
diff --git a/crates/ra_hir_def/src/path/lower.rs b/crates/ra_hir_def/src/path/lower.rs index 0f806d6fb..9ec2e0dcd 100644 --- a/crates/ra_hir_def/src/path/lower.rs +++ b/crates/ra_hir_def/src/path/lower.rs | |||
@@ -9,11 +9,12 @@ use hir_expand::{ | |||
9 | hygiene::Hygiene, | 9 | hygiene::Hygiene, |
10 | name::{name, AsName}, | 10 | name::{name, AsName}, |
11 | }; | 11 | }; |
12 | use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner}; | 12 | use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner, TypeBoundsOwner}; |
13 | 13 | ||
14 | use super::AssociatedTypeBinding; | ||
14 | use crate::{ | 15 | use crate::{ |
15 | path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, | 16 | path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, |
16 | type_ref::TypeRef, | 17 | type_ref::{TypeBound, TypeRef}, |
17 | }; | 18 | }; |
18 | 19 | ||
19 | pub(super) use lower_use::lower_use_tree; | 20 | pub(super) use lower_use::lower_use_tree; |
@@ -136,10 +137,16 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs> | |||
136 | // lifetimes ignored for now | 137 | // lifetimes ignored for now |
137 | let mut bindings = Vec::new(); | 138 | let mut bindings = Vec::new(); |
138 | for assoc_type_arg in node.assoc_type_args() { | 139 | for assoc_type_arg in node.assoc_type_args() { |
140 | let assoc_type_arg: ast::AssocTypeArg = assoc_type_arg; | ||
139 | if let Some(name_ref) = assoc_type_arg.name_ref() { | 141 | if let Some(name_ref) = assoc_type_arg.name_ref() { |
140 | let name = name_ref.as_name(); | 142 | let name = name_ref.as_name(); |
141 | let type_ref = TypeRef::from_ast_opt(assoc_type_arg.type_ref()); | 143 | let type_ref = assoc_type_arg.type_ref().map(TypeRef::from_ast); |
142 | bindings.push((name, type_ref)); | 144 | let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { |
145 | l.bounds().map(TypeBound::from_ast).collect() | ||
146 | } else { | ||
147 | Vec::new() | ||
148 | }; | ||
149 | bindings.push(AssociatedTypeBinding { name, type_ref, bounds }); | ||
143 | } | 150 | } |
144 | } | 151 | } |
145 | if args.is_empty() && bindings.is_empty() { | 152 | if args.is_empty() && bindings.is_empty() { |
@@ -168,7 +175,11 @@ fn lower_generic_args_from_fn_path( | |||
168 | } | 175 | } |
169 | if let Some(ret_type) = ret_type { | 176 | if let Some(ret_type) = ret_type { |
170 | let type_ref = TypeRef::from_ast_opt(ret_type.type_ref()); | 177 | let type_ref = TypeRef::from_ast_opt(ret_type.type_ref()); |
171 | bindings.push((name![Output], type_ref)) | 178 | bindings.push(AssociatedTypeBinding { |
179 | name: name![Output], | ||
180 | type_ref: Some(type_ref), | ||
181 | bounds: Vec::new(), | ||
182 | }); | ||
172 | } | 183 | } |
173 | if args.is_empty() && bindings.is_empty() { | 184 | if args.is_empty() && bindings.is_empty() { |
174 | None | 185 | None |
diff --git a/crates/ra_hir_def/src/type_ref.rs b/crates/ra_hir_def/src/type_ref.rs index ea29c4176..f308c6bdf 100644 --- a/crates/ra_hir_def/src/type_ref.rs +++ b/crates/ra_hir_def/src/type_ref.rs | |||
@@ -163,8 +163,16 @@ impl TypeRef { | |||
163 | let crate::path::GenericArg::Type(type_ref) = arg; | 163 | let crate::path::GenericArg::Type(type_ref) = arg; |
164 | go(type_ref, f); | 164 | go(type_ref, f); |
165 | } | 165 | } |
166 | for (_, type_ref) in &args_and_bindings.bindings { | 166 | for binding in &args_and_bindings.bindings { |
167 | go(type_ref, f); | 167 | if let Some(type_ref) = &binding.type_ref { |
168 | go(type_ref, f); | ||
169 | } | ||
170 | for bound in &binding.bounds { | ||
171 | match bound { | ||
172 | TypeBound::Path(path) => go_path(path, f), | ||
173 | TypeBound::Error => (), | ||
174 | } | ||
175 | } | ||
168 | } | 176 | } |
169 | } | 177 | } |
170 | } | 178 | } |
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml index 59efc1c31..e891d733f 100644 --- a/crates/ra_hir_ty/Cargo.toml +++ b/crates/ra_hir_ty/Cargo.toml | |||
@@ -8,6 +8,7 @@ authors = ["rust-analyzer developers"] | |||
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | itertools = "0.9.0" | ||
11 | arrayvec = "0.5.1" | 12 | arrayvec = "0.5.1" |
12 | smallvec = "1.2.0" | 13 | smallvec = "1.2.0" |
13 | ena = "0.13.1" | 14 | ena = "0.13.1" |
@@ -26,9 +27,9 @@ test_utils = { path = "../test_utils" } | |||
26 | 27 | ||
27 | scoped-tls = "1" | 28 | scoped-tls = "1" |
28 | 29 | ||
29 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" } | 30 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "6222e416b96892b2a86bc08de7dbc9826ff1acea" } |
30 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" } | 31 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "6222e416b96892b2a86bc08de7dbc9826ff1acea" } |
31 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" } | 32 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "6222e416b96892b2a86bc08de7dbc9826ff1acea" } |
32 | 33 | ||
33 | [dev-dependencies] | 34 | [dev-dependencies] |
34 | insta = "0.15.0" | 35 | insta = "0.16.0" |
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs index c482cf619..a64be9848 100644 --- a/crates/ra_hir_ty/src/_match.rs +++ b/crates/ra_hir_ty/src/_match.rs | |||
@@ -289,7 +289,7 @@ impl PatStack { | |||
289 | Self::from_slice(&self.0[1..]) | 289 | Self::from_slice(&self.0[1..]) |
290 | } | 290 | } |
291 | 291 | ||
292 | fn replace_head_with(&self, pat_ids: &[PatId]) -> PatStack { | 292 | fn replace_head_with<T: Into<PatIdOrWild> + Copy>(&self, pat_ids: &[T]) -> PatStack { |
293 | let mut patterns: PatStackInner = smallvec![]; | 293 | let mut patterns: PatStackInner = smallvec![]; |
294 | for pat in pat_ids { | 294 | for pat in pat_ids { |
295 | patterns.push((*pat).into()); | 295 | patterns.push((*pat).into()); |
@@ -320,12 +320,14 @@ impl PatStack { | |||
320 | constructor: &Constructor, | 320 | constructor: &Constructor, |
321 | ) -> MatchCheckResult<Option<PatStack>> { | 321 | ) -> MatchCheckResult<Option<PatStack>> { |
322 | let result = match (self.head().as_pat(cx), constructor) { | 322 | let result = match (self.head().as_pat(cx), constructor) { |
323 | (Pat::Tuple(ref pat_ids), Constructor::Tuple { arity }) => { | 323 | (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { |
324 | debug_assert_eq!( | 324 | if ellipsis.is_some() { |
325 | pat_ids.len(), | 325 | // If there are ellipsis here, we should add the correct number of |
326 | *arity, | 326 | // Pat::Wild patterns to `pat_ids`. We should be able to use the |
327 | "we type check before calling this code, so we should never hit this case", | 327 | // constructors arity for this, but at the time of writing we aren't |
328 | ); | 328 | // correctly calculating this arity when ellipsis are present. |
329 | return Err(MatchCheckErr::NotImplemented); | ||
330 | } | ||
329 | 331 | ||
330 | Some(self.replace_head_with(pat_ids)) | 332 | Some(self.replace_head_with(pat_ids)) |
331 | } | 333 | } |
@@ -351,19 +353,47 @@ impl PatStack { | |||
351 | Some(self.to_tail()) | 353 | Some(self.to_tail()) |
352 | } | 354 | } |
353 | } | 355 | } |
354 | (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(enum_constructor)) => { | 356 | ( |
357 | Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, | ||
358 | Constructor::Enum(enum_constructor), | ||
359 | ) => { | ||
355 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 360 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); |
356 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { | 361 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { |
357 | None | 362 | None |
358 | } else { | 363 | } else { |
359 | // If the enum variant matches, then we need to confirm | 364 | let constructor_arity = constructor.arity(cx)?; |
360 | // that the number of patterns aligns with the expected | 365 | if let Some(ellipsis_position) = ellipsis { |
361 | // number of patterns for that enum variant. | 366 | // If there are ellipsis in the pattern, the ellipsis must take the place |
362 | if pat_ids.len() != constructor.arity(cx)? { | 367 | // of at least one sub-pattern, so `pat_ids` should be smaller than the |
363 | return Err(MatchCheckErr::MalformedMatchArm); | 368 | // constructor arity. |
369 | if pat_ids.len() < constructor_arity { | ||
370 | let mut new_patterns: Vec<PatIdOrWild> = vec![]; | ||
371 | |||
372 | for pat_id in &pat_ids[0..ellipsis_position] { | ||
373 | new_patterns.push((*pat_id).into()); | ||
374 | } | ||
375 | |||
376 | for _ in 0..(constructor_arity - pat_ids.len()) { | ||
377 | new_patterns.push(PatIdOrWild::Wild); | ||
378 | } | ||
379 | |||
380 | for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] { | ||
381 | new_patterns.push((*pat_id).into()); | ||
382 | } | ||
383 | |||
384 | Some(self.replace_head_with(&new_patterns)) | ||
385 | } else { | ||
386 | return Err(MatchCheckErr::MalformedMatchArm); | ||
387 | } | ||
388 | } else { | ||
389 | // If there is no ellipsis in the tuple pattern, the number | ||
390 | // of patterns must equal the constructor arity. | ||
391 | if pat_ids.len() == constructor_arity { | ||
392 | Some(self.replace_head_with(pat_ids)) | ||
393 | } else { | ||
394 | return Err(MatchCheckErr::MalformedMatchArm); | ||
395 | } | ||
364 | } | 396 | } |
365 | |||
366 | Some(self.replace_head_with(pat_ids)) | ||
367 | } | 397 | } |
368 | } | 398 | } |
369 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), | 399 | (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented), |
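In effect, the new branch rebuilds a full-arity pattern row by splicing wildcards in where the `..` was: the sub-patterns written before the ellipsis, then enough `Pat::Wild` entries to reach the constructor's arity, then the sub-patterns written after it. A standalone sketch of that expansion with a simplified pattern type (not rust-analyzer's `Pat`):

    #[derive(Debug, Clone, PartialEq)]
    enum Pat {
        Bound(&'static str),
        Wild,
    }

    fn expand_ellipsis(pats: &[Pat], ellipsis_at: usize, arity: usize) -> Vec<Pat> {
        // Keep the sub-patterns before `..`, pad with wildcards up to `arity`,
        // then append the sub-patterns after `..`.
        let mut out = Vec::with_capacity(arity);
        out.extend_from_slice(&pats[..ellipsis_at]);
        out.extend(std::iter::repeat(Pat::Wild).take(arity - pats.len()));
        out.extend_from_slice(&pats[ellipsis_at..]);
        out
    }

    fn main() {
        // `Either::A(true, .., false)` against a 4-ary constructor: the written
        // sub-patterns [true, false] with `..` at index 1 expand to [true, _, _, false].
        let pats = [Pat::Bound("true"), Pat::Bound("false")];
        assert_eq!(
            expand_ellipsis(&pats, 1, 4),
            vec![Pat::Bound("true"), Pat::Wild, Pat::Wild, Pat::Bound("false")]
        );
    }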
@@ -644,7 +674,11 @@ impl Constructor { | |||
644 | fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> { | 674 | fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> { |
645 | let res = match pat.as_pat(cx) { | 675 | let res = match pat.as_pat(cx) { |
646 | Pat::Wild => None, | 676 | Pat::Wild => None, |
647 | Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }), | 677 | // FIXME somehow create the Tuple constructor with the proper arity. If there are |
678 | // ellipsis, the arity is not equal to the number of patterns. | ||
679 | Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => { | ||
680 | Some(Constructor::Tuple { arity: pats.len() }) | ||
681 | } | ||
648 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { | 682 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { |
649 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), | 683 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), |
650 | _ => return Err(MatchCheckErr::NotImplemented), | 684 | _ => return Err(MatchCheckErr::NotImplemented), |
@@ -1507,6 +1541,67 @@ mod tests { | |||
1507 | } | 1541 | } |
1508 | 1542 | ||
1509 | #[test] | 1543 | #[test] |
1544 | fn enum_tuple_partial_ellipsis_2_no_diagnostic() { | ||
1545 | let content = r" | ||
1546 | enum Either { | ||
1547 | A(bool, bool, bool, bool), | ||
1548 | B, | ||
1549 | } | ||
1550 | fn test_fn() { | ||
1551 | match Either::B { | ||
1552 | Either::A(true, .., true) => {}, | ||
1553 | Either::A(true, .., false) => {}, | ||
1554 | Either::A(.., true) => {}, | ||
1555 | Either::A(.., false) => {}, | ||
1556 | Either::B => {}, | ||
1557 | } | ||
1558 | } | ||
1559 | "; | ||
1560 | |||
1561 | check_no_diagnostic(content); | ||
1562 | } | ||
1563 | |||
1564 | #[test] | ||
1565 | fn enum_tuple_partial_ellipsis_missing_arm() { | ||
1566 | let content = r" | ||
1567 | enum Either { | ||
1568 | A(bool, bool, bool, bool), | ||
1569 | B, | ||
1570 | } | ||
1571 | fn test_fn() { | ||
1572 | match Either::B { | ||
1573 | Either::A(true, .., true) => {}, | ||
1574 | Either::A(true, .., false) => {}, | ||
1575 | Either::A(false, .., false) => {}, | ||
1576 | Either::B => {}, | ||
1577 | } | ||
1578 | } | ||
1579 | "; | ||
1580 | |||
1581 | check_diagnostic(content); | ||
1582 | } | ||
1583 | |||
1584 | #[test] | ||
1585 | fn enum_tuple_partial_ellipsis_2_missing_arm() { | ||
1586 | let content = r" | ||
1587 | enum Either { | ||
1588 | A(bool, bool, bool, bool), | ||
1589 | B, | ||
1590 | } | ||
1591 | fn test_fn() { | ||
1592 | match Either::B { | ||
1593 | Either::A(true, .., true) => {}, | ||
1594 | Either::A(true, .., false) => {}, | ||
1595 | Either::A(.., true) => {}, | ||
1596 | Either::B => {}, | ||
1597 | } | ||
1598 | } | ||
1599 | "; | ||
1600 | |||
1601 | check_diagnostic(content); | ||
1602 | } | ||
1603 | |||
1604 | #[test] | ||
1510 | fn enum_tuple_ellipsis_no_diagnostic() { | 1605 | fn enum_tuple_ellipsis_no_diagnostic() { |
1511 | let content = r" | 1606 | let content = r" |
1512 | enum Either { | 1607 | enum Either { |
@@ -1645,11 +1740,7 @@ mod false_negatives { | |||
1645 | "; | 1740 | "; |
1646 | 1741 | ||
1647 | // This is a false negative. | 1742 | // This is a false negative. |
1648 | // The `..` pattern is currently lowered to a single `Pat::Wild` | 1743 | // We don't currently handle tuple patterns with ellipsis. |
1649 | // no matter how many fields the `..` pattern is covering. This | ||
1650 | // causes the match arm in this test not to type check against | ||
1651 | // the match expression, which causes this diagnostic not to | ||
1652 | // fire. | ||
1653 | check_no_diagnostic(content); | 1744 | check_no_diagnostic(content); |
1654 | } | 1745 | } |
1655 | 1746 | ||
@@ -1664,32 +1755,7 @@ mod false_negatives { | |||
1664 | "; | 1755 | "; |
1665 | 1756 | ||
1666 | // This is a false negative. | 1757 | // This is a false negative. |
1667 | // See comments on `tuple_of_bools_with_ellipsis_at_end_missing_arm`. | 1758 | // We don't currently handle tuple patterns with ellipsis. |
1668 | check_no_diagnostic(content); | ||
1669 | } | ||
1670 | |||
1671 | #[test] | ||
1672 | fn enum_tuple_partial_ellipsis_missing_arm() { | ||
1673 | let content = r" | ||
1674 | enum Either { | ||
1675 | A(bool, bool, bool, bool), | ||
1676 | B, | ||
1677 | } | ||
1678 | fn test_fn() { | ||
1679 | match Either::B { | ||
1680 | Either::A(true, .., true) => {}, | ||
1681 | Either::A(true, .., false) => {}, | ||
1682 | Either::A(false, .., false) => {}, | ||
1683 | Either::B => {}, | ||
1684 | } | ||
1685 | } | ||
1686 | "; | ||
1687 | |||
1688 | // This is a false negative. | ||
1689 | // The `..` pattern is currently lowered to a single `Pat::Wild` | ||
1690 | // no matter how many fields the `..` pattern is covering. This | ||
1691 | // causes us to return a `MatchCheckErr::MalformedMatchArm` in | ||
1692 | // `Pat::specialize_constructor`. | ||
1693 | check_no_diagnostic(content); | 1759 | check_no_diagnostic(content); |
1694 | } | 1760 | } |
1695 | } | 1761 | } |
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs index 078476f76..8ec4d4ace 100644 --- a/crates/ra_hir_ty/src/infer/pat.rs +++ b/crates/ra_hir_ty/src/infer/pat.rs | |||
@@ -85,7 +85,7 @@ impl<'a> InferenceContext<'a> { | |||
85 | let body = Arc::clone(&self.body); // avoid borrow checker problem | 85 | let body = Arc::clone(&self.body); // avoid borrow checker problem |
86 | 86 | ||
87 | let is_non_ref_pat = match &body[pat] { | 87 | let is_non_ref_pat = match &body[pat] { |
88 | Pat::Tuple(..) | 88 | Pat::Tuple { .. } |
89 | | Pat::Or(..) | 89 | | Pat::Or(..) |
90 | | Pat::TupleStruct { .. } | 90 | | Pat::TupleStruct { .. } |
91 | | Pat::Record { .. } | 91 | | Pat::Record { .. } |
@@ -116,7 +116,7 @@ impl<'a> InferenceContext<'a> { | |||
116 | let expected = expected; | 116 | let expected = expected; |
117 | 117 | ||
118 | let ty = match &body[pat] { | 118 | let ty = match &body[pat] { |
119 | Pat::Tuple(ref args) => { | 119 | Pat::Tuple { ref args, .. } => { |
120 | let expectations = match expected.as_tuple() { | 120 | let expectations = match expected.as_tuple() { |
121 | Some(parameters) => &*parameters.0, | 121 | Some(parameters) => &*parameters.0, |
122 | _ => &[], | 122 | _ => &[], |
@@ -155,7 +155,7 @@ impl<'a> InferenceContext<'a> { | |||
155 | let subty = self.infer_pat(*pat, expectation, default_bm); | 155 | let subty = self.infer_pat(*pat, expectation, default_bm); |
156 | Ty::apply_one(TypeCtor::Ref(*mutability), subty) | 156 | Ty::apply_one(TypeCtor::Ref(*mutability), subty) |
157 | } | 157 | } |
158 | Pat::TupleStruct { path: p, args: subpats } => { | 158 | Pat::TupleStruct { path: p, args: subpats, .. } => { |
159 | self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat) | 159 | self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat) |
160 | } | 160 | } |
161 | Pat::Record { path: p, args: fields, ellipsis: _ } => { | 161 | Pat::Record { path: p, args: fields, ellipsis: _ } => { |
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs index 7b5990a47..cc1ac8e3e 100644 --- a/crates/ra_hir_ty/src/lower.rs +++ b/crates/ra_hir_ty/src/lower.rs | |||
@@ -8,6 +8,8 @@ | |||
8 | use std::iter; | 8 | use std::iter; |
9 | use std::sync::Arc; | 9 | use std::sync::Arc; |
10 | 10 | ||
11 | use smallvec::SmallVec; | ||
12 | |||
11 | use hir_def::{ | 13 | use hir_def::{ |
12 | adt::StructKind, | 14 | adt::StructKind, |
13 | builtin_type::BuiltinType, | 15 | builtin_type::BuiltinType, |
@@ -606,21 +608,35 @@ fn assoc_type_bindings_from_type_bound<'a>( | |||
606 | .into_iter() | 608 | .into_iter() |
607 | .flat_map(|segment| segment.args_and_bindings.into_iter()) | 609 | .flat_map(|segment| segment.args_and_bindings.into_iter()) |
608 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) | 610 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) |
609 | .map(move |(name, type_ref)| { | 611 | .flat_map(move |binding| { |
610 | let associated_ty = associated_type_by_name_including_super_traits( | 612 | let associated_ty = associated_type_by_name_including_super_traits( |
611 | ctx.db.upcast(), | 613 | ctx.db.upcast(), |
612 | trait_ref.trait_, | 614 | trait_ref.trait_, |
613 | &name, | 615 | &binding.name, |
614 | ); | 616 | ); |
615 | let associated_ty = match associated_ty { | 617 | let associated_ty = match associated_ty { |
616 | None => return GenericPredicate::Error, | 618 | None => return SmallVec::<[GenericPredicate; 1]>::new(), |
617 | Some(t) => t, | 619 | Some(t) => t, |
618 | }; | 620 | }; |
619 | let projection_ty = | 621 | let projection_ty = |
620 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; | 622 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; |
621 | let ty = Ty::from_hir(ctx, type_ref); | 623 | let mut preds = SmallVec::with_capacity( |
622 | let projection_predicate = ProjectionPredicate { projection_ty, ty }; | 624 | binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), |
623 | GenericPredicate::Projection(projection_predicate) | 625 | ); |
626 | if let Some(type_ref) = &binding.type_ref { | ||
627 | let ty = Ty::from_hir(ctx, type_ref); | ||
628 | let projection_predicate = | ||
629 | ProjectionPredicate { projection_ty: projection_ty.clone(), ty }; | ||
630 | preds.push(GenericPredicate::Projection(projection_predicate)); | ||
631 | } | ||
632 | for bound in &binding.bounds { | ||
633 | preds.extend(GenericPredicate::from_type_bound( | ||
634 | ctx, | ||
635 | bound, | ||
636 | Ty::Projection(projection_ty.clone()), | ||
637 | )); | ||
638 | } | ||
639 | preds | ||
624 | }) | 640 | }) |
625 | } | 641 | } |
626 | 642 | ||
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs index d088bf309..81c5e6299 100644 --- a/crates/ra_hir_ty/src/tests/traits.rs +++ b/crates/ra_hir_ty/src/tests/traits.rs | |||
@@ -1948,6 +1948,53 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> { | |||
1948 | } | 1948 | } |
1949 | 1949 | ||
1950 | #[test] | 1950 | #[test] |
1951 | fn inline_assoc_type_bounds_1() { | ||
1952 | let t = type_at( | ||
1953 | r#" | ||
1954 | //- /main.rs | ||
1955 | trait Iterator { | ||
1956 | type Item; | ||
1957 | } | ||
1958 | trait OtherTrait<T> { | ||
1959 | fn foo(&self) -> T; | ||
1960 | } | ||
1961 | |||
1962 | // workaround for Chalk assoc type normalization problems | ||
1963 | pub struct S<T>; | ||
1964 | impl<T: Iterator> Iterator for S<T> { | ||
1965 | type Item = <T as Iterator>::Item; | ||
1966 | } | ||
1967 | |||
1968 | fn test<I: Iterator<Item: OtherTrait<u32>>>() { | ||
1969 | let x: <S<I> as Iterator>::Item; | ||
1970 | x.foo()<|>; | ||
1971 | } | ||
1972 | "#, | ||
1973 | ); | ||
1974 | assert_eq!(t, "u32"); | ||
1975 | } | ||
1976 | |||
1977 | #[test] | ||
1978 | fn inline_assoc_type_bounds_2() { | ||
1979 | let t = type_at( | ||
1980 | r#" | ||
1981 | //- /main.rs | ||
1982 | trait Iterator { | ||
1983 | type Item; | ||
1984 | } | ||
1985 | |||
1986 | fn test<I: Iterator<Item: Iterator<Item = u32>>>() { | ||
1987 | let x: <<I as Iterator>::Item as Iterator>::Item; | ||
1988 | x<|>; | ||
1989 | } | ||
1990 | "#, | ||
1991 | ); | ||
1992 | // assert_eq!(t, "u32"); | ||
1993 | // doesn't currently work, Chalk #234 | ||
1994 | assert_eq!(t, "{unknown}"); | ||
1995 | } | ||
1996 | |||
1997 | #[test] | ||
1951 | fn unify_impl_trait() { | 1998 | fn unify_impl_trait() { |
1952 | assert_snapshot!( | 1999 | assert_snapshot!( |
1953 | infer_with_mismatches(r#" | 2000 | infer_with_mismatches(r#" |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index e05fea843..f6994a1f6 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -32,6 +32,9 @@ impl chalk_ir::interner::Interner for Interner { | |||
32 | type InternedGoal = Arc<GoalData<Self>>; | 32 | type InternedGoal = Arc<GoalData<Self>>; |
33 | type InternedGoals = Vec<Goal<Self>>; | 33 | type InternedGoals = Vec<Goal<Self>>; |
34 | type InternedSubstitution = Vec<Parameter<Self>>; | 34 | type InternedSubstitution = Vec<Parameter<Self>>; |
35 | type InternedProgramClause = chalk_ir::ProgramClauseData<Self>; | ||
36 | type InternedProgramClauses = Vec<chalk_ir::ProgramClause<Self>>; | ||
37 | type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>; | ||
35 | type Identifier = TypeAliasId; | 38 | type Identifier = TypeAliasId; |
36 | type DefId = InternId; | 39 | type DefId = InternId; |
37 | 40 | ||
@@ -181,6 +184,48 @@ impl chalk_ir::interner::Interner for Interner { | |||
181 | ) -> &'a [Parameter<Self>] { | 184 | ) -> &'a [Parameter<Self>] { |
182 | substitution | 185 | substitution |
183 | } | 186 | } |
187 | |||
188 | fn intern_program_clause( | ||
189 | &self, | ||
190 | data: chalk_ir::ProgramClauseData<Self>, | ||
191 | ) -> chalk_ir::ProgramClauseData<Self> { | ||
192 | data | ||
193 | } | ||
194 | |||
195 | fn program_clause_data<'a>( | ||
196 | &self, | ||
197 | clause: &'a chalk_ir::ProgramClauseData<Self>, | ||
198 | ) -> &'a chalk_ir::ProgramClauseData<Self> { | ||
199 | clause | ||
200 | } | ||
201 | |||
202 | fn intern_program_clauses( | ||
203 | &self, | ||
204 | data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>, | ||
205 | ) -> Vec<chalk_ir::ProgramClause<Self>> { | ||
206 | data.into_iter().collect() | ||
207 | } | ||
208 | |||
209 | fn program_clauses_data<'a>( | ||
210 | &self, | ||
211 | clauses: &'a Vec<chalk_ir::ProgramClause<Self>>, | ||
212 | ) -> &'a [chalk_ir::ProgramClause<Self>] { | ||
213 | clauses | ||
214 | } | ||
215 | |||
216 | fn intern_quantified_where_clauses( | ||
217 | &self, | ||
218 | data: impl IntoIterator<Item = chalk_ir::QuantifiedWhereClause<Self>>, | ||
219 | ) -> Self::InternedQuantifiedWhereClauses { | ||
220 | data.into_iter().collect() | ||
221 | } | ||
222 | |||
223 | fn quantified_where_clauses_data<'a>( | ||
224 | &self, | ||
225 | clauses: &'a Self::InternedQuantifiedWhereClauses, | ||
226 | ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] { | ||
227 | clauses | ||
228 | } | ||
184 | } | 229 | } |
185 | 230 | ||
186 | impl chalk_ir::interner::HasInterner for Interner { | 231 | impl chalk_ir::interner::HasInterner for Interner { |
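The interner methods added above follow the same identity-interning pattern as the existing ones in this impl: the interned representation of a sequence is simply a `Vec` of its elements, so the `intern_*` methods collect and the `*_data` accessors hand a slice back out. A stripped-down illustration of that shape (just the pattern, not chalk's actual trait):

    struct Clause(u32);

    fn intern_clauses(data: impl IntoIterator<Item = Clause>) -> Vec<Clause> {
        // "Interning" here is nothing more than collecting the owned data.
        data.into_iter().collect()
    }

    fn clauses_data(interned: &Vec<Clause>) -> &[Clause] {
        // ...and the accessor borrows it back as a slice.
        interned
    }

    fn main() {
        let interned = intern_clauses(vec![Clause(1), Clause(2)]);
        assert_eq!(clauses_data(&interned).len(), 2);
        assert_eq!(clauses_data(&interned)[0].0, 1);
    }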
@@ -238,12 +283,10 @@ impl ToChalk for Ty { | |||
238 | Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner), | 283 | Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner), |
239 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), | 284 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), |
240 | Ty::Dyn(predicates) => { | 285 | Ty::Dyn(predicates) => { |
241 | let where_clauses = predicates | 286 | let where_clauses = chalk_ir::QuantifiedWhereClauses::from( |
242 | .iter() | 287 | &Interner, |
243 | .filter(|p| !p.is_error()) | 288 | predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)), |
244 | .cloned() | 289 | ); |
245 | .map(|p| p.to_chalk(db)) | ||
246 | .collect(); | ||
247 | let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) }; | 290 | let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) }; |
248 | chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner) | 291 | chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner) |
249 | } | 292 | } |
@@ -281,8 +324,12 @@ impl ToChalk for Ty { | |||
281 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, | 324 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, |
282 | chalk_ir::TyData::Dyn(where_clauses) => { | 325 | chalk_ir::TyData::Dyn(where_clauses) => { |
283 | assert_eq!(where_clauses.bounds.binders.len(), 1); | 326 | assert_eq!(where_clauses.bounds.binders.len(), 1); |
284 | let predicates = | 327 | let predicates = where_clauses |
285 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); | 328 | .bounds |
329 | .skip_binders() | ||
330 | .iter(&Interner) | ||
331 | .map(|c| from_chalk(db, c.clone())) | ||
332 | .collect(); | ||
286 | Ty::Dyn(predicates) | 333 | Ty::Dyn(predicates) |
287 | } | 334 | } |
288 | } | 335 | } |
@@ -426,7 +473,7 @@ impl ToChalk for GenericPredicate { | |||
426 | ) -> GenericPredicate { | 473 | ) -> GenericPredicate { |
427 | // we don't produce any where clauses with binders and can't currently deal with them | 474 | // we don't produce any where clauses with binders and can't currently deal with them |
428 | match where_clause | 475 | match where_clause |
429 | .value | 476 | .skip_binders() |
430 | .shifted_out(&Interner) | 477 | .shifted_out(&Interner) |
431 | .expect("unexpected bound vars in where clause") | 478 | .expect("unexpected bound vars in where clause") |
432 | { | 479 | { |
@@ -521,7 +568,7 @@ impl ToChalk for Arc<super::TraitEnvironment> { | |||
521 | pred.clone().to_chalk(db).cast(&Interner); | 568 | pred.clone().to_chalk(db).cast(&Interner); |
522 | clauses.push(program_clause.into_from_env_clause(&Interner)); | 569 | clauses.push(program_clause.into_from_env_clause(&Interner)); |
523 | } | 570 | } |
524 | chalk_ir::Environment::new().add_clauses(clauses) | 571 | chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses) |
525 | } | 572 | } |
526 | 573 | ||
527 | fn from_chalk( | 574 | fn from_chalk( |
@@ -603,10 +650,10 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData { | |||
603 | } | 650 | } |
604 | 651 | ||
605 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { | 652 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { |
606 | chalk_ir::Binders { | 653 | chalk_ir::Binders::new( |
654 | std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(), | ||
607 | value, | 655 | value, |
608 | binders: std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(), | 656 | ) |
609 | } | ||
610 | } | 657 | } |
611 | 658 | ||
612 | fn convert_where_clauses( | 659 | fn convert_where_clauses( |
@@ -696,6 +743,12 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
696 | fn interner(&self) -> &Interner { | 743 | fn interner(&self) -> &Interner { |
697 | &Interner | 744 | &Interner |
698 | } | 745 | } |
746 | fn well_known_trait_id( | ||
747 | &self, | ||
748 | _well_known_trait: chalk_rust_ir::WellKnownTrait, | ||
749 | ) -> chalk_ir::TraitId<Interner> { | ||
750 | unimplemented!() | ||
751 | } | ||
699 | } | 752 | } |
700 | 753 | ||
701 | pub(crate) fn associated_ty_data_query( | 754 | pub(crate) fn associated_ty_data_query( |
diff --git a/crates/ra_hir_ty/src/traits/chalk/tls.rs b/crates/ra_hir_ty/src/traits/chalk/tls.rs index d9bbb54a5..fa8e4d1ad 100644 --- a/crates/ra_hir_ty/src/traits/chalk/tls.rs +++ b/crates/ra_hir_ty/src/traits/chalk/tls.rs | |||
@@ -2,10 +2,11 @@ | |||
2 | use std::fmt; | 2 | use std::fmt; |
3 | 3 | ||
4 | use chalk_ir::{AliasTy, Goal, Goals, Lifetime, Parameter, ProgramClauseImplication, TypeName}; | 4 | use chalk_ir::{AliasTy, Goal, Goals, Lifetime, Parameter, ProgramClauseImplication, TypeName}; |
5 | use itertools::Itertools; | ||
5 | 6 | ||
6 | use super::{from_chalk, Interner}; | 7 | use super::{from_chalk, Interner}; |
7 | use crate::{db::HirDatabase, CallableDef, TypeCtor}; | 8 | use crate::{db::HirDatabase, CallableDef, TypeCtor}; |
8 | use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId}; | 9 | use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId}; |
9 | 10 | ||
10 | pub use unsafe_tls::{set_current_program, with_current_program}; | 11 | pub use unsafe_tls::{set_current_program, with_current_program}; |
11 | 12 | ||
@@ -69,7 +70,27 @@ impl DebugContext<'_> { | |||
69 | write!(f, "{}::{}", trait_name, name)?; | 70 | write!(f, "{}::{}", trait_name, name)?; |
70 | } | 71 | } |
71 | TypeCtor::Closure { def, expr } => { | 72 | TypeCtor::Closure { def, expr } => { |
72 | write!(f, "{{closure {:?} in {:?}}}", expr.into_raw(), def)?; | 73 | write!(f, "{{closure {:?} in ", expr.into_raw())?; |
74 | match def { | ||
75 | DefWithBodyId::FunctionId(func) => { | ||
76 | write!(f, "fn {}", self.0.function_data(func).name)? | ||
77 | } | ||
78 | DefWithBodyId::StaticId(s) => { | ||
79 | if let Some(name) = self.0.static_data(s).name.as_ref() { | ||
80 | write!(f, "body of static {}", name)?; | ||
81 | } else { | ||
82 | write!(f, "body of unnamed static {:?}", s)?; | ||
83 | } | ||
84 | } | ||
85 | DefWithBodyId::ConstId(c) => { | ||
86 | if let Some(name) = self.0.const_data(c).name.as_ref() { | ||
87 | write!(f, "body of const {}", name)?; | ||
88 | } else { | ||
89 | write!(f, "body of unnamed const {:?}", c)?; | ||
90 | } | ||
91 | } | ||
92 | }; | ||
93 | write!(f, "}}")?; | ||
73 | } | 94 | } |
74 | } | 95 | } |
75 | Ok(()) | 96 | Ok(()) |
@@ -113,14 +134,15 @@ impl DebugContext<'_> { | |||
113 | }; | 134 | }; |
114 | let trait_data = self.0.trait_data(trait_); | 135 | let trait_data = self.0.trait_data(trait_); |
115 | let params = alias.substitution.parameters(&Interner); | 136 | let params = alias.substitution.parameters(&Interner); |
116 | write!( | 137 | write!(fmt, "<{:?} as {}", ¶ms[0], trait_data.name,)?; |
117 | fmt, | 138 | if params.len() > 1 { |
118 | "<{:?} as {}<{:?}>>::{}", | 139 | write!( |
119 | ¶ms[0], | 140 | fmt, |
120 | trait_data.name, | 141 | "<{}>", |
121 | ¶ms[1..], | 142 | ¶ms[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))), |
122 | type_alias_data.name | 143 | )?; |
123 | ) | 144 | } |
145 | write!(fmt, ">::{}", type_alias_data.name) | ||
124 | } | 146 | } |
125 | 147 | ||
126 | pub fn debug_ty( | 148 | pub fn debug_ty( |
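The reworked projection printer above joins the trailing parameters lazily with itertools' `format_with`, so each parameter is `Debug`-formatted only when the output is actually rendered. A minimal standalone use of the same call (assuming the itertools crate, which this file now imports):

    use itertools::Itertools;

    fn main() {
        let params = [1, 2, 3];
        // Joins the items with ", ", formatting each one through `Debug`.
        let joined = format!(
            "<{}>",
            params.iter().format_with(", ", |x, f| f(&format_args!("{:?}", x)))
        );
        assert_eq!(joined, "<1, 2, 3>");
    }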
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index b4a29b81b..05c940605 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -35,4 +35,4 @@ ra_assists = { path = "../ra_assists" } | |||
35 | hir = { path = "../ra_hir", package = "ra_hir" } | 35 | hir = { path = "../ra_hir", package = "ra_hir" } |
36 | 36 | ||
37 | [dev-dependencies] | 37 | [dev-dependencies] |
38 | insta = "0.15.0" | 38 | insta = "0.16.0" |
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs index 3cf394bb4..f5981588a 100644 --- a/crates/rust-analyzer/src/bin/args.rs +++ b/crates/rust-analyzer/src/bin/args.rs | |||
@@ -35,6 +35,13 @@ pub(crate) enum Command { | |||
35 | what: BenchWhat, | 35 | what: BenchWhat, |
36 | load_output_dirs: bool, | 36 | load_output_dirs: bool, |
37 | }, | 37 | }, |
38 | Diagnostics { | ||
39 | path: PathBuf, | ||
40 | load_output_dirs: bool, | ||
41 | /// Include files which are not modules. In rust-analyzer | ||
42 | /// this would include the parser test files. | ||
43 | all: bool, | ||
44 | }, | ||
38 | RunServer, | 45 | RunServer, |
39 | Version, | 46 | Version, |
40 | } | 47 | } |
@@ -209,6 +216,38 @@ ARGS: | |||
209 | let load_output_dirs = matches.contains("--load-output-dirs"); | 216 | let load_output_dirs = matches.contains("--load-output-dirs"); |
210 | Command::Bench { path, what, load_output_dirs } | 217 | Command::Bench { path, what, load_output_dirs } |
211 | } | 218 | } |
219 | "diagnostics" => { | ||
220 | if matches.contains(["-h", "--help"]) { | ||
221 | eprintln!( | ||
222 | "\ | ||
223 | ra-cli-diagnostics | ||
224 | |||
225 | USAGE: | ||
226 | rust-analyzer diagnostics [FLAGS] [PATH] | ||
227 | |||
228 | FLAGS: | ||
229 | -h, --help Prints help information | ||
230 | --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis | ||
231 | --all Include all files rather than only modules | ||
232 | |||
233 | ARGS: | ||
234 | <PATH>" | ||
235 | ); | ||
236 | return Ok(Err(HelpPrinted)); | ||
237 | } | ||
238 | |||
239 | let load_output_dirs = matches.contains("--load-output-dirs"); | ||
240 | let all = matches.contains("--all"); | ||
241 | let path = { | ||
242 | let mut trailing = matches.free()?; | ||
243 | if trailing.len() != 1 { | ||
244 | bail!("Invalid flags"); | ||
245 | } | ||
246 | trailing.pop().unwrap().into() | ||
247 | }; | ||
248 | |||
249 | Command::Diagnostics { path, load_output_dirs, all } | ||
250 | } | ||
212 | _ => { | 251 | _ => { |
213 | eprintln!( | 252 | eprintln!( |
214 | "\ | 253 | "\ |
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 608f4f67b..7cfc44f01 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs | |||
@@ -39,6 +39,10 @@ fn main() -> Result<()> { | |||
39 | cli::analysis_bench(args.verbosity, path.as_ref(), what, load_output_dirs)? | 39 | cli::analysis_bench(args.verbosity, path.as_ref(), what, load_output_dirs)? |
40 | } | 40 | } |
41 | 41 | ||
42 | args::Command::Diagnostics { path, load_output_dirs, all } => { | ||
43 | cli::diagnostics(path.as_ref(), load_output_dirs, all)? | ||
44 | } | ||
45 | |||
42 | args::Command::RunServer => run_server()?, | 46 | args::Command::RunServer => run_server()?, |
43 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), | 47 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), |
44 | } | 48 | } |
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index c9738d101..a865a7c7e 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs | |||
@@ -3,6 +3,7 @@ | |||
3 | mod load_cargo; | 3 | mod load_cargo; |
4 | mod analysis_stats; | 4 | mod analysis_stats; |
5 | mod analysis_bench; | 5 | mod analysis_bench; |
6 | mod diagnostics; | ||
6 | mod progress_report; | 7 | mod progress_report; |
7 | 8 | ||
8 | use std::io::Read; | 9 | use std::io::Read; |
@@ -12,6 +13,10 @@ use ra_ide::{file_structure, Analysis}; | |||
12 | use ra_prof::profile; | 13 | use ra_prof::profile; |
13 | use ra_syntax::{AstNode, SourceFile}; | 14 | use ra_syntax::{AstNode, SourceFile}; |
14 | 15 | ||
16 | pub use analysis_bench::{analysis_bench, BenchWhat, Position}; | ||
17 | pub use analysis_stats::analysis_stats; | ||
18 | pub use diagnostics::diagnostics; | ||
19 | |||
15 | #[derive(Clone, Copy)] | 20 | #[derive(Clone, Copy)] |
16 | pub enum Verbosity { | 21 | pub enum Verbosity { |
17 | Spammy, | 22 | Spammy, |
@@ -60,9 +65,6 @@ pub fn highlight(rainbow: bool) -> Result<()> { | |||
60 | Ok(()) | 65 | Ok(()) |
61 | } | 66 | } |
62 | 67 | ||
63 | pub use analysis_bench::{analysis_bench, BenchWhat, Position}; | ||
64 | pub use analysis_stats::analysis_stats; | ||
65 | |||
66 | fn file() -> Result<SourceFile> { | 68 | fn file() -> Result<SourceFile> { |
67 | let text = read_stdin()?; | 69 | let text = read_stdin()?; |
68 | Ok(SourceFile::parse(&text).tree()) | 70 | Ok(SourceFile::parse(&text).tree()) |
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs new file mode 100644 index 000000000..92664b415 --- /dev/null +++ b/crates/rust-analyzer/src/cli/diagnostics.rs | |||
@@ -0,0 +1,74 @@ | |||
1 | //! Analyze all modules in a project for diagnostics. Exits with a non-zero status | ||
2 | //! code if any errors are found. | ||
3 | |||
4 | use anyhow::anyhow; | ||
5 | use ra_db::SourceDatabaseExt; | ||
6 | use ra_ide::Severity; | ||
7 | use std::{collections::HashSet, path::Path}; | ||
8 | |||
9 | use crate::cli::{load_cargo::load_cargo, Result}; | ||
10 | use hir::Semantics; | ||
11 | |||
12 | pub fn diagnostics(path: &Path, load_output_dirs: bool, all: bool) -> Result<()> { | ||
13 | let (host, roots) = load_cargo(path, load_output_dirs)?; | ||
14 | let db = host.raw_database(); | ||
15 | let analysis = host.analysis(); | ||
16 | let semantics = Semantics::new(db); | ||
17 | let members = roots | ||
18 | .into_iter() | ||
19 | .filter_map(|(source_root_id, project_root)| { | ||
20 | // filter out dependencies | ||
21 | if project_root.is_member() { | ||
22 | Some(source_root_id) | ||
23 | } else { | ||
24 | None | ||
25 | } | ||
26 | }) | ||
27 | .collect::<HashSet<_>>(); | ||
28 | |||
29 | let mut found_error = false; | ||
30 | let mut visited_files = HashSet::new(); | ||
31 | for source_root_id in members { | ||
32 | for file_id in db.source_root(source_root_id).walk() { | ||
33 | // Filter out files which are not actually modules (unless `--all` flag is | ||
34 | // passed). In the rust-analyzer repository this filters out the parser test files. | ||
35 | if semantics.to_module_def(file_id).is_some() || all { | ||
36 | if !visited_files.contains(&file_id) { | ||
37 | let crate_name = if let Some(module) = semantics.to_module_def(file_id) { | ||
38 | if let Some(name) = module.krate().display_name(db) { | ||
39 | format!("{}", name) | ||
40 | } else { | ||
41 | String::from("unknown") | ||
42 | } | ||
43 | } else { | ||
44 | String::from("unknown") | ||
45 | }; | ||
46 | println!( | ||
47 | "processing crate: {}, module: {}", | ||
48 | crate_name, | ||
49 | db.file_relative_path(file_id) | ||
50 | ); | ||
51 | for diagnostic in analysis.diagnostics(file_id).unwrap() { | ||
52 | if matches!(diagnostic.severity, Severity::Error) { | ||
53 | found_error = true; | ||
54 | } | ||
55 | |||
56 | println!("{:?}", diagnostic); | ||
57 | } | ||
58 | |||
59 | visited_files.insert(file_id); | ||
60 | } | ||
61 | } | ||
62 | } | ||
63 | } | ||
64 | |||
65 | println!(); | ||
66 | println!("diagnostic scan complete"); | ||
67 | |||
68 | if found_error { | ||
69 | println!(); | ||
70 | Err(anyhow!("diagnostic error detected")) | ||
71 | } else { | ||
72 | Ok(()) | ||
73 | } | ||
74 | } | ||