diff options
51 files changed, 1126 insertions, 364 deletions
diff --git a/Cargo.lock b/Cargo.lock index 975c1aef8..89a734c9b 100644 --- a/Cargo.lock +++ b/Cargo.lock | |||
@@ -114,17 +114,18 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" | |||
114 | [[package]] | 114 | [[package]] |
115 | name = "chalk-derive" | 115 | name = "chalk-derive" |
116 | version = "0.1.0" | 116 | version = "0.1.0" |
117 | source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356" | 117 | source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" |
118 | dependencies = [ | 118 | dependencies = [ |
119 | "proc-macro2", | 119 | "proc-macro2", |
120 | "quote", | 120 | "quote", |
121 | "syn", | 121 | "syn", |
122 | "synstructure", | ||
122 | ] | 123 | ] |
123 | 124 | ||
124 | [[package]] | 125 | [[package]] |
125 | name = "chalk-engine" | 126 | name = "chalk-engine" |
126 | version = "0.9.0" | 127 | version = "0.9.0" |
127 | source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356" | 128 | source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" |
128 | dependencies = [ | 129 | dependencies = [ |
129 | "chalk-macros", | 130 | "chalk-macros", |
130 | "rustc-hash", | 131 | "rustc-hash", |
@@ -133,7 +134,7 @@ dependencies = [ | |||
133 | [[package]] | 134 | [[package]] |
134 | name = "chalk-ir" | 135 | name = "chalk-ir" |
135 | version = "0.1.0" | 136 | version = "0.1.0" |
136 | source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356" | 137 | source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" |
137 | dependencies = [ | 138 | dependencies = [ |
138 | "chalk-derive", | 139 | "chalk-derive", |
139 | "chalk-engine", | 140 | "chalk-engine", |
@@ -143,7 +144,7 @@ dependencies = [ | |||
143 | [[package]] | 144 | [[package]] |
144 | name = "chalk-macros" | 145 | name = "chalk-macros" |
145 | version = "0.1.1" | 146 | version = "0.1.1" |
146 | source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356" | 147 | source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" |
147 | dependencies = [ | 148 | dependencies = [ |
148 | "lazy_static", | 149 | "lazy_static", |
149 | ] | 150 | ] |
@@ -151,7 +152,7 @@ dependencies = [ | |||
151 | [[package]] | 152 | [[package]] |
152 | name = "chalk-rust-ir" | 153 | name = "chalk-rust-ir" |
153 | version = "0.1.0" | 154 | version = "0.1.0" |
154 | source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356" | 155 | source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" |
155 | dependencies = [ | 156 | dependencies = [ |
156 | "chalk-derive", | 157 | "chalk-derive", |
157 | "chalk-engine", | 158 | "chalk-engine", |
@@ -162,7 +163,7 @@ dependencies = [ | |||
162 | [[package]] | 163 | [[package]] |
163 | name = "chalk-solve" | 164 | name = "chalk-solve" |
164 | version = "0.1.0" | 165 | version = "0.1.0" |
165 | source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356" | 166 | source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" |
166 | dependencies = [ | 167 | dependencies = [ |
167 | "chalk-derive", | 168 | "chalk-derive", |
168 | "chalk-engine", | 169 | "chalk-engine", |
@@ -446,9 +447,9 @@ dependencies = [ | |||
446 | 447 | ||
447 | [[package]] | 448 | [[package]] |
448 | name = "hermit-abi" | 449 | name = "hermit-abi" |
449 | version = "0.1.10" | 450 | version = "0.1.11" |
450 | source = "registry+https://github.com/rust-lang/crates.io-index" | 451 | source = "registry+https://github.com/rust-lang/crates.io-index" |
451 | checksum = "725cf19794cf90aa94e65050cb4191ff5d8fa87a498383774c47b332e3af952e" | 452 | checksum = "8a0d737e0f947a1864e93d33fdef4af8445a00d1ed8dc0c8ddb73139ea6abf15" |
452 | dependencies = [ | 453 | dependencies = [ |
453 | "libc", | 454 | "libc", |
454 | ] | 455 | ] |
@@ -495,9 +496,9 @@ dependencies = [ | |||
495 | 496 | ||
496 | [[package]] | 497 | [[package]] |
497 | name = "insta" | 498 | name = "insta" |
498 | version = "0.15.0" | 499 | version = "0.16.0" |
499 | source = "registry+https://github.com/rust-lang/crates.io-index" | 500 | source = "registry+https://github.com/rust-lang/crates.io-index" |
500 | checksum = "8de3f029212a3fe78a6090f1f2b993877ca245a9ded863f3fcbd6eae084fc1ed" | 501 | checksum = "8386e795fb3927131ea4cede203c529a333652eb6dc4ff29616b832b27e9b096" |
501 | dependencies = [ | 502 | dependencies = [ |
502 | "console", | 503 | "console", |
503 | "difference", | 504 | "difference", |
@@ -593,15 +594,15 @@ checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" | |||
593 | 594 | ||
594 | [[package]] | 595 | [[package]] |
595 | name = "libc" | 596 | name = "libc" |
596 | version = "0.2.68" | 597 | version = "0.2.69" |
597 | source = "registry+https://github.com/rust-lang/crates.io-index" | 598 | source = "registry+https://github.com/rust-lang/crates.io-index" |
598 | checksum = "dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0" | 599 | checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005" |
599 | 600 | ||
600 | [[package]] | 601 | [[package]] |
601 | name = "libloading" | 602 | name = "libloading" |
602 | version = "0.6.0" | 603 | version = "0.6.1" |
603 | source = "registry+https://github.com/rust-lang/crates.io-index" | 604 | source = "registry+https://github.com/rust-lang/crates.io-index" |
604 | checksum = "2c979a19ffb457f0273965c333053f3d586bf759bf7b683fbebc37f9a9ebedc4" | 605 | checksum = "3c4f51b790f5bdb65acb4cc94bb81d7b2ee60348a5431ac1467d390b017600b0" |
605 | dependencies = [ | 606 | dependencies = [ |
606 | "winapi 0.3.8", | 607 | "winapi 0.3.8", |
607 | ] | 608 | ] |
@@ -757,9 +758,9 @@ dependencies = [ | |||
757 | 758 | ||
758 | [[package]] | 759 | [[package]] |
759 | name = "num_cpus" | 760 | name = "num_cpus" |
760 | version = "1.12.0" | 761 | version = "1.13.0" |
761 | source = "registry+https://github.com/rust-lang/crates.io-index" | 762 | source = "registry+https://github.com/rust-lang/crates.io-index" |
762 | checksum = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6" | 763 | checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" |
763 | dependencies = [ | 764 | dependencies = [ |
764 | "hermit-abi", | 765 | "hermit-abi", |
765 | "libc", | 766 | "libc", |
@@ -779,9 +780,9 @@ checksum = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" | |||
779 | 780 | ||
780 | [[package]] | 781 | [[package]] |
781 | name = "parking_lot" | 782 | name = "parking_lot" |
782 | version = "0.10.1" | 783 | version = "0.10.2" |
783 | source = "registry+https://github.com/rust-lang/crates.io-index" | 784 | source = "registry+https://github.com/rust-lang/crates.io-index" |
784 | checksum = "6fdfcb5f20930a79e326f7ec992a9fdb5b7bd809254b1e735bdd5a99f78bee0d" | 785 | checksum = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e" |
785 | dependencies = [ | 786 | dependencies = [ |
786 | "lock_api", | 787 | "lock_api", |
787 | "parking_lot_core", | 788 | "parking_lot_core", |
@@ -1013,6 +1014,7 @@ dependencies = [ | |||
1013 | "chalk-solve", | 1014 | "chalk-solve", |
1014 | "ena", | 1015 | "ena", |
1015 | "insta", | 1016 | "insta", |
1017 | "itertools", | ||
1016 | "log", | 1018 | "log", |
1017 | "ra_arena", | 1019 | "ra_arena", |
1018 | "ra_db", | 1020 | "ra_db", |
@@ -1339,6 +1341,7 @@ dependencies = [ | |||
1339 | "ra_hir_def", | 1341 | "ra_hir_def", |
1340 | "ra_hir_ty", | 1342 | "ra_hir_ty", |
1341 | "ra_ide", | 1343 | "ra_ide", |
1344 | "ra_proc_macro_srv", | ||
1342 | "ra_prof", | 1345 | "ra_prof", |
1343 | "ra_project_model", | 1346 | "ra_project_model", |
1344 | "ra_syntax", | 1347 | "ra_syntax", |
@@ -1565,6 +1568,18 @@ dependencies = [ | |||
1565 | ] | 1568 | ] |
1566 | 1569 | ||
1567 | [[package]] | 1570 | [[package]] |
1571 | name = "synstructure" | ||
1572 | version = "0.12.3" | ||
1573 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
1574 | checksum = "67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545" | ||
1575 | dependencies = [ | ||
1576 | "proc-macro2", | ||
1577 | "quote", | ||
1578 | "syn", | ||
1579 | "unicode-xid", | ||
1580 | ] | ||
1581 | |||
1582 | [[package]] | ||
1568 | name = "tempfile" | 1583 | name = "tempfile" |
1569 | version = "3.1.0" | 1584 | version = "3.1.0" |
1570 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1585 | source = "registry+https://github.com/rust-lang/crates.io-index" |
diff --git a/crates/ra_assists/src/doc_tests/generated.rs b/crates/ra_assists/src/doc_tests/generated.rs index b63b4d81a..b39e60870 100644 --- a/crates/ra_assists/src/doc_tests/generated.rs +++ b/crates/ra_assists/src/doc_tests/generated.rs | |||
@@ -78,7 +78,7 @@ fn foo() { | |||
78 | } | 78 | } |
79 | 79 | ||
80 | fn bar(arg: &str, baz: Baz) { | 80 | fn bar(arg: &str, baz: Baz) { |
81 | unimplemented!() | 81 | todo!() |
82 | } | 82 | } |
83 | 83 | ||
84 | "#####, | 84 | "#####, |
diff --git a/crates/ra_assists/src/handlers/add_function.rs b/crates/ra_assists/src/handlers/add_function.rs index 488bae08f..ad4ab66ed 100644 --- a/crates/ra_assists/src/handlers/add_function.rs +++ b/crates/ra_assists/src/handlers/add_function.rs | |||
@@ -29,7 +29,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; | |||
29 | // } | 29 | // } |
30 | // | 30 | // |
31 | // fn bar(arg: &str, baz: Baz) { | 31 | // fn bar(arg: &str, baz: Baz) { |
32 | // unimplemented!() | 32 | // todo!() |
33 | // } | 33 | // } |
34 | // | 34 | // |
35 | // ``` | 35 | // ``` |
@@ -80,7 +80,7 @@ impl FunctionBuilder { | |||
80 | Some(Self { append_fn_at, fn_name, type_params, params }) | 80 | Some(Self { append_fn_at, fn_name, type_params, params }) |
81 | } | 81 | } |
82 | fn render(self) -> Option<FunctionTemplate> { | 82 | fn render(self) -> Option<FunctionTemplate> { |
83 | let placeholder_expr = ast::make::expr_unimplemented(); | 83 | let placeholder_expr = ast::make::expr_todo(); |
84 | let fn_body = ast::make::block_expr(vec![], Some(placeholder_expr)); | 84 | let fn_body = ast::make::block_expr(vec![], Some(placeholder_expr)); |
85 | let fn_def = ast::make::fn_def(self.fn_name, self.type_params, self.params, fn_body); | 85 | let fn_def = ast::make::fn_def(self.fn_name, self.type_params, self.params, fn_body); |
86 | let fn_def = ast::make::add_newlines(2, fn_def); | 86 | let fn_def = ast::make::add_newlines(2, fn_def); |
@@ -225,7 +225,7 @@ fn foo() { | |||
225 | } | 225 | } |
226 | 226 | ||
227 | fn bar() { | 227 | fn bar() { |
228 | <|>unimplemented!() | 228 | <|>todo!() |
229 | } | 229 | } |
230 | ", | 230 | ", |
231 | ) | 231 | ) |
@@ -252,7 +252,7 @@ impl Foo { | |||
252 | } | 252 | } |
253 | 253 | ||
254 | fn bar() { | 254 | fn bar() { |
255 | <|>unimplemented!() | 255 | <|>todo!() |
256 | } | 256 | } |
257 | ", | 257 | ", |
258 | ) | 258 | ) |
@@ -276,7 +276,7 @@ fn foo1() { | |||
276 | } | 276 | } |
277 | 277 | ||
278 | fn bar() { | 278 | fn bar() { |
279 | <|>unimplemented!() | 279 | <|>todo!() |
280 | } | 280 | } |
281 | 281 | ||
282 | fn foo2() {} | 282 | fn foo2() {} |
@@ -302,7 +302,7 @@ mod baz { | |||
302 | } | 302 | } |
303 | 303 | ||
304 | fn bar() { | 304 | fn bar() { |
305 | <|>unimplemented!() | 305 | <|>todo!() |
306 | } | 306 | } |
307 | } | 307 | } |
308 | ", | 308 | ", |
@@ -315,20 +315,20 @@ mod baz { | |||
315 | add_function, | 315 | add_function, |
316 | r" | 316 | r" |
317 | struct Baz; | 317 | struct Baz; |
318 | fn baz() -> Baz { unimplemented!() } | 318 | fn baz() -> Baz { todo!() } |
319 | fn foo() { | 319 | fn foo() { |
320 | bar<|>(baz()); | 320 | bar<|>(baz()); |
321 | } | 321 | } |
322 | ", | 322 | ", |
323 | r" | 323 | r" |
324 | struct Baz; | 324 | struct Baz; |
325 | fn baz() -> Baz { unimplemented!() } | 325 | fn baz() -> Baz { todo!() } |
326 | fn foo() { | 326 | fn foo() { |
327 | bar(baz()); | 327 | bar(baz()); |
328 | } | 328 | } |
329 | 329 | ||
330 | fn bar(baz: Baz) { | 330 | fn bar(baz: Baz) { |
331 | <|>unimplemented!() | 331 | <|>todo!() |
332 | } | 332 | } |
333 | ", | 333 | ", |
334 | ); | 334 | ); |
@@ -361,7 +361,7 @@ impl Baz { | |||
361 | } | 361 | } |
362 | 362 | ||
363 | fn bar(baz: Baz) { | 363 | fn bar(baz: Baz) { |
364 | <|>unimplemented!() | 364 | <|>todo!() |
365 | } | 365 | } |
366 | ", | 366 | ", |
367 | ) | 367 | ) |
@@ -382,7 +382,7 @@ fn foo() { | |||
382 | } | 382 | } |
383 | 383 | ||
384 | fn bar(arg: &str) { | 384 | fn bar(arg: &str) { |
385 | <|>unimplemented!() | 385 | <|>todo!() |
386 | } | 386 | } |
387 | "#, | 387 | "#, |
388 | ) | 388 | ) |
@@ -403,7 +403,7 @@ fn foo() { | |||
403 | } | 403 | } |
404 | 404 | ||
405 | fn bar(arg: char) { | 405 | fn bar(arg: char) { |
406 | <|>unimplemented!() | 406 | <|>todo!() |
407 | } | 407 | } |
408 | "#, | 408 | "#, |
409 | ) | 409 | ) |
@@ -424,7 +424,7 @@ fn foo() { | |||
424 | } | 424 | } |
425 | 425 | ||
426 | fn bar(arg: i32) { | 426 | fn bar(arg: i32) { |
427 | <|>unimplemented!() | 427 | <|>todo!() |
428 | } | 428 | } |
429 | ", | 429 | ", |
430 | ) | 430 | ) |
@@ -445,7 +445,7 @@ fn foo() { | |||
445 | } | 445 | } |
446 | 446 | ||
447 | fn bar(arg: u8) { | 447 | fn bar(arg: u8) { |
448 | <|>unimplemented!() | 448 | <|>todo!() |
449 | } | 449 | } |
450 | ", | 450 | ", |
451 | ) | 451 | ) |
@@ -470,7 +470,7 @@ fn foo() { | |||
470 | } | 470 | } |
471 | 471 | ||
472 | fn bar(x: u8) { | 472 | fn bar(x: u8) { |
473 | <|>unimplemented!() | 473 | <|>todo!() |
474 | } | 474 | } |
475 | ", | 475 | ", |
476 | ) | 476 | ) |
@@ -493,7 +493,7 @@ fn foo() { | |||
493 | } | 493 | } |
494 | 494 | ||
495 | fn bar(worble: ()) { | 495 | fn bar(worble: ()) { |
496 | <|>unimplemented!() | 496 | <|>todo!() |
497 | } | 497 | } |
498 | ", | 498 | ", |
499 | ) | 499 | ) |
@@ -506,7 +506,7 @@ fn bar(worble: ()) { | |||
506 | r" | 506 | r" |
507 | trait Foo {} | 507 | trait Foo {} |
508 | fn foo() -> impl Foo { | 508 | fn foo() -> impl Foo { |
509 | unimplemented!() | 509 | todo!() |
510 | } | 510 | } |
511 | fn baz() { | 511 | fn baz() { |
512 | <|>bar(foo()) | 512 | <|>bar(foo()) |
@@ -515,14 +515,14 @@ fn baz() { | |||
515 | r" | 515 | r" |
516 | trait Foo {} | 516 | trait Foo {} |
517 | fn foo() -> impl Foo { | 517 | fn foo() -> impl Foo { |
518 | unimplemented!() | 518 | todo!() |
519 | } | 519 | } |
520 | fn baz() { | 520 | fn baz() { |
521 | bar(foo()) | 521 | bar(foo()) |
522 | } | 522 | } |
523 | 523 | ||
524 | fn bar(foo: impl Foo) { | 524 | fn bar(foo: impl Foo) { |
525 | <|>unimplemented!() | 525 | <|>todo!() |
526 | } | 526 | } |
527 | ", | 527 | ", |
528 | ) | 528 | ) |
@@ -556,7 +556,7 @@ mod Foo { | |||
556 | } | 556 | } |
557 | 557 | ||
558 | fn bar(baz: super::Baz::Bof) { | 558 | fn bar(baz: super::Baz::Bof) { |
559 | <|>unimplemented!() | 559 | <|>todo!() |
560 | } | 560 | } |
561 | } | 561 | } |
562 | ", | 562 | ", |
@@ -580,7 +580,7 @@ fn foo<T>(t: T) { | |||
580 | } | 580 | } |
581 | 581 | ||
582 | fn bar<T>(t: T) { | 582 | fn bar<T>(t: T) { |
583 | <|>unimplemented!() | 583 | <|>todo!() |
584 | } | 584 | } |
585 | ", | 585 | ", |
586 | ) | 586 | ) |
@@ -611,7 +611,7 @@ fn foo() { | |||
611 | } | 611 | } |
612 | 612 | ||
613 | fn bar(arg: fn() -> Baz) { | 613 | fn bar(arg: fn() -> Baz) { |
614 | <|>unimplemented!() | 614 | <|>todo!() |
615 | } | 615 | } |
616 | ", | 616 | ", |
617 | ) | 617 | ) |
@@ -636,7 +636,7 @@ fn foo() { | |||
636 | } | 636 | } |
637 | 637 | ||
638 | fn bar(closure: impl Fn(i64) -> i64) { | 638 | fn bar(closure: impl Fn(i64) -> i64) { |
639 | <|>unimplemented!() | 639 | <|>todo!() |
640 | } | 640 | } |
641 | ", | 641 | ", |
642 | ) | 642 | ) |
@@ -657,7 +657,7 @@ fn foo() { | |||
657 | } | 657 | } |
658 | 658 | ||
659 | fn bar(baz: ()) { | 659 | fn bar(baz: ()) { |
660 | <|>unimplemented!() | 660 | <|>todo!() |
661 | } | 661 | } |
662 | ", | 662 | ", |
663 | ) | 663 | ) |
@@ -682,7 +682,7 @@ fn foo() { | |||
682 | } | 682 | } |
683 | 683 | ||
684 | fn bar(baz_1: Baz, baz_2: Baz) { | 684 | fn bar(baz_1: Baz, baz_2: Baz) { |
685 | <|>unimplemented!() | 685 | <|>todo!() |
686 | } | 686 | } |
687 | ", | 687 | ", |
688 | ) | 688 | ) |
@@ -707,7 +707,7 @@ fn foo() { | |||
707 | } | 707 | } |
708 | 708 | ||
709 | fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { | 709 | fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { |
710 | <|>unimplemented!() | 710 | <|>todo!() |
711 | } | 711 | } |
712 | "#, | 712 | "#, |
713 | ) | 713 | ) |
@@ -779,7 +779,7 @@ impl Foo { | |||
779 | self.bar(); | 779 | self.bar(); |
780 | } | 780 | } |
781 | fn bar(&self) { | 781 | fn bar(&self) { |
782 | unimplemented!(); | 782 | todo!(); |
783 | } | 783 | } |
784 | } | 784 | } |
785 | ", | 785 | ", |
diff --git a/crates/ra_flycheck/Cargo.toml b/crates/ra_flycheck/Cargo.toml index c9a9ddc12..76e5cada4 100644 --- a/crates/ra_flycheck/Cargo.toml +++ b/crates/ra_flycheck/Cargo.toml | |||
@@ -13,4 +13,4 @@ serde_json = "1.0.48" | |||
13 | jod-thread = "0.1.1" | 13 | jod-thread = "0.1.1" |
14 | 14 | ||
15 | [dev-dependencies] | 15 | [dev-dependencies] |
16 | insta = "0.15.0" | 16 | insta = "0.16.0" |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 9baebf643..3801fce23 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -25,7 +25,7 @@ use hir_ty::{ | |||
25 | autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, | 25 | autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, |
26 | Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, | 26 | Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, |
27 | }; | 27 | }; |
28 | use ra_db::{CrateId, Edition, FileId}; | 28 | use ra_db::{CrateId, CrateName, Edition, FileId}; |
29 | use ra_prof::profile; | 29 | use ra_prof::profile; |
30 | use ra_syntax::{ | 30 | use ra_syntax::{ |
31 | ast::{self, AttrsOwner, NameOwner}, | 31 | ast::{self, AttrsOwner, NameOwner}, |
@@ -91,6 +91,10 @@ impl Crate { | |||
91 | db.crate_graph()[self.id].edition | 91 | db.crate_graph()[self.id].edition |
92 | } | 92 | } |
93 | 93 | ||
94 | pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateName> { | ||
95 | db.crate_graph()[self.id].display_name.as_ref().cloned() | ||
96 | } | ||
97 | |||
94 | pub fn all(db: &dyn HirDatabase) -> Vec<Crate> { | 98 | pub fn all(db: &dyn HirDatabase) -> Vec<Crate> { |
95 | db.crate_graph().iter().map(|id| Crate { id }).collect() | 99 | db.crate_graph().iter().map(|id| Crate { id }).collect() |
96 | } | 100 | } |
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml index 56e791e3e..b85358308 100644 --- a/crates/ra_hir_def/Cargo.toml +++ b/crates/ra_hir_def/Cargo.toml | |||
@@ -28,4 +28,4 @@ ra_cfg = { path = "../ra_cfg" } | |||
28 | tt = { path = "../ra_tt", package = "ra_tt" } | 28 | tt = { path = "../ra_tt", package = "ra_tt" } |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | insta = "0.15.0" | 31 | insta = "0.16.0" |
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs index 56a20c5bd..ccb682f9a 100644 --- a/crates/ra_hir_def/src/data.rs +++ b/crates/ra_hir_def/src/data.rs | |||
@@ -9,13 +9,14 @@ use hir_expand::{ | |||
9 | }; | 9 | }; |
10 | use ra_prof::profile; | 10 | use ra_prof::profile; |
11 | use ra_syntax::ast::{ | 11 | use ra_syntax::ast::{ |
12 | self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, VisibilityOwner, | 12 | self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, TypeBoundsOwner, |
13 | VisibilityOwner, | ||
13 | }; | 14 | }; |
14 | 15 | ||
15 | use crate::{ | 16 | use crate::{ |
16 | attr::Attrs, | 17 | attr::Attrs, |
17 | db::DefDatabase, | 18 | db::DefDatabase, |
18 | path::{path, GenericArgs, Path}, | 19 | path::{path, AssociatedTypeBinding, GenericArgs, Path}, |
19 | src::HasSource, | 20 | src::HasSource, |
20 | type_ref::{Mutability, TypeBound, TypeRef}, | 21 | type_ref::{Mutability, TypeBound, TypeRef}, |
21 | visibility::RawVisibility, | 22 | visibility::RawVisibility, |
@@ -95,7 +96,11 @@ fn desugar_future_path(orig: TypeRef) -> Path { | |||
95 | let path = path![std::future::Future]; | 96 | let path = path![std::future::Future]; |
96 | let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect(); | 97 | let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect(); |
97 | let mut last = GenericArgs::empty(); | 98 | let mut last = GenericArgs::empty(); |
98 | last.bindings.push((name![Output], orig)); | 99 | last.bindings.push(AssociatedTypeBinding { |
100 | name: name![Output], | ||
101 | type_ref: Some(orig), | ||
102 | bounds: Vec::new(), | ||
103 | }); | ||
99 | generic_args.push(Some(Arc::new(last))); | 104 | generic_args.push(Some(Arc::new(last))); |
100 | 105 | ||
101 | Path::from_known_path(path, generic_args) | 106 | Path::from_known_path(path, generic_args) |
@@ -106,6 +111,7 @@ pub struct TypeAliasData { | |||
106 | pub name: Name, | 111 | pub name: Name, |
107 | pub type_ref: Option<TypeRef>, | 112 | pub type_ref: Option<TypeRef>, |
108 | pub visibility: RawVisibility, | 113 | pub visibility: RawVisibility, |
114 | pub bounds: Vec<TypeBound>, | ||
109 | } | 115 | } |
110 | 116 | ||
111 | impl TypeAliasData { | 117 | impl TypeAliasData { |
@@ -118,9 +124,17 @@ impl TypeAliasData { | |||
118 | let name = node.value.name().map_or_else(Name::missing, |n| n.as_name()); | 124 | let name = node.value.name().map_or_else(Name::missing, |n| n.as_name()); |
119 | let type_ref = node.value.type_ref().map(TypeRef::from_ast); | 125 | let type_ref = node.value.type_ref().map(TypeRef::from_ast); |
120 | let vis_default = RawVisibility::default_for_container(loc.container); | 126 | let vis_default = RawVisibility::default_for_container(loc.container); |
121 | let visibility = | 127 | let visibility = RawVisibility::from_ast_with_default( |
122 | RawVisibility::from_ast_with_default(db, vis_default, node.map(|n| n.visibility())); | 128 | db, |
123 | Arc::new(TypeAliasData { name, type_ref, visibility }) | 129 | vis_default, |
130 | node.as_ref().map(|n| n.visibility()), | ||
131 | ); | ||
132 | let bounds = if let Some(bound_list) = node.value.type_bound_list() { | ||
133 | bound_list.bounds().map(TypeBound::from_ast).collect() | ||
134 | } else { | ||
135 | Vec::new() | ||
136 | }; | ||
137 | Arc::new(TypeAliasData { name, type_ref, visibility, bounds }) | ||
124 | } | 138 | } |
125 | } | 139 | } |
126 | 140 | ||
diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs index 91c7b3e09..162b3c8c7 100644 --- a/crates/ra_hir_def/src/path.rs +++ b/crates/ra_hir_def/src/path.rs | |||
@@ -14,7 +14,10 @@ use hir_expand::{ | |||
14 | use ra_db::CrateId; | 14 | use ra_db::CrateId; |
15 | use ra_syntax::ast; | 15 | use ra_syntax::ast; |
16 | 16 | ||
17 | use crate::{type_ref::TypeRef, InFile}; | 17 | use crate::{ |
18 | type_ref::{TypeBound, TypeRef}, | ||
19 | InFile, | ||
20 | }; | ||
18 | 21 | ||
19 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] | 22 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
20 | pub struct ModPath { | 23 | pub struct ModPath { |
@@ -111,7 +114,21 @@ pub struct GenericArgs { | |||
111 | /// is left out. | 114 | /// is left out. |
112 | pub has_self_type: bool, | 115 | pub has_self_type: bool, |
113 | /// Associated type bindings like in `Iterator<Item = T>`. | 116 | /// Associated type bindings like in `Iterator<Item = T>`. |
114 | pub bindings: Vec<(Name, TypeRef)>, | 117 | pub bindings: Vec<AssociatedTypeBinding>, |
118 | } | ||
119 | |||
120 | /// An associated type binding like in `Iterator<Item = T>`. | ||
121 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
122 | pub struct AssociatedTypeBinding { | ||
123 | /// The name of the associated type. | ||
124 | pub name: Name, | ||
125 | /// The type bound to this associated type (in `Item = T`, this would be the | ||
126 | /// `T`). This can be `None` if there are bounds instead. | ||
127 | pub type_ref: Option<TypeRef>, | ||
128 | /// Bounds for the associated type, like in `Iterator<Item: | ||
129 | /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds` | ||
130 | /// feature.) | ||
131 | pub bounds: Vec<TypeBound>, | ||
115 | } | 132 | } |
116 | 133 | ||
117 | /// A single generic argument. | 134 | /// A single generic argument. |
diff --git a/crates/ra_hir_def/src/path/lower.rs b/crates/ra_hir_def/src/path/lower.rs index 0f806d6fb..9ec2e0dcd 100644 --- a/crates/ra_hir_def/src/path/lower.rs +++ b/crates/ra_hir_def/src/path/lower.rs | |||
@@ -9,11 +9,12 @@ use hir_expand::{ | |||
9 | hygiene::Hygiene, | 9 | hygiene::Hygiene, |
10 | name::{name, AsName}, | 10 | name::{name, AsName}, |
11 | }; | 11 | }; |
12 | use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner}; | 12 | use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner, TypeBoundsOwner}; |
13 | 13 | ||
14 | use super::AssociatedTypeBinding; | ||
14 | use crate::{ | 15 | use crate::{ |
15 | path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, | 16 | path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, |
16 | type_ref::TypeRef, | 17 | type_ref::{TypeBound, TypeRef}, |
17 | }; | 18 | }; |
18 | 19 | ||
19 | pub(super) use lower_use::lower_use_tree; | 20 | pub(super) use lower_use::lower_use_tree; |
@@ -136,10 +137,16 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs> | |||
136 | // lifetimes ignored for now | 137 | // lifetimes ignored for now |
137 | let mut bindings = Vec::new(); | 138 | let mut bindings = Vec::new(); |
138 | for assoc_type_arg in node.assoc_type_args() { | 139 | for assoc_type_arg in node.assoc_type_args() { |
140 | let assoc_type_arg: ast::AssocTypeArg = assoc_type_arg; | ||
139 | if let Some(name_ref) = assoc_type_arg.name_ref() { | 141 | if let Some(name_ref) = assoc_type_arg.name_ref() { |
140 | let name = name_ref.as_name(); | 142 | let name = name_ref.as_name(); |
141 | let type_ref = TypeRef::from_ast_opt(assoc_type_arg.type_ref()); | 143 | let type_ref = assoc_type_arg.type_ref().map(TypeRef::from_ast); |
142 | bindings.push((name, type_ref)); | 144 | let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { |
145 | l.bounds().map(TypeBound::from_ast).collect() | ||
146 | } else { | ||
147 | Vec::new() | ||
148 | }; | ||
149 | bindings.push(AssociatedTypeBinding { name, type_ref, bounds }); | ||
143 | } | 150 | } |
144 | } | 151 | } |
145 | if args.is_empty() && bindings.is_empty() { | 152 | if args.is_empty() && bindings.is_empty() { |
@@ -168,7 +175,11 @@ fn lower_generic_args_from_fn_path( | |||
168 | } | 175 | } |
169 | if let Some(ret_type) = ret_type { | 176 | if let Some(ret_type) = ret_type { |
170 | let type_ref = TypeRef::from_ast_opt(ret_type.type_ref()); | 177 | let type_ref = TypeRef::from_ast_opt(ret_type.type_ref()); |
171 | bindings.push((name![Output], type_ref)) | 178 | bindings.push(AssociatedTypeBinding { |
179 | name: name![Output], | ||
180 | type_ref: Some(type_ref), | ||
181 | bounds: Vec::new(), | ||
182 | }); | ||
172 | } | 183 | } |
173 | if args.is_empty() && bindings.is_empty() { | 184 | if args.is_empty() && bindings.is_empty() { |
174 | None | 185 | None |
diff --git a/crates/ra_hir_def/src/type_ref.rs b/crates/ra_hir_def/src/type_ref.rs index ea29c4176..f308c6bdf 100644 --- a/crates/ra_hir_def/src/type_ref.rs +++ b/crates/ra_hir_def/src/type_ref.rs | |||
@@ -163,8 +163,16 @@ impl TypeRef { | |||
163 | let crate::path::GenericArg::Type(type_ref) = arg; | 163 | let crate::path::GenericArg::Type(type_ref) = arg; |
164 | go(type_ref, f); | 164 | go(type_ref, f); |
165 | } | 165 | } |
166 | for (_, type_ref) in &args_and_bindings.bindings { | 166 | for binding in &args_and_bindings.bindings { |
167 | go(type_ref, f); | 167 | if let Some(type_ref) = &binding.type_ref { |
168 | go(type_ref, f); | ||
169 | } | ||
170 | for bound in &binding.bounds { | ||
171 | match bound { | ||
172 | TypeBound::Path(path) => go_path(path, f), | ||
173 | TypeBound::Error => (), | ||
174 | } | ||
175 | } | ||
168 | } | 176 | } |
169 | } | 177 | } |
170 | } | 178 | } |
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml index 59efc1c31..177bdbcb0 100644 --- a/crates/ra_hir_ty/Cargo.toml +++ b/crates/ra_hir_ty/Cargo.toml | |||
@@ -8,6 +8,7 @@ authors = ["rust-analyzer developers"] | |||
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | itertools = "0.9.0" | ||
11 | arrayvec = "0.5.1" | 12 | arrayvec = "0.5.1" |
12 | smallvec = "1.2.0" | 13 | smallvec = "1.2.0" |
13 | ena = "0.13.1" | 14 | ena = "0.13.1" |
@@ -26,9 +27,9 @@ test_utils = { path = "../test_utils" } | |||
26 | 27 | ||
27 | scoped-tls = "1" | 28 | scoped-tls = "1" |
28 | 29 | ||
29 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" } | 30 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } |
30 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" } | 31 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } |
31 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" } | 32 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } |
32 | 33 | ||
33 | [dev-dependencies] | 34 | [dev-dependencies] |
34 | insta = "0.15.0" | 35 | insta = "0.16.0" |
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs index a64be9848..688026a04 100644 --- a/crates/ra_hir_ty/src/_match.rs +++ b/crates/ra_hir_ty/src/_match.rs | |||
@@ -194,9 +194,10 @@ use smallvec::{smallvec, SmallVec}; | |||
194 | use crate::{ | 194 | use crate::{ |
195 | db::HirDatabase, | 195 | db::HirDatabase, |
196 | expr::{Body, Expr, Literal, Pat, PatId}, | 196 | expr::{Body, Expr, Literal, Pat, PatId}, |
197 | InferenceResult, | 197 | ApplicationTy, InferenceResult, Ty, TypeCtor, |
198 | }; | 198 | }; |
199 | use hir_def::{adt::VariantData, EnumVariantId, VariantId}; | 199 | use hir_def::{adt::VariantData, AdtId, EnumVariantId, VariantId}; |
200 | use ra_arena::Idx; | ||
200 | 201 | ||
201 | #[derive(Debug, Clone, Copy)] | 202 | #[derive(Debug, Clone, Copy)] |
202 | /// Either a pattern from the source code being analyzed, represented as | 203 | /// Either a pattern from the source code being analyzed, represented as |
@@ -512,6 +513,7 @@ pub enum Usefulness { | |||
512 | } | 513 | } |
513 | 514 | ||
514 | pub struct MatchCheckCtx<'a> { | 515 | pub struct MatchCheckCtx<'a> { |
516 | pub match_expr: Idx<Expr>, | ||
515 | pub body: Arc<Body>, | 517 | pub body: Arc<Body>, |
516 | pub infer: Arc<InferenceResult>, | 518 | pub infer: Arc<InferenceResult>, |
517 | pub db: &'a dyn HirDatabase, | 519 | pub db: &'a dyn HirDatabase, |
@@ -530,6 +532,16 @@ pub(crate) fn is_useful( | |||
530 | matrix: &Matrix, | 532 | matrix: &Matrix, |
531 | v: &PatStack, | 533 | v: &PatStack, |
532 | ) -> MatchCheckResult<Usefulness> { | 534 | ) -> MatchCheckResult<Usefulness> { |
535 | // Handle the special case of enums with no variants. In that case, no match | ||
536 | // arm is useful. | ||
537 | if let Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) = | ||
538 | cx.infer[cx.match_expr].strip_references() | ||
539 | { | ||
540 | if cx.db.enum_data(*enum_id).variants.is_empty() { | ||
541 | return Ok(Usefulness::NotUseful); | ||
542 | } | ||
543 | } | ||
544 | |||
533 | if v.is_empty() { | 545 | if v.is_empty() { |
534 | let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; | 546 | let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; |
535 | 547 | ||
@@ -1618,6 +1630,32 @@ mod tests { | |||
1618 | 1630 | ||
1619 | check_no_diagnostic(content); | 1631 | check_no_diagnostic(content); |
1620 | } | 1632 | } |
1633 | |||
1634 | #[test] | ||
1635 | fn enum_never() { | ||
1636 | let content = r" | ||
1637 | enum Never {} | ||
1638 | |||
1639 | fn test_fn(never: Never) { | ||
1640 | match never {} | ||
1641 | } | ||
1642 | "; | ||
1643 | |||
1644 | check_no_diagnostic(content); | ||
1645 | } | ||
1646 | |||
1647 | #[test] | ||
1648 | fn enum_never_ref() { | ||
1649 | let content = r" | ||
1650 | enum Never {} | ||
1651 | |||
1652 | fn test_fn(never: &Never) { | ||
1653 | match never {} | ||
1654 | } | ||
1655 | "; | ||
1656 | |||
1657 | check_no_diagnostic(content); | ||
1658 | } | ||
1621 | } | 1659 | } |
1622 | 1660 | ||
1623 | #[cfg(test)] | 1661 | #[cfg(test)] |
diff --git a/crates/ra_hir_ty/src/autoderef.rs b/crates/ra_hir_ty/src/autoderef.rs index d91c21e24..1b0f84c5c 100644 --- a/crates/ra_hir_ty/src/autoderef.rs +++ b/crates/ra_hir_ty/src/autoderef.rs | |||
@@ -14,7 +14,7 @@ use crate::{ | |||
14 | db::HirDatabase, | 14 | db::HirDatabase, |
15 | traits::{InEnvironment, Solution}, | 15 | traits::{InEnvironment, Solution}, |
16 | utils::generics, | 16 | utils::generics, |
17 | BoundVar, Canonical, DebruijnIndex, Substs, Ty, | 17 | BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty, |
18 | }; | 18 | }; |
19 | 19 | ||
20 | const AUTODEREF_RECURSION_LIMIT: usize = 10; | 20 | const AUTODEREF_RECURSION_LIMIT: usize = 10; |
@@ -66,6 +66,20 @@ fn deref_by_trait( | |||
66 | let parameters = | 66 | let parameters = |
67 | Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build(); | 67 | Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build(); |
68 | 68 | ||
69 | // Check that the type implements Deref at all | ||
70 | let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; | ||
71 | let implements_goal = super::Canonical { | ||
72 | num_vars: ty.value.num_vars, | ||
73 | value: InEnvironment { | ||
74 | value: Obligation::Trait(trait_ref), | ||
75 | environment: ty.environment.clone(), | ||
76 | }, | ||
77 | }; | ||
78 | if db.trait_solve(krate, implements_goal).is_none() { | ||
79 | return None; | ||
80 | } | ||
81 | |||
82 | // Now do the assoc type projection | ||
69 | let projection = super::traits::ProjectionPredicate { | 83 | let projection = super::traits::ProjectionPredicate { |
70 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)), | 84 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)), |
71 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, | 85 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, |
@@ -91,6 +105,11 @@ fn deref_by_trait( | |||
91 | // they're just being 'passed through'. In the 'standard' case where | 105 | // they're just being 'passed through'. In the 'standard' case where |
92 | // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be | 106 | // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be |
93 | // the case. | 107 | // the case. |
108 | |||
109 | // FIXME: if the trait solver decides to truncate the type, these | ||
110 | // assumptions will be broken. We would need to properly introduce | ||
111 | // new variables in that case | ||
112 | |||
94 | for i in 1..vars.0.num_vars { | 113 | for i in 1..vars.0.num_vars { |
95 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) | 114 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) |
96 | { | 115 | { |
diff --git a/crates/ra_hir_ty/src/expr.rs b/crates/ra_hir_ty/src/expr.rs index 21abbcf1e..fd59f4320 100644 --- a/crates/ra_hir_ty/src/expr.rs +++ b/crates/ra_hir_ty/src/expr.rs | |||
@@ -156,7 +156,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
156 | None => return, | 156 | None => return, |
157 | }; | 157 | }; |
158 | 158 | ||
159 | let cx = MatchCheckCtx { body, infer: infer.clone(), db }; | 159 | let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; |
160 | let pats = arms.iter().map(|arm| arm.pat); | 160 | let pats = arms.iter().map(|arm| arm.pat); |
161 | 161 | ||
162 | let mut seen = Matrix::empty(); | 162 | let mut seen = Matrix::empty(); |
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs index ac25f8a80..5f6cea8d3 100644 --- a/crates/ra_hir_ty/src/infer/unify.rs +++ b/crates/ra_hir_ty/src/infer/unify.rs | |||
@@ -32,6 +32,7 @@ where | |||
32 | var_stack: Vec<TypeVarId>, | 32 | var_stack: Vec<TypeVarId>, |
33 | } | 33 | } |
34 | 34 | ||
35 | #[derive(Debug)] | ||
35 | pub(super) struct Canonicalized<T> { | 36 | pub(super) struct Canonicalized<T> { |
36 | pub value: Canonical<T>, | 37 | pub value: Canonical<T>, |
37 | free_vars: Vec<InferTy>, | 38 | free_vars: Vec<InferTy>, |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs index 18f74d3b1..2677f3af2 100644 --- a/crates/ra_hir_ty/src/lib.rs +++ b/crates/ra_hir_ty/src/lib.rs | |||
@@ -680,6 +680,16 @@ impl Ty { | |||
680 | } | 680 | } |
681 | } | 681 | } |
682 | 682 | ||
683 | pub fn strip_references(&self) -> &Ty { | ||
684 | let mut t: &Ty = self; | ||
685 | |||
686 | while let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_mutability), parameters }) = t { | ||
687 | t = parameters.as_single(); | ||
688 | } | ||
689 | |||
690 | t | ||
691 | } | ||
692 | |||
683 | pub fn as_adt(&self) -> Option<(AdtId, &Substs)> { | 693 | pub fn as_adt(&self) -> Option<(AdtId, &Substs)> { |
684 | match self { | 694 | match self { |
685 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => { | 695 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => { |
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs index 6c7bbc448..cc1ac8e3e 100644 --- a/crates/ra_hir_ty/src/lower.rs +++ b/crates/ra_hir_ty/src/lower.rs | |||
@@ -8,6 +8,8 @@ | |||
8 | use std::iter; | 8 | use std::iter; |
9 | use std::sync::Arc; | 9 | use std::sync::Arc; |
10 | 10 | ||
11 | use smallvec::SmallVec; | ||
12 | |||
11 | use hir_def::{ | 13 | use hir_def::{ |
12 | adt::StructKind, | 14 | adt::StructKind, |
13 | builtin_type::BuiltinType, | 15 | builtin_type::BuiltinType, |
@@ -360,13 +362,23 @@ impl Ty { | |||
360 | }, | 362 | }, |
361 | Some(TypeNs::GenericParam(param_id)) => { | 363 | Some(TypeNs::GenericParam(param_id)) => { |
362 | let predicates = ctx.db.generic_predicates_for_param(param_id); | 364 | let predicates = ctx.db.generic_predicates_for_param(param_id); |
363 | predicates | 365 | let mut traits_: Vec<_> = predicates |
364 | .iter() | 366 | .iter() |
365 | .filter_map(|pred| match &pred.value { | 367 | .filter_map(|pred| match &pred.value { |
366 | GenericPredicate::Implemented(tr) => Some(tr.trait_), | 368 | GenericPredicate::Implemented(tr) => Some(tr.trait_), |
367 | _ => None, | 369 | _ => None, |
368 | }) | 370 | }) |
369 | .collect() | 371 | .collect(); |
372 | // Handle `Self::Type` referring to own associated type in trait definitions | ||
373 | if let GenericDefId::TraitId(trait_id) = param_id.parent { | ||
374 | let generics = generics(ctx.db.upcast(), trait_id.into()); | ||
375 | if generics.params.types[param_id.local_id].provenance | ||
376 | == TypeParamProvenance::TraitSelf | ||
377 | { | ||
378 | traits_.push(trait_id); | ||
379 | } | ||
380 | } | ||
381 | traits_ | ||
370 | } | 382 | } |
371 | _ => return Ty::Unknown, | 383 | _ => return Ty::Unknown, |
372 | }; | 384 | }; |
@@ -596,21 +608,35 @@ fn assoc_type_bindings_from_type_bound<'a>( | |||
596 | .into_iter() | 608 | .into_iter() |
597 | .flat_map(|segment| segment.args_and_bindings.into_iter()) | 609 | .flat_map(|segment| segment.args_and_bindings.into_iter()) |
598 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) | 610 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) |
599 | .map(move |(name, type_ref)| { | 611 | .flat_map(move |binding| { |
600 | let associated_ty = associated_type_by_name_including_super_traits( | 612 | let associated_ty = associated_type_by_name_including_super_traits( |
601 | ctx.db.upcast(), | 613 | ctx.db.upcast(), |
602 | trait_ref.trait_, | 614 | trait_ref.trait_, |
603 | &name, | 615 | &binding.name, |
604 | ); | 616 | ); |
605 | let associated_ty = match associated_ty { | 617 | let associated_ty = match associated_ty { |
606 | None => return GenericPredicate::Error, | 618 | None => return SmallVec::<[GenericPredicate; 1]>::new(), |
607 | Some(t) => t, | 619 | Some(t) => t, |
608 | }; | 620 | }; |
609 | let projection_ty = | 621 | let projection_ty = |
610 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; | 622 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; |
611 | let ty = Ty::from_hir(ctx, type_ref); | 623 | let mut preds = SmallVec::with_capacity( |
612 | let projection_predicate = ProjectionPredicate { projection_ty, ty }; | 624 | binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), |
613 | GenericPredicate::Projection(projection_predicate) | 625 | ); |
626 | if let Some(type_ref) = &binding.type_ref { | ||
627 | let ty = Ty::from_hir(ctx, type_ref); | ||
628 | let projection_predicate = | ||
629 | ProjectionPredicate { projection_ty: projection_ty.clone(), ty }; | ||
630 | preds.push(GenericPredicate::Projection(projection_predicate)); | ||
631 | } | ||
632 | for bound in &binding.bounds { | ||
633 | preds.extend(GenericPredicate::from_type_bound( | ||
634 | ctx, | ||
635 | bound, | ||
636 | Ty::Projection(projection_ty.clone()), | ||
637 | )); | ||
638 | } | ||
639 | preds | ||
614 | }) | 640 | }) |
615 | } | 641 | } |
616 | 642 | ||
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs index 3402e0cb5..d69115a2f 100644 --- a/crates/ra_hir_ty/src/tests/regression.rs +++ b/crates/ra_hir_ty/src/tests/regression.rs | |||
@@ -451,8 +451,7 @@ pub mod str { | |||
451 | "#, | 451 | "#, |
452 | ); | 452 | ); |
453 | 453 | ||
454 | // should be Option<char>, but currently not because of Chalk ambiguity problem | 454 | assert_eq!("(Option<char>, Option<char>)", super::type_at_pos(&db, pos)); |
455 | assert_eq!("(Option<{unknown}>, Option<{unknown}>)", super::type_at_pos(&db, pos)); | ||
456 | } | 455 | } |
457 | 456 | ||
458 | #[test] | 457 | #[test] |
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs index 22ae6ca90..0a889f805 100644 --- a/crates/ra_hir_ty/src/tests/traits.rs +++ b/crates/ra_hir_ty/src/tests/traits.rs | |||
@@ -349,7 +349,6 @@ trait Trait: SuperTrait { | |||
349 | 349 | ||
350 | #[test] | 350 | #[test] |
351 | fn infer_project_associated_type() { | 351 | fn infer_project_associated_type() { |
352 | // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234 | ||
353 | assert_snapshot!( | 352 | assert_snapshot!( |
354 | infer(r#" | 353 | infer(r#" |
355 | trait Iterable { | 354 | trait Iterable { |
@@ -368,12 +367,12 @@ fn test<T: Iterable>() { | |||
368 | [108; 261) '{ ...ter; }': () | 367 | [108; 261) '{ ...ter; }': () |
369 | [118; 119) 'x': u32 | 368 | [118; 119) 'x': u32 |
370 | [145; 146) '1': u32 | 369 | [145; 146) '1': u32 |
371 | [156; 157) 'y': {unknown} | 370 | [156; 157) 'y': Iterable::Item<T> |
372 | [183; 192) 'no_matter': {unknown} | 371 | [183; 192) 'no_matter': Iterable::Item<T> |
373 | [202; 203) 'z': {unknown} | 372 | [202; 203) 'z': Iterable::Item<T> |
374 | [215; 224) 'no_matter': {unknown} | 373 | [215; 224) 'no_matter': Iterable::Item<T> |
375 | [234; 235) 'a': {unknown} | 374 | [234; 235) 'a': Iterable::Item<T> |
376 | [249; 258) 'no_matter': {unknown} | 375 | [249; 258) 'no_matter': Iterable::Item<T> |
377 | "### | 376 | "### |
378 | ); | 377 | ); |
379 | } | 378 | } |
@@ -433,8 +432,8 @@ fn test<T: Iterable<Item=u32>>() { | |||
433 | "#), | 432 | "#), |
434 | @r###" | 433 | @r###" |
435 | [67; 100) '{ ...own; }': () | 434 | [67; 100) '{ ...own; }': () |
436 | [77; 78) 'y': {unknown} | 435 | [77; 78) 'y': u32 |
437 | [90; 97) 'unknown': {unknown} | 436 | [90; 97) 'unknown': u32 |
438 | "### | 437 | "### |
439 | ); | 438 | ); |
440 | } | 439 | } |
@@ -549,7 +548,7 @@ impl std::ops::Index<u32> for Bar { | |||
549 | 548 | ||
550 | fn test() { | 549 | fn test() { |
551 | let a = Bar; | 550 | let a = Bar; |
552 | let b = a[1]; | 551 | let b = a[1u32]; |
553 | b<|>; | 552 | b<|>; |
554 | } | 553 | } |
555 | 554 | ||
@@ -574,7 +573,7 @@ fn infer_ops_index_autoderef() { | |||
574 | //- /main.rs crate:main deps:std | 573 | //- /main.rs crate:main deps:std |
575 | fn test() { | 574 | fn test() { |
576 | let a = &[1u32, 2, 3]; | 575 | let a = &[1u32, 2, 3]; |
577 | let b = a[1]; | 576 | let b = a[1u32]; |
578 | b<|>; | 577 | b<|>; |
579 | } | 578 | } |
580 | 579 | ||
@@ -916,11 +915,7 @@ fn test<T: ApplyL>(t: T) { | |||
916 | } | 915 | } |
917 | "#, | 916 | "#, |
918 | ); | 917 | ); |
919 | // FIXME here Chalk doesn't normalize the type to a placeholder. I think we | 918 | assert_eq!(t, "ApplyL::Out<T>"); |
920 | // need to add a rule like Normalize(<T as ApplyL>::Out -> ApplyL::Out<T>) | ||
921 | // to the trait env ourselves here; probably Chalk can't do this by itself. | ||
922 | // assert_eq!(t, "ApplyL::Out<[missing name]>"); | ||
923 | assert_eq!(t, "{unknown}"); | ||
924 | } | 919 | } |
925 | 920 | ||
926 | #[test] | 921 | #[test] |
@@ -1329,16 +1324,16 @@ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) { | |||
1329 | [263; 264) 'y': impl Trait<Type = i64> | 1324 | [263; 264) 'y': impl Trait<Type = i64> |
1330 | [290; 398) '{ ...r>); }': () | 1325 | [290; 398) '{ ...r>); }': () |
1331 | [296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type | 1326 | [296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type |
1332 | [296; 302) 'get(x)': {unknown} | 1327 | [296; 302) 'get(x)': u32 |
1333 | [300; 301) 'x': T | 1328 | [300; 301) 'x': T |
1334 | [308; 312) 'get2': fn get2<{unknown}, T>(T) -> {unknown} | 1329 | [308; 312) 'get2': fn get2<u32, T>(T) -> u32 |
1335 | [308; 315) 'get2(x)': {unknown} | 1330 | [308; 315) 'get2(x)': u32 |
1336 | [313; 314) 'x': T | 1331 | [313; 314) 'x': T |
1337 | [321; 324) 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type | 1332 | [321; 324) 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type |
1338 | [321; 327) 'get(y)': {unknown} | 1333 | [321; 327) 'get(y)': i64 |
1339 | [325; 326) 'y': impl Trait<Type = i64> | 1334 | [325; 326) 'y': impl Trait<Type = i64> |
1340 | [333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> {unknown} | 1335 | [333; 337) 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64 |
1341 | [333; 340) 'get2(y)': {unknown} | 1336 | [333; 340) 'get2(y)': i64 |
1342 | [338; 339) 'y': impl Trait<Type = i64> | 1337 | [338; 339) 'y': impl Trait<Type = i64> |
1343 | [346; 349) 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type | 1338 | [346; 349) 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type |
1344 | [346; 357) 'get(set(S))': u64 | 1339 | [346; 357) 'get(set(S))': u64 |
@@ -1402,7 +1397,6 @@ mod iter { | |||
1402 | 1397 | ||
1403 | #[test] | 1398 | #[test] |
1404 | fn projection_eq_within_chalk() { | 1399 | fn projection_eq_within_chalk() { |
1405 | // std::env::set_var("CHALK_DEBUG", "1"); | ||
1406 | assert_snapshot!( | 1400 | assert_snapshot!( |
1407 | infer(r#" | 1401 | infer(r#" |
1408 | trait Trait1 { | 1402 | trait Trait1 { |
@@ -1422,7 +1416,7 @@ fn test<T: Trait1<Type = u32>>(x: T) { | |||
1422 | [164; 165) 'x': T | 1416 | [164; 165) 'x': T |
1423 | [170; 186) '{ ...o(); }': () | 1417 | [170; 186) '{ ...o(); }': () |
1424 | [176; 177) 'x': T | 1418 | [176; 177) 'x': T |
1425 | [176; 183) 'x.foo()': {unknown} | 1419 | [176; 183) 'x.foo()': u32 |
1426 | "### | 1420 | "### |
1427 | ); | 1421 | ); |
1428 | } | 1422 | } |
@@ -1578,7 +1572,7 @@ fn test<F: FnOnce(u32, u64) -> u128>(f: F) { | |||
1578 | [150; 151) 'f': F | 1572 | [150; 151) 'f': F |
1579 | [156; 184) '{ ...2)); }': () | 1573 | [156; 184) '{ ...2)); }': () |
1580 | [162; 163) 'f': F | 1574 | [162; 163) 'f': F |
1581 | [162; 181) 'f.call...1, 2))': {unknown} | 1575 | [162; 181) 'f.call...1, 2))': u128 |
1582 | [174; 180) '(1, 2)': (u32, u64) | 1576 | [174; 180) '(1, 2)': (u32, u64) |
1583 | [175; 176) '1': u32 | 1577 | [175; 176) '1': u32 |
1584 | [178; 179) '2': u64 | 1578 | [178; 179) '2': u64 |
@@ -1803,7 +1797,7 @@ fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> { | |||
1803 | } | 1797 | } |
1804 | 1798 | ||
1805 | #[test] | 1799 | #[test] |
1806 | fn unselected_projection_on_trait_self() { | 1800 | fn unselected_projection_on_impl_self() { |
1807 | assert_snapshot!(infer( | 1801 | assert_snapshot!(infer( |
1808 | r#" | 1802 | r#" |
1809 | //- /main.rs | 1803 | //- /main.rs |
@@ -1829,7 +1823,7 @@ impl Trait for S2 { | |||
1829 | "#, | 1823 | "#, |
1830 | ), @r###" | 1824 | ), @r###" |
1831 | [54; 58) 'self': &Self | 1825 | [54; 58) 'self': &Self |
1832 | [60; 61) 'x': {unknown} | 1826 | [60; 61) 'x': Trait::Item<Self> |
1833 | [140; 144) 'self': &S | 1827 | [140; 144) 'self': &S |
1834 | [146; 147) 'x': u32 | 1828 | [146; 147) 'x': u32 |
1835 | [161; 175) '{ let y = x; }': () | 1829 | [161; 175) '{ let y = x; }': () |
@@ -1844,6 +1838,30 @@ impl Trait for S2 { | |||
1844 | } | 1838 | } |
1845 | 1839 | ||
1846 | #[test] | 1840 | #[test] |
1841 | fn unselected_projection_on_trait_self() { | ||
1842 | let t = type_at( | ||
1843 | r#" | ||
1844 | //- /main.rs | ||
1845 | trait Trait { | ||
1846 | type Item; | ||
1847 | |||
1848 | fn f(&self) -> Self::Item { loop {} } | ||
1849 | } | ||
1850 | |||
1851 | struct S; | ||
1852 | impl Trait for S { | ||
1853 | type Item = u32; | ||
1854 | } | ||
1855 | |||
1856 | fn test() { | ||
1857 | S.f()<|>; | ||
1858 | } | ||
1859 | "#, | ||
1860 | ); | ||
1861 | assert_eq!(t, "u32"); | ||
1862 | } | ||
1863 | |||
1864 | #[test] | ||
1847 | fn trait_impl_self_ty() { | 1865 | fn trait_impl_self_ty() { |
1848 | let t = type_at( | 1866 | let t = type_at( |
1849 | r#" | 1867 | r#" |
@@ -1924,6 +1942,119 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> { | |||
1924 | } | 1942 | } |
1925 | 1943 | ||
1926 | #[test] | 1944 | #[test] |
1945 | fn inline_assoc_type_bounds_1() { | ||
1946 | let t = type_at( | ||
1947 | r#" | ||
1948 | //- /main.rs | ||
1949 | trait Iterator { | ||
1950 | type Item; | ||
1951 | } | ||
1952 | trait OtherTrait<T> { | ||
1953 | fn foo(&self) -> T; | ||
1954 | } | ||
1955 | |||
1956 | // workaround for Chalk assoc type normalization problems | ||
1957 | pub struct S<T>; | ||
1958 | impl<T: Iterator> Iterator for S<T> { | ||
1959 | type Item = <T as Iterator>::Item; | ||
1960 | } | ||
1961 | |||
1962 | fn test<I: Iterator<Item: OtherTrait<u32>>>() { | ||
1963 | let x: <S<I> as Iterator>::Item; | ||
1964 | x.foo()<|>; | ||
1965 | } | ||
1966 | "#, | ||
1967 | ); | ||
1968 | assert_eq!(t, "u32"); | ||
1969 | } | ||
1970 | |||
1971 | #[test] | ||
1972 | fn inline_assoc_type_bounds_2() { | ||
1973 | let t = type_at( | ||
1974 | r#" | ||
1975 | //- /main.rs | ||
1976 | trait Iterator { | ||
1977 | type Item; | ||
1978 | } | ||
1979 | |||
1980 | fn test<I: Iterator<Item: Iterator<Item = u32>>>() { | ||
1981 | let x: <<I as Iterator>::Item as Iterator>::Item; | ||
1982 | x<|>; | ||
1983 | } | ||
1984 | "#, | ||
1985 | ); | ||
1986 | assert_eq!(t, "u32"); | ||
1987 | } | ||
1988 | |||
1989 | #[test] | ||
1990 | fn proc_macro_server_types() { | ||
1991 | assert_snapshot!( | ||
1992 | infer_with_mismatches(r#" | ||
1993 | macro_rules! with_api { | ||
1994 | ($S:ident, $self:ident, $m:ident) => { | ||
1995 | $m! { | ||
1996 | TokenStream { | ||
1997 | fn new() -> $S::TokenStream; | ||
1998 | }, | ||
1999 | Group { | ||
2000 | }, | ||
2001 | } | ||
2002 | }; | ||
2003 | } | ||
2004 | macro_rules! associated_item { | ||
2005 | (type TokenStream) => | ||
2006 | (type TokenStream: 'static + Clone;); | ||
2007 | (type Group) => | ||
2008 | (type Group: 'static + Clone;); | ||
2009 | ($($item:tt)*) => ($($item)*;) | ||
2010 | } | ||
2011 | macro_rules! declare_server_traits { | ||
2012 | ($($name:ident { | ||
2013 | $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* | ||
2014 | }),* $(,)?) => { | ||
2015 | pub trait Types { | ||
2016 | $(associated_item!(type $name);)* | ||
2017 | } | ||
2018 | |||
2019 | $(pub trait $name: Types { | ||
2020 | $(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)* | ||
2021 | })* | ||
2022 | |||
2023 | pub trait Server: Types $(+ $name)* {} | ||
2024 | impl<S: Types $(+ $name)*> Server for S {} | ||
2025 | } | ||
2026 | } | ||
2027 | with_api!(Self, self_, declare_server_traits); | ||
2028 | struct Group {} | ||
2029 | struct TokenStream {} | ||
2030 | struct Rustc; | ||
2031 | impl Types for Rustc { | ||
2032 | type TokenStream = TokenStream; | ||
2033 | type Group = Group; | ||
2034 | } | ||
2035 | fn make<T>() -> T { loop {} } | ||
2036 | impl TokenStream for Rustc { | ||
2037 | fn new() -> Self::TokenStream { | ||
2038 | let group: Self::Group = make(); | ||
2039 | make() | ||
2040 | } | ||
2041 | } | ||
2042 | "#, true), | ||
2043 | @r###" | ||
2044 | [1115; 1126) '{ loop {} }': T | ||
2045 | [1117; 1124) 'loop {}': ! | ||
2046 | [1122; 1124) '{}': () | ||
2047 | [1190; 1253) '{ ... }': {unknown} | ||
2048 | [1204; 1209) 'group': {unknown} | ||
2049 | [1225; 1229) 'make': fn make<{unknown}>() -> {unknown} | ||
2050 | [1225; 1231) 'make()': {unknown} | ||
2051 | [1241; 1245) 'make': fn make<{unknown}>() -> {unknown} | ||
2052 | [1241; 1247) 'make()': {unknown} | ||
2053 | "### | ||
2054 | ); | ||
2055 | } | ||
2056 | |||
2057 | #[test] | ||
1927 | fn unify_impl_trait() { | 2058 | fn unify_impl_trait() { |
1928 | assert_snapshot!( | 2059 | assert_snapshot!( |
1929 | infer_with_mismatches(r#" | 2060 | infer_with_mismatches(r#" |
@@ -2023,6 +2154,33 @@ fn main() { | |||
2023 | } | 2154 | } |
2024 | 2155 | ||
2025 | #[test] | 2156 | #[test] |
2157 | fn associated_type_bound() { | ||
2158 | let t = type_at( | ||
2159 | r#" | ||
2160 | //- /main.rs | ||
2161 | pub trait Trait { | ||
2162 | type Item: OtherTrait<u32>; | ||
2163 | } | ||
2164 | pub trait OtherTrait<T> { | ||
2165 | fn foo(&self) -> T; | ||
2166 | } | ||
2167 | |||
2168 | // this is just a workaround for chalk#234 | ||
2169 | pub struct S<T>; | ||
2170 | impl<T: Trait> Trait for S<T> { | ||
2171 | type Item = <T as Trait>::Item; | ||
2172 | } | ||
2173 | |||
2174 | fn test<T: Trait>() { | ||
2175 | let y: <S<T> as Trait>::Item = no_matter; | ||
2176 | y.foo()<|>; | ||
2177 | } | ||
2178 | "#, | ||
2179 | ); | ||
2180 | assert_eq!(t, "u32"); | ||
2181 | } | ||
2182 | |||
2183 | #[test] | ||
2026 | fn dyn_trait_through_chalk() { | 2184 | fn dyn_trait_through_chalk() { |
2027 | let t = type_at( | 2185 | let t = type_at( |
2028 | r#" | 2186 | r#" |
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs index 43d8d1e80..05791a848 100644 --- a/crates/ra_hir_ty/src/traits.rs +++ b/crates/ra_hir_ty/src/traits.rs | |||
@@ -16,10 +16,12 @@ use self::chalk::{from_chalk, Interner, ToChalk}; | |||
16 | pub(crate) mod chalk; | 16 | pub(crate) mod chalk; |
17 | mod builtin; | 17 | mod builtin; |
18 | 18 | ||
19 | /// This controls the maximum size of types Chalk considers. If we set this too | 19 | // This controls the maximum size of types Chalk considers. If we set this too |
20 | /// high, we can run into slow edge cases; if we set it too low, Chalk won't | 20 | // high, we can run into slow edge cases; if we set it too low, Chalk won't |
21 | /// find some solutions. | 21 | // find some solutions. |
22 | const CHALK_SOLVER_MAX_SIZE: usize = 10; | 22 | // FIXME this is currently hardcoded in the recursive solver |
23 | // const CHALK_SOLVER_MAX_SIZE: usize = 10; | ||
24 | |||
23 | /// This controls how much 'time' we give the Chalk solver before giving up. | 25 | /// This controls how much 'time' we give the Chalk solver before giving up. |
24 | const CHALK_SOLVER_FUEL: i32 = 100; | 26 | const CHALK_SOLVER_FUEL: i32 = 100; |
25 | 27 | ||
@@ -30,8 +32,7 @@ struct ChalkContext<'a> { | |||
30 | } | 32 | } |
31 | 33 | ||
32 | fn create_chalk_solver() -> chalk_solve::Solver<Interner> { | 34 | fn create_chalk_solver() -> chalk_solve::Solver<Interner> { |
33 | let solver_choice = | 35 | let solver_choice = chalk_solve::SolverChoice::recursive(); |
34 | chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None }; | ||
35 | solver_choice.into_solver() | 36 | solver_choice.into_solver() |
36 | } | 37 | } |
37 | 38 | ||
@@ -194,13 +195,16 @@ fn solve( | |||
194 | } | 195 | } |
195 | remaining > 0 | 196 | remaining > 0 |
196 | }; | 197 | }; |
197 | let mut solve = || solver.solve_limited(&context, goal, should_continue); | 198 | let mut solve = || { |
199 | let solution = solver.solve_limited(&context, goal, should_continue); | ||
200 | log::debug!("solve({:?}) => {:?}", goal, solution); | ||
201 | solution | ||
202 | }; | ||
198 | // don't set the TLS for Chalk unless Chalk debugging is active, to make | 203 | // don't set the TLS for Chalk unless Chalk debugging is active, to make |
199 | // extra sure we only use it for debugging | 204 | // extra sure we only use it for debugging |
200 | let solution = | 205 | let solution = |
201 | if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() }; | 206 | if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() }; |
202 | 207 | ||
203 | log::debug!("solve({:?}) => {:?}", goal, solution); | ||
204 | solution | 208 | solution |
205 | } | 209 | } |
206 | 210 | ||
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index e05fea843..60d70d18e 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -32,6 +32,9 @@ impl chalk_ir::interner::Interner for Interner { | |||
32 | type InternedGoal = Arc<GoalData<Self>>; | 32 | type InternedGoal = Arc<GoalData<Self>>; |
33 | type InternedGoals = Vec<Goal<Self>>; | 33 | type InternedGoals = Vec<Goal<Self>>; |
34 | type InternedSubstitution = Vec<Parameter<Self>>; | 34 | type InternedSubstitution = Vec<Parameter<Self>>; |
35 | type InternedProgramClause = chalk_ir::ProgramClauseData<Self>; | ||
36 | type InternedProgramClauses = Vec<chalk_ir::ProgramClause<Self>>; | ||
37 | type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>; | ||
35 | type Identifier = TypeAliasId; | 38 | type Identifier = TypeAliasId; |
36 | type DefId = InternId; | 39 | type DefId = InternId; |
37 | 40 | ||
@@ -181,6 +184,48 @@ impl chalk_ir::interner::Interner for Interner { | |||
181 | ) -> &'a [Parameter<Self>] { | 184 | ) -> &'a [Parameter<Self>] { |
182 | substitution | 185 | substitution |
183 | } | 186 | } |
187 | |||
188 | fn intern_program_clause( | ||
189 | &self, | ||
190 | data: chalk_ir::ProgramClauseData<Self>, | ||
191 | ) -> chalk_ir::ProgramClauseData<Self> { | ||
192 | data | ||
193 | } | ||
194 | |||
195 | fn program_clause_data<'a>( | ||
196 | &self, | ||
197 | clause: &'a chalk_ir::ProgramClauseData<Self>, | ||
198 | ) -> &'a chalk_ir::ProgramClauseData<Self> { | ||
199 | clause | ||
200 | } | ||
201 | |||
202 | fn intern_program_clauses( | ||
203 | &self, | ||
204 | data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>, | ||
205 | ) -> Vec<chalk_ir::ProgramClause<Self>> { | ||
206 | data.into_iter().collect() | ||
207 | } | ||
208 | |||
209 | fn program_clauses_data<'a>( | ||
210 | &self, | ||
211 | clauses: &'a Vec<chalk_ir::ProgramClause<Self>>, | ||
212 | ) -> &'a [chalk_ir::ProgramClause<Self>] { | ||
213 | clauses | ||
214 | } | ||
215 | |||
216 | fn intern_quantified_where_clauses( | ||
217 | &self, | ||
218 | data: impl IntoIterator<Item = chalk_ir::QuantifiedWhereClause<Self>>, | ||
219 | ) -> Self::InternedQuantifiedWhereClauses { | ||
220 | data.into_iter().collect() | ||
221 | } | ||
222 | |||
223 | fn quantified_where_clauses_data<'a>( | ||
224 | &self, | ||
225 | clauses: &'a Self::InternedQuantifiedWhereClauses, | ||
226 | ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] { | ||
227 | clauses | ||
228 | } | ||
184 | } | 229 | } |
185 | 230 | ||
186 | impl chalk_ir::interner::HasInterner for Interner { | 231 | impl chalk_ir::interner::HasInterner for Interner { |
@@ -238,12 +283,10 @@ impl ToChalk for Ty { | |||
238 | Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner), | 283 | Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner), |
239 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), | 284 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), |
240 | Ty::Dyn(predicates) => { | 285 | Ty::Dyn(predicates) => { |
241 | let where_clauses = predicates | 286 | let where_clauses = chalk_ir::QuantifiedWhereClauses::from( |
242 | .iter() | 287 | &Interner, |
243 | .filter(|p| !p.is_error()) | 288 | predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)), |
244 | .cloned() | 289 | ); |
245 | .map(|p| p.to_chalk(db)) | ||
246 | .collect(); | ||
247 | let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) }; | 290 | let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) }; |
248 | chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner) | 291 | chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner) |
249 | } | 292 | } |
@@ -281,8 +324,12 @@ impl ToChalk for Ty { | |||
281 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, | 324 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, |
282 | chalk_ir::TyData::Dyn(where_clauses) => { | 325 | chalk_ir::TyData::Dyn(where_clauses) => { |
283 | assert_eq!(where_clauses.bounds.binders.len(), 1); | 326 | assert_eq!(where_clauses.bounds.binders.len(), 1); |
284 | let predicates = | 327 | let predicates = where_clauses |
285 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); | 328 | .bounds |
329 | .skip_binders() | ||
330 | .iter(&Interner) | ||
331 | .map(|c| from_chalk(db, c.clone())) | ||
332 | .collect(); | ||
286 | Ty::Dyn(predicates) | 333 | Ty::Dyn(predicates) |
287 | } | 334 | } |
288 | } | 335 | } |
@@ -426,7 +473,7 @@ impl ToChalk for GenericPredicate { | |||
426 | ) -> GenericPredicate { | 473 | ) -> GenericPredicate { |
427 | // we don't produce any where clauses with binders and can't currently deal with them | 474 | // we don't produce any where clauses with binders and can't currently deal with them |
428 | match where_clause | 475 | match where_clause |
429 | .value | 476 | .skip_binders() |
430 | .shifted_out(&Interner) | 477 | .shifted_out(&Interner) |
431 | .expect("unexpected bound vars in where clause") | 478 | .expect("unexpected bound vars in where clause") |
432 | { | 479 | { |
@@ -464,13 +511,13 @@ impl ToChalk for ProjectionTy { | |||
464 | } | 511 | } |
465 | 512 | ||
466 | impl ToChalk for super::ProjectionPredicate { | 513 | impl ToChalk for super::ProjectionPredicate { |
467 | type Chalk = chalk_ir::Normalize<Interner>; | 514 | type Chalk = chalk_ir::AliasEq<Interner>; |
468 | 515 | ||
469 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Normalize<Interner> { | 516 | fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> { |
470 | chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } | 517 | chalk_ir::AliasEq { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } |
471 | } | 518 | } |
472 | 519 | ||
473 | fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::Normalize<Interner>) -> Self { | 520 | fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self { |
474 | unimplemented!() | 521 | unimplemented!() |
475 | } | 522 | } |
476 | } | 523 | } |
@@ -521,7 +568,7 @@ impl ToChalk for Arc<super::TraitEnvironment> { | |||
521 | pred.clone().to_chalk(db).cast(&Interner); | 568 | pred.clone().to_chalk(db).cast(&Interner); |
522 | clauses.push(program_clause.into_from_env_clause(&Interner)); | 569 | clauses.push(program_clause.into_from_env_clause(&Interner)); |
523 | } | 570 | } |
524 | chalk_ir::Environment::new().add_clauses(clauses) | 571 | chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses) |
525 | } | 572 | } |
526 | 573 | ||
527 | fn from_chalk( | 574 | fn from_chalk( |
@@ -603,10 +650,10 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData { | |||
603 | } | 650 | } |
604 | 651 | ||
605 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { | 652 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { |
606 | chalk_ir::Binders { | 653 | chalk_ir::Binders::new( |
654 | std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(), | ||
607 | value, | 655 | value, |
608 | binders: std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(), | 656 | ) |
609 | } | ||
610 | } | 657 | } |
611 | 658 | ||
612 | fn convert_where_clauses( | 659 | fn convert_where_clauses( |
@@ -626,6 +673,55 @@ fn convert_where_clauses( | |||
626 | result | 673 | result |
627 | } | 674 | } |
628 | 675 | ||
676 | fn generic_predicate_to_inline_bound( | ||
677 | db: &dyn HirDatabase, | ||
678 | pred: &GenericPredicate, | ||
679 | self_ty: &Ty, | ||
680 | ) -> Option<chalk_rust_ir::InlineBound<Interner>> { | ||
681 | // An InlineBound is like a GenericPredicate, except the self type is left out. | ||
682 | // We don't have a special type for this, but Chalk does. | ||
683 | match pred { | ||
684 | GenericPredicate::Implemented(trait_ref) => { | ||
685 | if &trait_ref.substs[0] != self_ty { | ||
686 | // we can only convert predicates back to type bounds if they | ||
687 | // have the expected self type | ||
688 | return None; | ||
689 | } | ||
690 | let args_no_self = trait_ref.substs[1..] | ||
691 | .iter() | ||
692 | .map(|ty| ty.clone().to_chalk(db).cast(&Interner)) | ||
693 | .collect(); | ||
694 | let trait_bound = | ||
695 | chalk_rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self }; | ||
696 | Some(chalk_rust_ir::InlineBound::TraitBound(trait_bound)) | ||
697 | } | ||
698 | GenericPredicate::Projection(proj) => { | ||
699 | if &proj.projection_ty.parameters[0] != self_ty { | ||
700 | return None; | ||
701 | } | ||
702 | let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container { | ||
703 | AssocContainerId::TraitId(t) => t, | ||
704 | _ => panic!("associated type not in trait"), | ||
705 | }; | ||
706 | let args_no_self = proj.projection_ty.parameters[1..] | ||
707 | .iter() | ||
708 | .map(|ty| ty.clone().to_chalk(db).cast(&Interner)) | ||
709 | .collect(); | ||
710 | let alias_eq_bound = chalk_rust_ir::AliasEqBound { | ||
711 | value: proj.ty.clone().to_chalk(db), | ||
712 | trait_bound: chalk_rust_ir::TraitBound { | ||
713 | trait_id: trait_.to_chalk(db), | ||
714 | args_no_self, | ||
715 | }, | ||
716 | associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db), | ||
717 | parameters: Vec::new(), // FIXME we don't support generic associated types yet | ||
718 | }; | ||
719 | Some(chalk_rust_ir::InlineBound::AliasEqBound(alias_eq_bound)) | ||
720 | } | ||
721 | GenericPredicate::Error => None, | ||
722 | } | ||
723 | } | ||
724 | |||
629 | impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | 725 | impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { |
630 | fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> { | 726 | fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> { |
631 | self.db.associated_ty_data(id) | 727 | self.db.associated_ty_data(id) |
@@ -696,6 +792,13 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
696 | fn interner(&self) -> &Interner { | 792 | fn interner(&self) -> &Interner { |
697 | &Interner | 793 | &Interner |
698 | } | 794 | } |
795 | fn well_known_trait_id( | ||
796 | &self, | ||
797 | _well_known_trait: chalk_rust_ir::WellKnownTrait, | ||
798 | ) -> Option<chalk_ir::TraitId<Interner>> { | ||
799 | // FIXME tell Chalk about well-known traits (here and in trait_datum) | ||
800 | None | ||
801 | } | ||
699 | } | 802 | } |
700 | 803 | ||
701 | pub(crate) fn associated_ty_data_query( | 804 | pub(crate) fn associated_ty_data_query( |
@@ -708,12 +811,25 @@ pub(crate) fn associated_ty_data_query( | |||
708 | AssocContainerId::TraitId(t) => t, | 811 | AssocContainerId::TraitId(t) => t, |
709 | _ => panic!("associated type not in trait"), | 812 | _ => panic!("associated type not in trait"), |
710 | }; | 813 | }; |
814 | |||
815 | // Lower bounds -- we could/should maybe move this to a separate query in `lower` | ||
816 | let type_alias_data = db.type_alias_data(type_alias); | ||
711 | let generic_params = generics(db.upcast(), type_alias.into()); | 817 | let generic_params = generics(db.upcast(), type_alias.into()); |
712 | let bound_data = chalk_rust_ir::AssociatedTyDatumBound { | 818 | let bound_vars = Substs::bound_vars(&generic_params); |
713 | // FIXME add bounds and where clauses | 819 | let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); |
714 | bounds: vec![], | 820 | let ctx = crate::TyLoweringContext::new(db, &resolver) |
715 | where_clauses: vec![], | 821 | .with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable); |
716 | }; | 822 | let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0)); |
823 | let bounds = type_alias_data | ||
824 | .bounds | ||
825 | .iter() | ||
826 | .flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone())) | ||
827 | .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty)) | ||
828 | .map(|bound| make_binders(bound.shifted_in(&Interner), 0)) | ||
829 | .collect(); | ||
830 | |||
831 | let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars); | ||
832 | let bound_data = chalk_rust_ir::AssociatedTyDatumBound { bounds, where_clauses }; | ||
717 | let datum = AssociatedTyDatum { | 833 | let datum = AssociatedTyDatum { |
718 | trait_id: trait_.to_chalk(db), | 834 | trait_id: trait_.to_chalk(db), |
719 | id, | 835 | id, |
diff --git a/crates/ra_hir_ty/src/traits/chalk/tls.rs b/crates/ra_hir_ty/src/traits/chalk/tls.rs index d9bbb54a5..fa8e4d1ad 100644 --- a/crates/ra_hir_ty/src/traits/chalk/tls.rs +++ b/crates/ra_hir_ty/src/traits/chalk/tls.rs | |||
@@ -2,10 +2,11 @@ | |||
2 | use std::fmt; | 2 | use std::fmt; |
3 | 3 | ||
4 | use chalk_ir::{AliasTy, Goal, Goals, Lifetime, Parameter, ProgramClauseImplication, TypeName}; | 4 | use chalk_ir::{AliasTy, Goal, Goals, Lifetime, Parameter, ProgramClauseImplication, TypeName}; |
5 | use itertools::Itertools; | ||
5 | 6 | ||
6 | use super::{from_chalk, Interner}; | 7 | use super::{from_chalk, Interner}; |
7 | use crate::{db::HirDatabase, CallableDef, TypeCtor}; | 8 | use crate::{db::HirDatabase, CallableDef, TypeCtor}; |
8 | use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId}; | 9 | use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId}; |
9 | 10 | ||
10 | pub use unsafe_tls::{set_current_program, with_current_program}; | 11 | pub use unsafe_tls::{set_current_program, with_current_program}; |
11 | 12 | ||
@@ -69,7 +70,27 @@ impl DebugContext<'_> { | |||
69 | write!(f, "{}::{}", trait_name, name)?; | 70 | write!(f, "{}::{}", trait_name, name)?; |
70 | } | 71 | } |
71 | TypeCtor::Closure { def, expr } => { | 72 | TypeCtor::Closure { def, expr } => { |
72 | write!(f, "{{closure {:?} in {:?}}}", expr.into_raw(), def)?; | 73 | write!(f, "{{closure {:?} in ", expr.into_raw())?; |
74 | match def { | ||
75 | DefWithBodyId::FunctionId(func) => { | ||
76 | write!(f, "fn {}", self.0.function_data(func).name)? | ||
77 | } | ||
78 | DefWithBodyId::StaticId(s) => { | ||
79 | if let Some(name) = self.0.static_data(s).name.as_ref() { | ||
80 | write!(f, "body of static {}", name)?; | ||
81 | } else { | ||
82 | write!(f, "body of unnamed static {:?}", s)?; | ||
83 | } | ||
84 | } | ||
85 | DefWithBodyId::ConstId(c) => { | ||
86 | if let Some(name) = self.0.const_data(c).name.as_ref() { | ||
87 | write!(f, "body of const {}", name)?; | ||
88 | } else { | ||
89 | write!(f, "body of unnamed const {:?}", c)?; | ||
90 | } | ||
91 | } | ||
92 | }; | ||
93 | write!(f, "}}")?; | ||
73 | } | 94 | } |
74 | } | 95 | } |
75 | Ok(()) | 96 | Ok(()) |
@@ -113,14 +134,15 @@ impl DebugContext<'_> { | |||
113 | }; | 134 | }; |
114 | let trait_data = self.0.trait_data(trait_); | 135 | let trait_data = self.0.trait_data(trait_); |
115 | let params = alias.substitution.parameters(&Interner); | 136 | let params = alias.substitution.parameters(&Interner); |
116 | write!( | 137 | write!(fmt, "<{:?} as {}", ¶ms[0], trait_data.name,)?; |
117 | fmt, | 138 | if params.len() > 1 { |
118 | "<{:?} as {}<{:?}>>::{}", | 139 | write!( |
119 | ¶ms[0], | 140 | fmt, |
120 | trait_data.name, | 141 | "<{}>", |
121 | ¶ms[1..], | 142 | ¶ms[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))), |
122 | type_alias_data.name | 143 | )?; |
123 | ) | 144 | } |
145 | write!(fmt, ">::{}", type_alias_data.name) | ||
124 | } | 146 | } |
125 | 147 | ||
126 | pub fn debug_ty( | 148 | pub fn debug_ty( |
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index b4a29b81b..05c940605 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -35,4 +35,4 @@ ra_assists = { path = "../ra_assists" } | |||
35 | hir = { path = "../ra_hir", package = "ra_hir" } | 35 | hir = { path = "../ra_hir", package = "ra_hir" } |
36 | 36 | ||
37 | [dev-dependencies] | 37 | [dev-dependencies] |
38 | insta = "0.15.0" | 38 | insta = "0.16.0" |
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs index e61846995..6289f53f3 100644 --- a/crates/ra_ide/src/display/navigation_target.rs +++ b/crates/ra_ide/src/display/navigation_target.rs | |||
@@ -175,7 +175,7 @@ impl ToNav for FileSymbol { | |||
175 | NavigationTarget { | 175 | NavigationTarget { |
176 | file_id: self.file_id, | 176 | file_id: self.file_id, |
177 | name: self.name.clone(), | 177 | name: self.name.clone(), |
178 | kind: self.ptr.kind(), | 178 | kind: self.kind, |
179 | full_range: self.ptr.range(), | 179 | full_range: self.ptr.range(), |
180 | focus_range: self.name_range, | 180 | focus_range: self.name_range, |
181 | container_name: self.container_name.clone(), | 181 | container_name: self.container_name.clone(), |
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs index d30458d86..937abb433 100644 --- a/crates/ra_ide_db/src/symbol_index.rs +++ b/crates/ra_ide_db/src/symbol_index.rs | |||
@@ -285,7 +285,7 @@ impl Query { | |||
285 | let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); | 285 | let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); |
286 | 286 | ||
287 | for symbol in &symbol_index.symbols[start..end] { | 287 | for symbol in &symbol_index.symbols[start..end] { |
288 | if self.only_types && !is_type(symbol.ptr.kind()) { | 288 | if self.only_types && !is_type(symbol.kind) { |
289 | continue; | 289 | continue; |
290 | } | 290 | } |
291 | if self.exact && symbol.name != self.query { | 291 | if self.exact && symbol.name != self.query { |
@@ -312,6 +312,7 @@ fn is_type(kind: SyntaxKind) -> bool { | |||
312 | pub struct FileSymbol { | 312 | pub struct FileSymbol { |
313 | pub file_id: FileId, | 313 | pub file_id: FileId, |
314 | pub name: SmolStr, | 314 | pub name: SmolStr, |
315 | pub kind: SyntaxKind, | ||
315 | pub ptr: SyntaxNodePtr, | 316 | pub ptr: SyntaxNodePtr, |
316 | pub name_range: Option<TextRange>, | 317 | pub name_range: Option<TextRange>, |
317 | pub container_name: Option<SmolStr>, | 318 | pub container_name: Option<SmolStr>, |
@@ -377,6 +378,7 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { | |||
377 | fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> { | 378 | fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> { |
378 | to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { | 379 | to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { |
379 | name, | 380 | name, |
381 | kind: node.kind(), | ||
380 | ptr, | 382 | ptr, |
381 | file_id, | 383 | file_id, |
382 | name_range: Some(name_range), | 384 | name_range: Some(name_range), |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 9fb5cb058..31e9b22e7 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -607,12 +607,13 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
607 | let text: SmolStr = match self.cursor.token_tree() { | 607 | let text: SmolStr = match self.cursor.token_tree() { |
608 | Some(tt::TokenTree::Leaf(leaf)) => { | 608 | Some(tt::TokenTree::Leaf(leaf)) => { |
609 | // Mark the range if needed | 609 | // Mark the range if needed |
610 | let id = match leaf { | 610 | let (text, id) = match leaf { |
611 | tt::Leaf::Ident(ident) => ident.id, | 611 | tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id), |
612 | tt::Leaf::Punct(punct) => punct.id, | 612 | tt::Leaf::Punct(punct) => { |
613 | tt::Leaf::Literal(lit) => lit.id, | 613 | (SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id) |
614 | } | ||
615 | tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), | ||
614 | }; | 616 | }; |
615 | let text = SmolStr::new(format!("{}", leaf)); | ||
616 | let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text)); | 617 | let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text)); |
617 | self.token_map.insert(id, range); | 618 | self.token_map.insert(id, range); |
618 | self.cursor = self.cursor.bump(); | 619 | self.cursor = self.cursor.bump(); |
diff --git a/crates/ra_proc_macro/src/lib.rs b/crates/ra_proc_macro/src/lib.rs index 63da9f1b4..b200fd126 100644 --- a/crates/ra_proc_macro/src/lib.rs +++ b/crates/ra_proc_macro/src/lib.rs | |||
@@ -12,6 +12,7 @@ pub mod msg; | |||
12 | use process::{ProcMacroProcessSrv, ProcMacroProcessThread}; | 12 | use process::{ProcMacroProcessSrv, ProcMacroProcessThread}; |
13 | use ra_tt::{SmolStr, Subtree}; | 13 | use ra_tt::{SmolStr, Subtree}; |
14 | use std::{ | 14 | use std::{ |
15 | ffi::OsStr, | ||
15 | path::{Path, PathBuf}, | 16 | path::{Path, PathBuf}, |
16 | sync::Arc, | 17 | sync::Arc, |
17 | }; | 18 | }; |
@@ -56,8 +57,15 @@ pub struct ProcMacroClient { | |||
56 | } | 57 | } |
57 | 58 | ||
58 | impl ProcMacroClient { | 59 | impl ProcMacroClient { |
59 | pub fn extern_process(process_path: &Path) -> Result<ProcMacroClient, std::io::Error> { | 60 | pub fn extern_process<I, S>( |
60 | let (thread, process) = ProcMacroProcessSrv::run(process_path)?; | 61 | process_path: &Path, |
62 | args: I, | ||
63 | ) -> Result<ProcMacroClient, std::io::Error> | ||
64 | where | ||
65 | I: IntoIterator<Item = S>, | ||
66 | S: AsRef<OsStr>, | ||
67 | { | ||
68 | let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; | ||
61 | Ok(ProcMacroClient { | 69 | Ok(ProcMacroClient { |
62 | kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, | 70 | kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, |
63 | }) | 71 | }) |
diff --git a/crates/ra_proc_macro/src/process.rs b/crates/ra_proc_macro/src/process.rs index e8c85be38..f851570bc 100644 --- a/crates/ra_proc_macro/src/process.rs +++ b/crates/ra_proc_macro/src/process.rs | |||
@@ -9,6 +9,7 @@ use crate::rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTas | |||
9 | use io::{BufRead, BufReader}; | 9 | use io::{BufRead, BufReader}; |
10 | use std::{ | 10 | use std::{ |
11 | convert::{TryFrom, TryInto}, | 11 | convert::{TryFrom, TryInto}, |
12 | ffi::OsStr, | ||
12 | io::{self, Write}, | 13 | io::{self, Write}, |
13 | path::{Path, PathBuf}, | 14 | path::{Path, PathBuf}, |
14 | process::{Child, Command, Stdio}, | 15 | process::{Child, Command, Stdio}, |
@@ -44,8 +45,13 @@ impl Drop for Process { | |||
44 | } | 45 | } |
45 | 46 | ||
46 | impl Process { | 47 | impl Process { |
47 | fn run(process_path: &Path) -> Result<Process, io::Error> { | 48 | fn run<I, S>(process_path: &Path, args: I) -> Result<Process, io::Error> |
49 | where | ||
50 | I: IntoIterator<Item = S>, | ||
51 | S: AsRef<OsStr>, | ||
52 | { | ||
48 | let child = Command::new(process_path.clone()) | 53 | let child = Command::new(process_path.clone()) |
54 | .args(args) | ||
49 | .stdin(Stdio::piped()) | 55 | .stdin(Stdio::piped()) |
50 | .stdout(Stdio::piped()) | 56 | .stdout(Stdio::piped()) |
51 | .stderr(Stdio::null()) | 57 | .stderr(Stdio::null()) |
@@ -74,10 +80,15 @@ impl Process { | |||
74 | } | 80 | } |
75 | 81 | ||
76 | impl ProcMacroProcessSrv { | 82 | impl ProcMacroProcessSrv { |
77 | pub fn run( | 83 | pub fn run<I, S>( |
78 | process_path: &Path, | 84 | process_path: &Path, |
79 | ) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error> { | 85 | args: I, |
80 | let process = Process::run(process_path)?; | 86 | ) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error> |
87 | where | ||
88 | I: IntoIterator<Item = S>, | ||
89 | S: AsRef<OsStr>, | ||
90 | { | ||
91 | let process = Process::run(process_path, args)?; | ||
81 | 92 | ||
82 | let (task_tx, task_rx) = bounded(0); | 93 | let (task_tx, task_rx) = bounded(0); |
83 | let handle = jod_thread::spawn(move || { | 94 | let handle = jod_thread::spawn(move || { |
diff --git a/crates/ra_proc_macro_srv/src/main.rs b/crates/ra_proc_macro_srv/src/cli.rs index 70743c1f4..c771f2b38 100644 --- a/crates/ra_proc_macro_srv/src/main.rs +++ b/crates/ra_proc_macro_srv/src/cli.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! Driver for proc macro server | 1 | //! Driver for proc macro server |
2 | 2 | ||
3 | use crate::{expand_task, list_macros}; | ||
3 | use ra_proc_macro::msg::{self, Message}; | 4 | use ra_proc_macro::msg::{self, Message}; |
4 | use ra_proc_macro_srv::{expand_task, list_macros}; | ||
5 | 5 | ||
6 | use std::io; | 6 | use std::io; |
7 | 7 | ||
@@ -24,7 +24,8 @@ fn write_response(res: Result<msg::Response, String>) -> Result<(), io::Error> { | |||
24 | let mut stdout = stdout.lock(); | 24 | let mut stdout = stdout.lock(); |
25 | msg.write(&mut stdout) | 25 | msg.write(&mut stdout) |
26 | } | 26 | } |
27 | fn main() { | 27 | |
28 | pub fn run() { | ||
28 | loop { | 29 | loop { |
29 | let req = match read_request() { | 30 | let req = match read_request() { |
30 | Err(err) => { | 31 | Err(err) => { |
diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs index 59716cbb3..c62b0ed89 100644 --- a/crates/ra_proc_macro_srv/src/lib.rs +++ b/crates/ra_proc_macro_srv/src/lib.rs | |||
@@ -22,7 +22,7 @@ mod dylib; | |||
22 | use proc_macro::bridge::client::TokenStream; | 22 | use proc_macro::bridge::client::TokenStream; |
23 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; | 23 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; |
24 | 24 | ||
25 | pub fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { | 25 | pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { |
26 | let expander = dylib::Expander::new(&task.lib) | 26 | let expander = dylib::Expander::new(&task.lib) |
27 | .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib)); | 27 | .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib)); |
28 | 28 | ||
@@ -39,7 +39,7 @@ pub fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { | |||
39 | } | 39 | } |
40 | } | 40 | } |
41 | 41 | ||
42 | pub fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> { | 42 | pub(crate) fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> { |
43 | let expander = dylib::Expander::new(&task.lib) | 43 | let expander = dylib::Expander::new(&task.lib) |
44 | .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib)); | 44 | .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib)); |
45 | 45 | ||
@@ -53,5 +53,7 @@ pub fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> { | |||
53 | } | 53 | } |
54 | } | 54 | } |
55 | 55 | ||
56 | pub mod cli; | ||
57 | |||
56 | #[cfg(test)] | 58 | #[cfg(test)] |
57 | mod tests; | 59 | mod tests; |
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index 0ab64a1e0..03f2629da 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs | |||
@@ -5,9 +5,8 @@ mod json_project; | |||
5 | mod sysroot; | 5 | mod sysroot; |
6 | 6 | ||
7 | use std::{ | 7 | use std::{ |
8 | error::Error, | ||
9 | fs::{read_dir, File, ReadDir}, | 8 | fs::{read_dir, File, ReadDir}, |
10 | io::BufReader, | 9 | io::{self, BufReader}, |
11 | path::{Path, PathBuf}, | 10 | path::{Path, PathBuf}, |
12 | process::Command, | 11 | process::Command, |
13 | }; | 12 | }; |
@@ -25,25 +24,6 @@ pub use crate::{ | |||
25 | }; | 24 | }; |
26 | pub use ra_proc_macro::ProcMacroClient; | 25 | pub use ra_proc_macro::ProcMacroClient; |
27 | 26 | ||
28 | #[derive(Clone, PartialEq, Eq, Hash, Debug)] | ||
29 | pub struct CargoTomlNotFoundError { | ||
30 | pub searched_at: PathBuf, | ||
31 | pub reason: String, | ||
32 | } | ||
33 | |||
34 | impl std::fmt::Display for CargoTomlNotFoundError { | ||
35 | fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
36 | write!( | ||
37 | fmt, | ||
38 | "can't find Cargo.toml at {}, due to {}", | ||
39 | self.searched_at.display(), | ||
40 | self.reason | ||
41 | ) | ||
42 | } | ||
43 | } | ||
44 | |||
45 | impl Error for CargoTomlNotFoundError {} | ||
46 | |||
47 | #[derive(Debug, Clone)] | 27 | #[derive(Debug, Clone)] |
48 | pub enum ProjectWorkspace { | 28 | pub enum ProjectWorkspace { |
49 | /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`. | 29 | /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`. |
@@ -77,31 +57,119 @@ impl PackageRoot { | |||
77 | } | 57 | } |
78 | } | 58 | } |
79 | 59 | ||
80 | impl ProjectWorkspace { | 60 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
81 | pub fn discover(path: &Path, cargo_features: &CargoConfig) -> Result<ProjectWorkspace> { | 61 | pub enum ProjectRoot { |
82 | ProjectWorkspace::discover_with_sysroot(path, true, cargo_features) | 62 | ProjectJson(PathBuf), |
63 | CargoToml(PathBuf), | ||
64 | } | ||
65 | |||
66 | impl ProjectRoot { | ||
67 | pub fn from_manifest_file(path: PathBuf) -> Result<ProjectRoot> { | ||
68 | if path.ends_with("rust-project.json") { | ||
69 | return Ok(ProjectRoot::ProjectJson(path)); | ||
70 | } | ||
71 | if path.ends_with("Cargo.toml") { | ||
72 | return Ok(ProjectRoot::CargoToml(path)); | ||
73 | } | ||
74 | bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()) | ||
83 | } | 75 | } |
84 | 76 | ||
85 | pub fn discover_with_sysroot( | 77 | pub fn discover_single(path: &Path) -> Result<ProjectRoot> { |
86 | path: &Path, | 78 | let mut candidates = ProjectRoot::discover(path)?; |
87 | with_sysroot: bool, | 79 | let res = match candidates.pop() { |
80 | None => bail!("no projects"), | ||
81 | Some(it) => it, | ||
82 | }; | ||
83 | |||
84 | if !candidates.is_empty() { | ||
85 | bail!("more than one project") | ||
86 | } | ||
87 | Ok(res) | ||
88 | } | ||
89 | |||
90 | pub fn discover(path: &Path) -> io::Result<Vec<ProjectRoot>> { | ||
91 | if let Some(project_json) = find_rust_project_json(path) { | ||
92 | return Ok(vec![ProjectRoot::ProjectJson(project_json)]); | ||
93 | } | ||
94 | return find_cargo_toml(path) | ||
95 | .map(|paths| paths.into_iter().map(ProjectRoot::CargoToml).collect()); | ||
96 | |||
97 | fn find_rust_project_json(path: &Path) -> Option<PathBuf> { | ||
98 | if path.ends_with("rust-project.json") { | ||
99 | return Some(path.to_path_buf()); | ||
100 | } | ||
101 | |||
102 | let mut curr = Some(path); | ||
103 | while let Some(path) = curr { | ||
104 | let candidate = path.join("rust-project.json"); | ||
105 | if candidate.exists() { | ||
106 | return Some(candidate); | ||
107 | } | ||
108 | curr = path.parent(); | ||
109 | } | ||
110 | |||
111 | None | ||
112 | } | ||
113 | |||
114 | fn find_cargo_toml(path: &Path) -> io::Result<Vec<PathBuf>> { | ||
115 | if path.ends_with("Cargo.toml") { | ||
116 | return Ok(vec![path.to_path_buf()]); | ||
117 | } | ||
118 | |||
119 | if let Some(p) = find_cargo_toml_in_parent_dir(path) { | ||
120 | return Ok(vec![p]); | ||
121 | } | ||
122 | |||
123 | let entities = read_dir(path)?; | ||
124 | Ok(find_cargo_toml_in_child_dir(entities)) | ||
125 | } | ||
126 | |||
127 | fn find_cargo_toml_in_parent_dir(path: &Path) -> Option<PathBuf> { | ||
128 | let mut curr = Some(path); | ||
129 | while let Some(path) = curr { | ||
130 | let candidate = path.join("Cargo.toml"); | ||
131 | if candidate.exists() { | ||
132 | return Some(candidate); | ||
133 | } | ||
134 | curr = path.parent(); | ||
135 | } | ||
136 | |||
137 | None | ||
138 | } | ||
139 | |||
140 | fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec<PathBuf> { | ||
141 | // Only one level down to avoid cycles the easy way and stop a runaway scan with large projects | ||
142 | let mut valid_canditates = vec![]; | ||
143 | for entity in entities.filter_map(Result::ok) { | ||
144 | let candidate = entity.path().join("Cargo.toml"); | ||
145 | if candidate.exists() { | ||
146 | valid_canditates.push(candidate) | ||
147 | } | ||
148 | } | ||
149 | valid_canditates | ||
150 | } | ||
151 | } | ||
152 | } | ||
153 | |||
154 | impl ProjectWorkspace { | ||
155 | pub fn load( | ||
156 | root: ProjectRoot, | ||
88 | cargo_features: &CargoConfig, | 157 | cargo_features: &CargoConfig, |
158 | with_sysroot: bool, | ||
89 | ) -> Result<ProjectWorkspace> { | 159 | ) -> Result<ProjectWorkspace> { |
90 | match find_rust_project_json(path) { | 160 | let res = match root { |
91 | Some(json_path) => { | 161 | ProjectRoot::ProjectJson(project_json) => { |
92 | let file = File::open(&json_path) | 162 | let file = File::open(&project_json).with_context(|| { |
93 | .with_context(|| format!("Failed to open json file {}", json_path.display()))?; | 163 | format!("Failed to open json file {}", project_json.display()) |
164 | })?; | ||
94 | let reader = BufReader::new(file); | 165 | let reader = BufReader::new(file); |
95 | Ok(ProjectWorkspace::Json { | 166 | ProjectWorkspace::Json { |
96 | project: from_reader(reader).with_context(|| { | 167 | project: from_reader(reader).with_context(|| { |
97 | format!("Failed to deserialize json file {}", json_path.display()) | 168 | format!("Failed to deserialize json file {}", project_json.display()) |
98 | })?, | 169 | })?, |
99 | }) | 170 | } |
100 | } | 171 | } |
101 | None => { | 172 | ProjectRoot::CargoToml(cargo_toml) => { |
102 | let cargo_toml = find_cargo_toml(path).with_context(|| { | ||
103 | format!("Failed to find Cargo.toml for path {}", path.display()) | ||
104 | })?; | ||
105 | let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features) | 173 | let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features) |
106 | .with_context(|| { | 174 | .with_context(|| { |
107 | format!( | 175 | format!( |
@@ -119,9 +187,11 @@ impl ProjectWorkspace { | |||
119 | } else { | 187 | } else { |
120 | Sysroot::default() | 188 | Sysroot::default() |
121 | }; | 189 | }; |
122 | Ok(ProjectWorkspace::Cargo { cargo, sysroot }) | 190 | ProjectWorkspace::Cargo { cargo, sysroot } |
123 | } | 191 | } |
124 | } | 192 | }; |
193 | |||
194 | Ok(res) | ||
125 | } | 195 | } |
126 | 196 | ||
127 | /// Returns the roots for the current `ProjectWorkspace` | 197 | /// Returns the roots for the current `ProjectWorkspace` |
@@ -469,87 +539,6 @@ impl ProjectWorkspace { | |||
469 | } | 539 | } |
470 | } | 540 | } |
471 | 541 | ||
472 | fn find_rust_project_json(path: &Path) -> Option<PathBuf> { | ||
473 | if path.ends_with("rust-project.json") { | ||
474 | return Some(path.to_path_buf()); | ||
475 | } | ||
476 | |||
477 | let mut curr = Some(path); | ||
478 | while let Some(path) = curr { | ||
479 | let candidate = path.join("rust-project.json"); | ||
480 | if candidate.exists() { | ||
481 | return Some(candidate); | ||
482 | } | ||
483 | curr = path.parent(); | ||
484 | } | ||
485 | |||
486 | None | ||
487 | } | ||
488 | |||
489 | fn find_cargo_toml_in_parent_dir(path: &Path) -> Option<PathBuf> { | ||
490 | let mut curr = Some(path); | ||
491 | while let Some(path) = curr { | ||
492 | let candidate = path.join("Cargo.toml"); | ||
493 | if candidate.exists() { | ||
494 | return Some(candidate); | ||
495 | } | ||
496 | curr = path.parent(); | ||
497 | } | ||
498 | |||
499 | None | ||
500 | } | ||
501 | |||
502 | fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec<PathBuf> { | ||
503 | // Only one level down to avoid cycles the easy way and stop a runaway scan with large projects | ||
504 | let mut valid_canditates = vec![]; | ||
505 | for entity in entities.filter_map(Result::ok) { | ||
506 | let candidate = entity.path().join("Cargo.toml"); | ||
507 | if candidate.exists() { | ||
508 | valid_canditates.push(candidate) | ||
509 | } | ||
510 | } | ||
511 | valid_canditates | ||
512 | } | ||
513 | |||
514 | fn find_cargo_toml(path: &Path) -> Result<PathBuf> { | ||
515 | if path.ends_with("Cargo.toml") { | ||
516 | return Ok(path.to_path_buf()); | ||
517 | } | ||
518 | |||
519 | if let Some(p) = find_cargo_toml_in_parent_dir(path) { | ||
520 | return Ok(p); | ||
521 | } | ||
522 | |||
523 | let entities = match read_dir(path) { | ||
524 | Ok(entities) => entities, | ||
525 | Err(e) => { | ||
526 | return Err(CargoTomlNotFoundError { | ||
527 | searched_at: path.to_path_buf(), | ||
528 | reason: format!("file system error: {}", e), | ||
529 | } | ||
530 | .into()); | ||
531 | } | ||
532 | }; | ||
533 | |||
534 | let mut valid_canditates = find_cargo_toml_in_child_dir(entities); | ||
535 | match valid_canditates.len() { | ||
536 | 1 => Ok(valid_canditates.remove(0)), | ||
537 | 0 => Err(CargoTomlNotFoundError { | ||
538 | searched_at: path.to_path_buf(), | ||
539 | reason: "no Cargo.toml file found".to_string(), | ||
540 | } | ||
541 | .into()), | ||
542 | _ => Err(CargoTomlNotFoundError { | ||
543 | searched_at: path.to_path_buf(), | ||
544 | reason: format!( | ||
545 | "multiple equally valid Cargo.toml files found: {:?}", | ||
546 | valid_canditates | ||
547 | ), | ||
548 | } | ||
549 | .into()), | ||
550 | } | ||
551 | } | ||
552 | |||
553 | pub fn get_rustc_cfg_options() -> CfgOptions { | 542 | pub fn get_rustc_cfg_options() -> CfgOptions { |
554 | let mut cfg_options = CfgOptions::default(); | 543 | let mut cfg_options = CfgOptions::default(); |
555 | 544 | ||
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index 7f87f4212..ea41bf85d 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -351,7 +351,7 @@ fn with_children( | |||
351 | // FIXME: use a more elegant way to re-fetch the node (#1185), make | 351 | // FIXME: use a more elegant way to re-fetch the node (#1185), make |
352 | // `range` private afterwards | 352 | // `range` private afterwards |
353 | let mut ptr = SyntaxNodePtr::new(parent); | 353 | let mut ptr = SyntaxNodePtr::new(parent); |
354 | ptr.range = TextRange::offset_len(ptr.range().start(), len); | 354 | ptr.range = TextRange::offset_len(ptr.range.start(), len); |
355 | ptr.to_node(&new_root_node) | 355 | ptr.to_node(&new_root_node) |
356 | } | 356 | } |
357 | 357 | ||
diff --git a/crates/ra_syntax/src/ptr.rs b/crates/ra_syntax/src/ptr.rs index bc48a2e71..3be648c2a 100644 --- a/crates/ra_syntax/src/ptr.rs +++ b/crates/ra_syntax/src/ptr.rs | |||
@@ -34,12 +34,8 @@ impl SyntaxNodePtr { | |||
34 | self.range | 34 | self.range |
35 | } | 35 | } |
36 | 36 | ||
37 | pub fn kind(&self) -> SyntaxKind { | ||
38 | self.kind | ||
39 | } | ||
40 | |||
41 | pub fn cast<N: AstNode>(self) -> Option<AstPtr<N>> { | 37 | pub fn cast<N: AstNode>(self) -> Option<AstPtr<N>> { |
42 | if !N::can_cast(self.kind()) { | 38 | if !N::can_cast(self.kind) { |
43 | return None; | 39 | return None; |
44 | } | 40 | } |
45 | Some(AstPtr { raw: self, _ty: PhantomData }) | 41 | Some(AstPtr { raw: self, _ty: PhantomData }) |
@@ -88,7 +84,7 @@ impl<N: AstNode> AstPtr<N> { | |||
88 | } | 84 | } |
89 | 85 | ||
90 | pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> { | 86 | pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> { |
91 | if !U::can_cast(self.raw.kind()) { | 87 | if !U::can_cast(self.raw.kind) { |
92 | return None; | 88 | return None; |
93 | } | 89 | } |
94 | Some(AstPtr { raw: self.raw, _ty: PhantomData }) | 90 | Some(AstPtr { raw: self.raw, _ty: PhantomData }) |
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index f5f773432..cee0248b6 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -46,7 +46,7 @@ ra_db = { path = "../ra_db" } | |||
46 | hir = { path = "../ra_hir", package = "ra_hir" } | 46 | hir = { path = "../ra_hir", package = "ra_hir" } |
47 | hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } | 47 | hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } |
48 | hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } | 48 | hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } |
49 | 49 | ra_proc_macro_srv = { path = "../ra_proc_macro_srv" } | |
50 | 50 | ||
51 | [target.'cfg(windows)'.dependencies] | 51 | [target.'cfg(windows)'.dependencies] |
52 | winapi = "0.3.8" | 52 | winapi = "0.3.8" |
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs index 3cf394bb4..5e19253a6 100644 --- a/crates/rust-analyzer/src/bin/args.rs +++ b/crates/rust-analyzer/src/bin/args.rs | |||
@@ -29,12 +29,23 @@ pub(crate) enum Command { | |||
29 | with_deps: bool, | 29 | with_deps: bool, |
30 | path: PathBuf, | 30 | path: PathBuf, |
31 | load_output_dirs: bool, | 31 | load_output_dirs: bool, |
32 | with_proc_macro: bool, | ||
32 | }, | 33 | }, |
33 | Bench { | 34 | Bench { |
34 | path: PathBuf, | 35 | path: PathBuf, |
35 | what: BenchWhat, | 36 | what: BenchWhat, |
36 | load_output_dirs: bool, | 37 | load_output_dirs: bool, |
38 | with_proc_macro: bool, | ||
37 | }, | 39 | }, |
40 | Diagnostics { | ||
41 | path: PathBuf, | ||
42 | load_output_dirs: bool, | ||
43 | with_proc_macro: bool, | ||
44 | /// Include files which are not modules. In rust-analyzer | ||
45 | /// this would include the parser test files. | ||
46 | all: bool, | ||
47 | }, | ||
48 | ProcMacro, | ||
38 | RunServer, | 49 | RunServer, |
39 | Version, | 50 | Version, |
40 | } | 51 | } |
@@ -141,6 +152,7 @@ FLAGS: | |||
141 | -h, --help Prints help information | 152 | -h, --help Prints help information |
142 | --memory-usage | 153 | --memory-usage |
143 | --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis | 154 | --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis |
155 | --with-proc-macro Use ra-proc-macro-srv for proc-macro expanding | ||
144 | -v, --verbose | 156 | -v, --verbose |
145 | -q, --quiet | 157 | -q, --quiet |
146 | 158 | ||
@@ -158,6 +170,7 @@ ARGS: | |||
158 | let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; | 170 | let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; |
159 | let with_deps: bool = matches.contains("--with-deps"); | 171 | let with_deps: bool = matches.contains("--with-deps"); |
160 | let load_output_dirs = matches.contains("--load-output-dirs"); | 172 | let load_output_dirs = matches.contains("--load-output-dirs"); |
173 | let with_proc_macro = matches.contains("--with-proc-macro"); | ||
161 | let path = { | 174 | let path = { |
162 | let mut trailing = matches.free()?; | 175 | let mut trailing = matches.free()?; |
163 | if trailing.len() != 1 { | 176 | if trailing.len() != 1 { |
@@ -166,7 +179,15 @@ ARGS: | |||
166 | trailing.pop().unwrap().into() | 179 | trailing.pop().unwrap().into() |
167 | }; | 180 | }; |
168 | 181 | ||
169 | Command::Stats { randomize, memory_usage, only, with_deps, path, load_output_dirs } | 182 | Command::Stats { |
183 | randomize, | ||
184 | memory_usage, | ||
185 | only, | ||
186 | with_deps, | ||
187 | path, | ||
188 | load_output_dirs, | ||
189 | with_proc_macro, | ||
190 | } | ||
170 | } | 191 | } |
171 | "analysis-bench" => { | 192 | "analysis-bench" => { |
172 | if matches.contains(["-h", "--help"]) { | 193 | if matches.contains(["-h", "--help"]) { |
@@ -180,6 +201,7 @@ USAGE: | |||
180 | FLAGS: | 201 | FLAGS: |
181 | -h, --help Prints help information | 202 | -h, --help Prints help information |
182 | --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis | 203 | --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis |
204 | --with-proc-macro Use ra-proc-macro-srv for proc-macro expanding | ||
183 | -v, --verbose | 205 | -v, --verbose |
184 | 206 | ||
185 | OPTIONS: | 207 | OPTIONS: |
@@ -207,8 +229,43 @@ ARGS: | |||
207 | ), | 229 | ), |
208 | }; | 230 | }; |
209 | let load_output_dirs = matches.contains("--load-output-dirs"); | 231 | let load_output_dirs = matches.contains("--load-output-dirs"); |
210 | Command::Bench { path, what, load_output_dirs } | 232 | let with_proc_macro = matches.contains("--with-proc-macro"); |
233 | Command::Bench { path, what, load_output_dirs, with_proc_macro } | ||
234 | } | ||
235 | "diagnostics" => { | ||
236 | if matches.contains(["-h", "--help"]) { | ||
237 | eprintln!( | ||
238 | "\ | ||
239 | ra-cli-diagnostics | ||
240 | |||
241 | USAGE: | ||
242 | rust-analyzer diagnostics [FLAGS] [PATH] | ||
243 | |||
244 | FLAGS: | ||
245 | -h, --help Prints help information | ||
246 | --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis | ||
247 | --all Include all files rather than only modules | ||
248 | |||
249 | ARGS: | ||
250 | <PATH>" | ||
251 | ); | ||
252 | return Ok(Err(HelpPrinted)); | ||
253 | } | ||
254 | |||
255 | let load_output_dirs = matches.contains("--load-output-dirs"); | ||
256 | let with_proc_macro = matches.contains("--with-proc-macro"); | ||
257 | let all = matches.contains("--all"); | ||
258 | let path = { | ||
259 | let mut trailing = matches.free()?; | ||
260 | if trailing.len() != 1 { | ||
261 | bail!("Invalid flags"); | ||
262 | } | ||
263 | trailing.pop().unwrap().into() | ||
264 | }; | ||
265 | |||
266 | Command::Diagnostics { path, load_output_dirs, with_proc_macro, all } | ||
211 | } | 267 | } |
268 | "proc-macro" => Command::ProcMacro, | ||
212 | _ => { | 269 | _ => { |
213 | eprintln!( | 270 | eprintln!( |
214 | "\ | 271 | "\ |
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 608f4f67b..28b67cfe2 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs | |||
@@ -25,6 +25,7 @@ fn main() -> Result<()> { | |||
25 | with_deps, | 25 | with_deps, |
26 | path, | 26 | path, |
27 | load_output_dirs, | 27 | load_output_dirs, |
28 | with_proc_macro, | ||
28 | } => cli::analysis_stats( | 29 | } => cli::analysis_stats( |
29 | args.verbosity, | 30 | args.verbosity, |
30 | memory_usage, | 31 | memory_usage, |
@@ -33,12 +34,24 @@ fn main() -> Result<()> { | |||
33 | with_deps, | 34 | with_deps, |
34 | randomize, | 35 | randomize, |
35 | load_output_dirs, | 36 | load_output_dirs, |
37 | with_proc_macro, | ||
36 | )?, | 38 | )?, |
37 | 39 | ||
38 | args::Command::Bench { path, what, load_output_dirs } => { | 40 | args::Command::Bench { path, what, load_output_dirs, with_proc_macro } => { |
39 | cli::analysis_bench(args.verbosity, path.as_ref(), what, load_output_dirs)? | 41 | cli::analysis_bench( |
42 | args.verbosity, | ||
43 | path.as_ref(), | ||
44 | what, | ||
45 | load_output_dirs, | ||
46 | with_proc_macro, | ||
47 | )? | ||
40 | } | 48 | } |
41 | 49 | ||
50 | args::Command::Diagnostics { path, load_output_dirs, with_proc_macro, all } => { | ||
51 | cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)? | ||
52 | } | ||
53 | |||
54 | args::Command::ProcMacro => run_proc_macro_sv()?, | ||
42 | args::Command::RunServer => run_server()?, | 55 | args::Command::RunServer => run_server()?, |
43 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), | 56 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), |
44 | } | 57 | } |
@@ -52,6 +65,11 @@ fn setup_logging() -> Result<()> { | |||
52 | Ok(()) | 65 | Ok(()) |
53 | } | 66 | } |
54 | 67 | ||
68 | fn run_proc_macro_sv() -> Result<()> { | ||
69 | ra_proc_macro_srv::cli::run(); | ||
70 | Ok(()) | ||
71 | } | ||
72 | |||
55 | fn run_server() -> Result<()> { | 73 | fn run_server() -> Result<()> { |
56 | log::info!("lifecycle: server started"); | 74 | log::info!("lifecycle: server started"); |
57 | 75 | ||
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index c9738d101..a865a7c7e 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs | |||
@@ -3,6 +3,7 @@ | |||
3 | mod load_cargo; | 3 | mod load_cargo; |
4 | mod analysis_stats; | 4 | mod analysis_stats; |
5 | mod analysis_bench; | 5 | mod analysis_bench; |
6 | mod diagnostics; | ||
6 | mod progress_report; | 7 | mod progress_report; |
7 | 8 | ||
8 | use std::io::Read; | 9 | use std::io::Read; |
@@ -12,6 +13,10 @@ use ra_ide::{file_structure, Analysis}; | |||
12 | use ra_prof::profile; | 13 | use ra_prof::profile; |
13 | use ra_syntax::{AstNode, SourceFile}; | 14 | use ra_syntax::{AstNode, SourceFile}; |
14 | 15 | ||
16 | pub use analysis_bench::{analysis_bench, BenchWhat, Position}; | ||
17 | pub use analysis_stats::analysis_stats; | ||
18 | pub use diagnostics::diagnostics; | ||
19 | |||
15 | #[derive(Clone, Copy)] | 20 | #[derive(Clone, Copy)] |
16 | pub enum Verbosity { | 21 | pub enum Verbosity { |
17 | Spammy, | 22 | Spammy, |
@@ -60,9 +65,6 @@ pub fn highlight(rainbow: bool) -> Result<()> { | |||
60 | Ok(()) | 65 | Ok(()) |
61 | } | 66 | } |
62 | 67 | ||
63 | pub use analysis_bench::{analysis_bench, BenchWhat, Position}; | ||
64 | pub use analysis_stats::analysis_stats; | ||
65 | |||
66 | fn file() -> Result<SourceFile> { | 68 | fn file() -> Result<SourceFile> { |
67 | let text = read_stdin()?; | 69 | let text = read_stdin()?; |
68 | Ok(SourceFile::parse(&text).tree()) | 70 | Ok(SourceFile::parse(&text).tree()) |
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs index 7667873d5..6147ae207 100644 --- a/crates/rust-analyzer/src/cli/analysis_bench.rs +++ b/crates/rust-analyzer/src/cli/analysis_bench.rs | |||
@@ -47,12 +47,13 @@ pub fn analysis_bench( | |||
47 | path: &Path, | 47 | path: &Path, |
48 | what: BenchWhat, | 48 | what: BenchWhat, |
49 | load_output_dirs: bool, | 49 | load_output_dirs: bool, |
50 | with_proc_macro: bool, | ||
50 | ) -> Result<()> { | 51 | ) -> Result<()> { |
51 | ra_prof::init(); | 52 | ra_prof::init(); |
52 | 53 | ||
53 | let start = Instant::now(); | 54 | let start = Instant::now(); |
54 | eprint!("loading: "); | 55 | eprint!("loading: "); |
55 | let (mut host, roots) = load_cargo(path, load_output_dirs)?; | 56 | let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; |
56 | let db = host.raw_database(); | 57 | let db = host.raw_database(); |
57 | eprintln!("{:?}\n", start.elapsed()); | 58 | eprintln!("{:?}\n", start.elapsed()); |
58 | 59 | ||
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index e9ee0b888..d442cbd63 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs | |||
@@ -25,9 +25,10 @@ pub fn analysis_stats( | |||
25 | with_deps: bool, | 25 | with_deps: bool, |
26 | randomize: bool, | 26 | randomize: bool, |
27 | load_output_dirs: bool, | 27 | load_output_dirs: bool, |
28 | with_proc_macro: bool, | ||
28 | ) -> Result<()> { | 29 | ) -> Result<()> { |
29 | let db_load_time = Instant::now(); | 30 | let db_load_time = Instant::now(); |
30 | let (mut host, roots) = load_cargo(path, load_output_dirs)?; | 31 | let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; |
31 | let db = host.raw_database(); | 32 | let db = host.raw_database(); |
32 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); | 33 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); |
33 | let analysis_time = Instant::now(); | 34 | let analysis_time = Instant::now(); |
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs new file mode 100644 index 000000000..60daefa3e --- /dev/null +++ b/crates/rust-analyzer/src/cli/diagnostics.rs | |||
@@ -0,0 +1,79 @@ | |||
1 | //! Analyze all modules in a project for diagnostics. Exits with a non-zero status | ||
2 | //! code if any errors are found. | ||
3 | |||
4 | use anyhow::anyhow; | ||
5 | use ra_db::SourceDatabaseExt; | ||
6 | use ra_ide::Severity; | ||
7 | use std::{collections::HashSet, path::Path}; | ||
8 | |||
9 | use crate::cli::{load_cargo::load_cargo, Result}; | ||
10 | use hir::Semantics; | ||
11 | |||
12 | pub fn diagnostics( | ||
13 | path: &Path, | ||
14 | load_output_dirs: bool, | ||
15 | with_proc_macro: bool, | ||
16 | all: bool, | ||
17 | ) -> Result<()> { | ||
18 | let (host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; | ||
19 | let db = host.raw_database(); | ||
20 | let analysis = host.analysis(); | ||
21 | let semantics = Semantics::new(db); | ||
22 | let members = roots | ||
23 | .into_iter() | ||
24 | .filter_map(|(source_root_id, project_root)| { | ||
25 | // filter out dependencies | ||
26 | if project_root.is_member() { | ||
27 | Some(source_root_id) | ||
28 | } else { | ||
29 | None | ||
30 | } | ||
31 | }) | ||
32 | .collect::<HashSet<_>>(); | ||
33 | |||
34 | let mut found_error = false; | ||
35 | let mut visited_files = HashSet::new(); | ||
36 | for source_root_id in members { | ||
37 | for file_id in db.source_root(source_root_id).walk() { | ||
38 | // Filter out files which are not actually modules (unless `--all` flag is | ||
39 | // passed). In the rust-analyzer repository this filters out the parser test files. | ||
40 | if semantics.to_module_def(file_id).is_some() || all { | ||
41 | if !visited_files.contains(&file_id) { | ||
42 | let crate_name = if let Some(module) = semantics.to_module_def(file_id) { | ||
43 | if let Some(name) = module.krate().display_name(db) { | ||
44 | format!("{}", name) | ||
45 | } else { | ||
46 | String::from("unknown") | ||
47 | } | ||
48 | } else { | ||
49 | String::from("unknown") | ||
50 | }; | ||
51 | println!( | ||
52 | "processing crate: {}, module: {}", | ||
53 | crate_name, | ||
54 | db.file_relative_path(file_id) | ||
55 | ); | ||
56 | for diagnostic in analysis.diagnostics(file_id).unwrap() { | ||
57 | if matches!(diagnostic.severity, Severity::Error) { | ||
58 | found_error = true; | ||
59 | } | ||
60 | |||
61 | println!("{:?}", diagnostic); | ||
62 | } | ||
63 | |||
64 | visited_files.insert(file_id); | ||
65 | } | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | |||
70 | println!(); | ||
71 | println!("diagnostic scan complete"); | ||
72 | |||
73 | if found_error { | ||
74 | println!(); | ||
75 | Err(anyhow!("diagnostic error detected")) | ||
76 | } else { | ||
77 | Ok(()) | ||
78 | } | ||
79 | } | ||
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 43062ea10..eb9ac32c3 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs | |||
@@ -8,7 +8,7 @@ use crossbeam_channel::{unbounded, Receiver}; | |||
8 | use ra_db::{ExternSourceId, FileId, SourceRootId}; | 8 | use ra_db::{ExternSourceId, FileId, SourceRootId}; |
9 | use ra_ide::{AnalysisChange, AnalysisHost}; | 9 | use ra_ide::{AnalysisChange, AnalysisHost}; |
10 | use ra_project_model::{ | 10 | use ra_project_model::{ |
11 | get_rustc_cfg_options, CargoConfig, PackageRoot, ProcMacroClient, ProjectWorkspace, | 11 | get_rustc_cfg_options, CargoConfig, PackageRoot, ProcMacroClient, ProjectRoot, ProjectWorkspace, |
12 | }; | 12 | }; |
13 | use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch}; | 13 | use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch}; |
14 | use rustc_hash::{FxHashMap, FxHashSet}; | 14 | use rustc_hash::{FxHashMap, FxHashSet}; |
@@ -25,11 +25,14 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId { | |||
25 | pub(crate) fn load_cargo( | 25 | pub(crate) fn load_cargo( |
26 | root: &Path, | 26 | root: &Path, |
27 | load_out_dirs_from_check: bool, | 27 | load_out_dirs_from_check: bool, |
28 | with_proc_macro: bool, | ||
28 | ) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> { | 29 | ) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> { |
29 | let root = std::env::current_dir()?.join(root); | 30 | let root = std::env::current_dir()?.join(root); |
30 | let ws = ProjectWorkspace::discover( | 31 | let root = ProjectRoot::discover_single(&root)?; |
31 | root.as_ref(), | 32 | let ws = ProjectWorkspace::load( |
33 | root, | ||
32 | &CargoConfig { load_out_dirs_from_check, ..Default::default() }, | 34 | &CargoConfig { load_out_dirs_from_check, ..Default::default() }, |
35 | true, | ||
33 | )?; | 36 | )?; |
34 | 37 | ||
35 | let mut extern_dirs = FxHashSet::default(); | 38 | let mut extern_dirs = FxHashSet::default(); |
@@ -69,7 +72,14 @@ pub(crate) fn load_cargo( | |||
69 | }) | 72 | }) |
70 | .collect::<FxHashMap<_, _>>(); | 73 | .collect::<FxHashMap<_, _>>(); |
71 | 74 | ||
72 | let proc_macro_client = ProcMacroClient::dummy(); | 75 | let proc_macro_client = if !with_proc_macro { |
76 | ProcMacroClient::dummy() | ||
77 | } else { | ||
78 | let mut path = std::env::current_exe()?; | ||
79 | path.pop(); | ||
80 | path.push("rust-analyzer"); | ||
81 | ProcMacroClient::extern_process(&path, &["proc-macro"]).unwrap() | ||
82 | }; | ||
73 | let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); | 83 | let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); |
74 | Ok((host, source_roots)) | 84 | Ok((host, source_roots)) |
75 | } | 85 | } |
@@ -175,7 +185,7 @@ mod tests { | |||
175 | #[test] | 185 | #[test] |
176 | fn test_loading_rust_analyzer() { | 186 | fn test_loading_rust_analyzer() { |
177 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); | 187 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); |
178 | let (host, _roots) = load_cargo(path, false).unwrap(); | 188 | let (host, _roots) = load_cargo(path, false, false).unwrap(); |
179 | let n_crates = Crate::all(host.raw_database()).len(); | 189 | let n_crates = Crate::all(host.raw_database()).len(); |
180 | // RA has quite a few crates, but the exact count doesn't matter | 190 | // RA has quite a few crates, but the exact count doesn't matter |
181 | assert!(n_crates > 20); | 191 | assert!(n_crates > 20); |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 4734df16a..2b45f1310 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -20,7 +20,7 @@ pub struct Config { | |||
20 | pub with_sysroot: bool, | 20 | pub with_sysroot: bool, |
21 | pub publish_diagnostics: bool, | 21 | pub publish_diagnostics: bool, |
22 | pub lru_capacity: Option<usize>, | 22 | pub lru_capacity: Option<usize>, |
23 | pub proc_macro_srv: Option<String>, | 23 | pub proc_macro_srv: Option<(String, Vec<String>)>, |
24 | pub files: FilesConfig, | 24 | pub files: FilesConfig, |
25 | pub notifications: NotificationsConfig, | 25 | pub notifications: NotificationsConfig, |
26 | 26 | ||
@@ -131,6 +131,18 @@ impl Config { | |||
131 | set(value, "/cargo/allFeatures", &mut self.cargo.all_features); | 131 | set(value, "/cargo/allFeatures", &mut self.cargo.all_features); |
132 | set(value, "/cargo/features", &mut self.cargo.features); | 132 | set(value, "/cargo/features", &mut self.cargo.features); |
133 | set(value, "/cargo/loadOutDirsFromCheck", &mut self.cargo.load_out_dirs_from_check); | 133 | set(value, "/cargo/loadOutDirsFromCheck", &mut self.cargo.load_out_dirs_from_check); |
134 | |||
135 | match get::<bool>(value, "/procMacro/enabled") { | ||
136 | Some(true) => { | ||
137 | if let Ok(mut path) = std::env::current_exe() { | ||
138 | path.pop(); | ||
139 | path.push("rust-analyzer"); | ||
140 | self.proc_macro_srv = Some((path.to_string_lossy().to_string(), vec!["proc-macro".to_string()])); | ||
141 | } | ||
142 | } | ||
143 | _ => self.proc_macro_srv = None, | ||
144 | } | ||
145 | |||
134 | match get::<Vec<String>>(value, "/rustfmt/overrideCommand") { | 146 | match get::<Vec<String>>(value, "/rustfmt/overrideCommand") { |
135 | Some(mut args) if !args.is_empty() => { | 147 | Some(mut args) if !args.is_empty() => { |
136 | let command = args.remove(0); | 148 | let command = args.remove(0); |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 8d1429196..fc4c77f8a 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -15,6 +15,7 @@ use std::{ | |||
15 | }; | 15 | }; |
16 | 16 | ||
17 | use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; | 17 | use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; |
18 | use itertools::Itertools; | ||
18 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | 19 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; |
19 | use lsp_types::{ | 20 | use lsp_types::{ |
20 | NumberOrString, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressCreateParams, | 21 | NumberOrString, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressCreateParams, |
@@ -88,37 +89,46 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection) | |||
88 | 89 | ||
89 | let mut loop_state = LoopState::default(); | 90 | let mut loop_state = LoopState::default(); |
90 | let mut world_state = { | 91 | let mut world_state = { |
91 | // FIXME: support dynamic workspace loading. | ||
92 | let workspaces = { | 92 | let workspaces = { |
93 | let mut loaded_workspaces = Vec::new(); | 93 | // FIXME: support dynamic workspace loading. |
94 | for ws_root in &ws_roots { | 94 | let mut visited = FxHashSet::default(); |
95 | let workspace = ra_project_model::ProjectWorkspace::discover_with_sysroot( | 95 | let project_roots = ws_roots |
96 | ws_root.as_path(), | 96 | .iter() |
97 | config.with_sysroot, | 97 | .filter_map(|it| ra_project_model::ProjectRoot::discover(it).ok()) |
98 | &config.cargo, | 98 | .flatten() |
99 | ); | 99 | .filter(|it| visited.insert(it.clone())) |
100 | match workspace { | 100 | .collect::<Vec<_>>(); |
101 | Ok(workspace) => loaded_workspaces.push(workspace), | 101 | |
102 | Err(e) => { | 102 | if project_roots.is_empty() && config.notifications.cargo_toml_not_found { |
103 | log::error!("loading workspace failed: {:?}", e); | 103 | show_message( |
104 | 104 | req::MessageType::Error, | |
105 | if let Some(ra_project_model::CargoTomlNotFoundError { .. }) = | 105 | format!( |
106 | e.downcast_ref() | 106 | "rust-analyzer failed to discover workspace, no Cargo.toml found, dirs searched: {}", |
107 | { | 107 | ws_roots.iter().format_with(", ", |it, f| f(&it.display())) |
108 | if !config.notifications.cargo_toml_not_found { | 108 | ), |
109 | continue; | 109 | &connection.sender, |
110 | } | 110 | ); |
111 | } | 111 | }; |
112 | 112 | ||
113 | project_roots | ||
114 | .into_iter() | ||
115 | .filter_map(|root| { | ||
116 | ra_project_model::ProjectWorkspace::load( | ||
117 | root, | ||
118 | &config.cargo, | ||
119 | config.with_sysroot, | ||
120 | ) | ||
121 | .map_err(|err| { | ||
122 | log::error!("failed to load workspace: {:#}", err); | ||
113 | show_message( | 123 | show_message( |
114 | req::MessageType::Error, | 124 | req::MessageType::Error, |
115 | format!("rust-analyzer failed to load workspace: {:?}", e), | 125 | format!("rust-analyzer failed to load workspace: {:#}", err), |
116 | &connection.sender, | 126 | &connection.sender, |
117 | ); | 127 | ); |
118 | } | 128 | }) |
119 | } | 129 | .ok() |
120 | } | 130 | }) |
121 | loaded_workspaces | 131 | .collect::<Vec<_>>() |
122 | }; | 132 | }; |
123 | 133 | ||
124 | let globs = config | 134 | let globs = config |
diff --git a/crates/rust-analyzer/src/world.rs b/crates/rust-analyzer/src/world.rs index 6c42e1d76..f2ad453fa 100644 --- a/crates/rust-analyzer/src/world.rs +++ b/crates/rust-analyzer/src/world.rs | |||
@@ -64,6 +64,7 @@ pub struct WorldState { | |||
64 | pub latest_requests: Arc<RwLock<LatestRequests>>, | 64 | pub latest_requests: Arc<RwLock<LatestRequests>>, |
65 | pub flycheck: Option<Flycheck>, | 65 | pub flycheck: Option<Flycheck>, |
66 | pub diagnostics: DiagnosticCollection, | 66 | pub diagnostics: DiagnosticCollection, |
67 | pub proc_macro_client: ProcMacroClient, | ||
67 | } | 68 | } |
68 | 69 | ||
69 | /// An immutable snapshot of the world's state at a point in time. | 70 | /// An immutable snapshot of the world's state at a point in time. |
@@ -147,9 +148,9 @@ impl WorldState { | |||
147 | 148 | ||
148 | let proc_macro_client = match &config.proc_macro_srv { | 149 | let proc_macro_client = match &config.proc_macro_srv { |
149 | None => ProcMacroClient::dummy(), | 150 | None => ProcMacroClient::dummy(), |
150 | Some(srv) => { | 151 | Some((path, args)) => { |
151 | let path = Path::new(&srv); | 152 | let path = std::path::Path::new(path); |
152 | match ProcMacroClient::extern_process(path) { | 153 | match ProcMacroClient::extern_process(path, args) { |
153 | Ok(it) => it, | 154 | Ok(it) => it, |
154 | Err(err) => { | 155 | Err(err) => { |
155 | log::error!( | 156 | log::error!( |
@@ -192,6 +193,7 @@ impl WorldState { | |||
192 | latest_requests: Default::default(), | 193 | latest_requests: Default::default(), |
193 | flycheck, | 194 | flycheck, |
194 | diagnostics: Default::default(), | 195 | diagnostics: Default::default(), |
196 | proc_macro_client, | ||
195 | } | 197 | } |
196 | } | 198 | } |
197 | 199 | ||
diff --git a/crates/rust-analyzer/tests/heavy_tests/main.rs b/crates/rust-analyzer/tests/heavy_tests/main.rs index 638813311..1dd2676b6 100644 --- a/crates/rust-analyzer/tests/heavy_tests/main.rs +++ b/crates/rust-analyzer/tests/heavy_tests/main.rs | |||
@@ -9,7 +9,7 @@ use lsp_types::{ | |||
9 | }; | 9 | }; |
10 | use rust_analyzer::req::{ | 10 | use rust_analyzer::req::{ |
11 | CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument, | 11 | CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument, |
12 | Formatting, GotoDefinition, OnEnter, Runnables, RunnablesParams, | 12 | Formatting, GotoDefinition, HoverRequest, OnEnter, Runnables, RunnablesParams, |
13 | }; | 13 | }; |
14 | use serde_json::json; | 14 | use serde_json::json; |
15 | use tempfile::TempDir; | 15 | use tempfile::TempDir; |
@@ -625,3 +625,92 @@ fn main() { message(); } | |||
625 | )); | 625 | )); |
626 | assert!(format!("{}", res).contains("hello.rs")); | 626 | assert!(format!("{}", res).contains("hello.rs")); |
627 | } | 627 | } |
628 | |||
629 | #[test] | ||
630 | fn resolve_proc_macro() { | ||
631 | if skip_slow_tests() { | ||
632 | return; | ||
633 | } | ||
634 | let server = Project::with_fixture( | ||
635 | r###" | ||
636 | //- foo/Cargo.toml | ||
637 | [package] | ||
638 | name = "foo" | ||
639 | version = "0.0.0" | ||
640 | edition = "2018" | ||
641 | [dependencies] | ||
642 | bar = {path = "../bar"} | ||
643 | |||
644 | //- foo/src/main.rs | ||
645 | use bar::Bar; | ||
646 | trait Bar { | ||
647 | fn bar(); | ||
648 | } | ||
649 | #[derive(Bar)] | ||
650 | struct Foo {} | ||
651 | fn main() { | ||
652 | Foo::bar(); | ||
653 | } | ||
654 | |||
655 | //- bar/Cargo.toml | ||
656 | [package] | ||
657 | name = "bar" | ||
658 | version = "0.0.0" | ||
659 | edition = "2018" | ||
660 | |||
661 | [lib] | ||
662 | proc-macro = true | ||
663 | |||
664 | //- bar/src/lib.rs | ||
665 | extern crate proc_macro; | ||
666 | use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; | ||
667 | macro_rules! t { | ||
668 | ($n:literal) => { | ||
669 | TokenTree::from(Ident::new($n, Span::call_site())) | ||
670 | }; | ||
671 | ({}) => { | ||
672 | TokenTree::from(Group::new(Delimiter::Brace, TokenStream::new())) | ||
673 | }; | ||
674 | (()) => { | ||
675 | TokenTree::from(Group::new(Delimiter::Parenthesis, TokenStream::new())) | ||
676 | }; | ||
677 | } | ||
678 | #[proc_macro_derive(Bar)] | ||
679 | pub fn foo(_input: TokenStream) -> TokenStream { | ||
680 | // We hard-code the output here to avoid using any deps | ||
681 | let mut res = TokenStream::new(); | ||
682 | |||
683 | // impl Bar for Foo { fn bar() {} } | ||
684 | let mut tokens = vec![t!("impl"), t!("Bar"), t!("for"), t!("Foo")]; | ||
685 | let mut fn_stream = TokenStream::new(); | ||
686 | fn_stream.extend(vec![t!("fn"), t!("bar"), t!(()), t!({})]); | ||
687 | tokens.push(Group::new(Delimiter::Brace, fn_stream).into()); | ||
688 | res.extend(tokens); | ||
689 | res | ||
690 | } | ||
691 | |||
692 | "###, | ||
693 | ) | ||
694 | .with_config(|config| { | ||
695 | // FIXME: Use env!("CARGO_BIN_EXE_rust-analyzer") instead after | ||
696 | // https://github.com/rust-lang/cargo/pull/7697 landed | ||
697 | let macro_srv_path = std::path::Path::new(std::env!("CARGO_MANIFEST_DIR")) | ||
698 | .join("../../target/debug/rust-analyzer") | ||
699 | .to_string_lossy() | ||
700 | .to_string(); | ||
701 | |||
702 | config.cargo.load_out_dirs_from_check = true; | ||
703 | config.proc_macro_srv = Some((macro_srv_path, vec!["proc-macro".to_string()])); | ||
704 | }) | ||
705 | .root("foo") | ||
706 | .root("bar") | ||
707 | .server(); | ||
708 | server.wait_until_workspace_is_loaded(); | ||
709 | let res = server.send_request::<HoverRequest>(TextDocumentPositionParams::new( | ||
710 | server.doc_id("foo/src/main.rs"), | ||
711 | Position::new(7, 9), | ||
712 | )); | ||
713 | |||
714 | let value = res.get("contents").unwrap().get("value").unwrap().to_string(); | ||
715 | assert_eq!(value, r#""```rust\nfoo::Bar\nfn bar()\n```""#) | ||
716 | } | ||
diff --git a/docs/user/assists.md b/docs/user/assists.md index 1d9510423..6483ba4f3 100644 --- a/docs/user/assists.md +++ b/docs/user/assists.md | |||
@@ -77,7 +77,7 @@ fn foo() { | |||
77 | } | 77 | } |
78 | 78 | ||
79 | fn bar(arg: &str, baz: Baz) { | 79 | fn bar(arg: &str, baz: Baz) { |
80 | unimplemented!() | 80 | todo!() |
81 | } | 81 | } |
82 | 82 | ||
83 | ``` | 83 | ``` |
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index eb4f299a1..e11cffd68 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json | |||
@@ -109,9 +109,9 @@ | |||
109 | } | 109 | } |
110 | }, | 110 | }, |
111 | "@types/vscode": { | 111 | "@types/vscode": { |
112 | "version": "1.43.0", | 112 | "version": "1.44.0", |
113 | "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.43.0.tgz", | 113 | "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.44.0.tgz", |
114 | "integrity": "sha512-kIaR9qzd80rJOxePKpCB/mdy00mz8Apt2QA5Y6rdrKFn13QNFNeP3Hzmsf37Bwh/3cS7QjtAeGSK7wSqAU0sYQ==", | 114 | "integrity": "sha512-WJZtZlinE3meRdH+I7wTsIhpz/GLhqEQwmPGeh4s1irWLwMzCeTV8WZ+pgPTwrDXoafVUWwo1LiZ9HJVHFlJSQ==", |
115 | "dev": true | 115 | "dev": true |
116 | }, | 116 | }, |
117 | "@typescript-eslint/eslint-plugin": { | 117 | "@typescript-eslint/eslint-plugin": { |
@@ -1776,32 +1776,32 @@ | |||
1776 | } | 1776 | } |
1777 | }, | 1777 | }, |
1778 | "vscode-jsonrpc": { | 1778 | "vscode-jsonrpc": { |
1779 | "version": "5.0.1", | 1779 | "version": "5.1.0-next.1", |
1780 | "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-5.0.1.tgz", | 1780 | "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-5.1.0-next.1.tgz", |
1781 | "integrity": "sha512-JvONPptw3GAQGXlVV2utDcHx0BiY34FupW/kI6mZ5x06ER5DdPG/tXWMVHjTNULF5uKPOUUD0SaXg5QaubJL0A==" | 1781 | "integrity": "sha512-mwLDojZkbmpizSJSmp690oa9FB9jig18SIDGZeBCvFc2/LYSRvMm/WwWtMBJuJ1MfFh7rZXfQige4Uje5Z9NzA==" |
1782 | }, | 1782 | }, |
1783 | "vscode-languageclient": { | 1783 | "vscode-languageclient": { |
1784 | "version": "6.1.3", | 1784 | "version": "7.0.0-next.1", |
1785 | "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-6.1.3.tgz", | 1785 | "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-7.0.0-next.1.tgz", |
1786 | "integrity": "sha512-YciJxk08iU5LmWu7j5dUt9/1OLjokKET6rME3cI4BRpiF6HZlusm2ZwPt0MYJ0lV5y43sZsQHhyon2xBg4ZJVA==", | 1786 | "integrity": "sha512-JrjCUhLpQZxQ5VpWpilOHDMhVsn0fdN5jBh1uFNhSr5c2loJvRdr9Km2EuSQOFfOQsBKx0+xvY8PbsypNEcJ6w==", |
1787 | "requires": { | 1787 | "requires": { |
1788 | "semver": "^6.3.0", | 1788 | "semver": "^6.3.0", |
1789 | "vscode-languageserver-protocol": "^3.15.3" | 1789 | "vscode-languageserver-protocol": "3.16.0-next.2" |
1790 | } | 1790 | } |
1791 | }, | 1791 | }, |
1792 | "vscode-languageserver-protocol": { | 1792 | "vscode-languageserver-protocol": { |
1793 | "version": "3.15.3", | 1793 | "version": "3.16.0-next.2", |
1794 | "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.15.3.tgz", | 1794 | "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.16.0-next.2.tgz", |
1795 | "integrity": "sha512-zrMuwHOAQRhjDSnflWdJG+O2ztMWss8GqUUB8dXLR/FPenwkiBNkMIJJYfSN6sgskvsF0rHAoBowNQfbyZnnvw==", | 1795 | "integrity": "sha512-atmkGT/W6tF0cx4SaWFYtFs2UeSeC28RPiap9myv2YZTaTCFvTBEPNWrU5QRKfkyM0tbgtGo6T3UCQ8tkDpjzA==", |
1796 | "requires": { | 1796 | "requires": { |
1797 | "vscode-jsonrpc": "^5.0.1", | 1797 | "vscode-jsonrpc": "5.1.0-next.1", |
1798 | "vscode-languageserver-types": "3.15.1" | 1798 | "vscode-languageserver-types": "3.16.0-next.1" |
1799 | } | 1799 | } |
1800 | }, | 1800 | }, |
1801 | "vscode-languageserver-types": { | 1801 | "vscode-languageserver-types": { |
1802 | "version": "3.15.1", | 1802 | "version": "3.16.0-next.1", |
1803 | "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.15.1.tgz", | 1803 | "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.16.0-next.1.tgz", |
1804 | "integrity": "sha512-+a9MPUQrNGRrGU630OGbYVQ+11iOIovjCkqxajPa9w57Sd5ruK8WQNsslzpa0x/QJqC8kRc2DUxWjIFwoNm4ZQ==" | 1804 | "integrity": "sha512-tZFUSbyjUcrh+qQf13ALX4QDdOfDX0cVaBFgy7ktJ0VwS7AW/yRKgGPSxVqqP9OCMNPdqP57O5q47w2pEwfaUg==" |
1805 | }, | 1805 | }, |
1806 | "which": { | 1806 | "which": { |
1807 | "version": "1.3.1", | 1807 | "version": "1.3.1", |
diff --git a/editors/code/package.json b/editors/code/package.json index 0bf7b6ae6..5ce59e54a 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -34,14 +34,14 @@ | |||
34 | "dependencies": { | 34 | "dependencies": { |
35 | "jsonc-parser": "^2.2.1", | 35 | "jsonc-parser": "^2.2.1", |
36 | "node-fetch": "^2.6.0", | 36 | "node-fetch": "^2.6.0", |
37 | "vscode-languageclient": "6.1.3" | 37 | "vscode-languageclient": "7.0.0-next.1" |
38 | }, | 38 | }, |
39 | "devDependencies": { | 39 | "devDependencies": { |
40 | "@rollup/plugin-commonjs": "^11.0.2", | 40 | "@rollup/plugin-commonjs": "^11.0.2", |
41 | "@rollup/plugin-node-resolve": "^7.1.1", | 41 | "@rollup/plugin-node-resolve": "^7.1.1", |
42 | "@types/node": "^12.12.34", | 42 | "@types/node": "^12.12.34", |
43 | "@types/node-fetch": "^2.5.5", | 43 | "@types/node-fetch": "^2.5.5", |
44 | "@types/vscode": "^1.43.0", | 44 | "@types/vscode": "^1.44.0", |
45 | "@typescript-eslint/eslint-plugin": "^2.27.0", | 45 | "@typescript-eslint/eslint-plugin": "^2.27.0", |
46 | "@typescript-eslint/parser": "^2.27.0", | 46 | "@typescript-eslint/parser": "^2.27.0", |
47 | "eslint": "^6.8.0", | 47 | "eslint": "^6.8.0", |
@@ -154,7 +154,7 @@ | |||
154 | "keybindings": [ | 154 | "keybindings": [ |
155 | { | 155 | { |
156 | "command": "rust-analyzer.parentModule", | 156 | "command": "rust-analyzer.parentModule", |
157 | "key": "ctrl+u", | 157 | "key": "ctrl+shift+u", |
158 | "when": "editorTextFocus && editorLangId == rust" | 158 | "when": "editorTextFocus && editorLangId == rust" |
159 | }, | 159 | }, |
160 | { | 160 | { |
@@ -388,6 +388,11 @@ | |||
388 | "description": "Enable logging of VS Code extensions itself", | 388 | "description": "Enable logging of VS Code extensions itself", |
389 | "type": "boolean", | 389 | "type": "boolean", |
390 | "default": false | 390 | "default": false |
391 | }, | ||
392 | "rust-analyzer.procMacro.enabled": { | ||
393 | "description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", | ||
394 | "type": "boolean", | ||
395 | "default": false | ||
391 | } | 396 | } |
392 | } | 397 | } |
393 | }, | 398 | }, |
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 35a05131c..3b2eec8ba 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts | |||
@@ -12,6 +12,7 @@ export class Config { | |||
12 | private readonly requiresReloadOpts = [ | 12 | private readonly requiresReloadOpts = [ |
13 | "serverPath", | 13 | "serverPath", |
14 | "cargo", | 14 | "cargo", |
15 | "procMacro", | ||
15 | "files", | 16 | "files", |
16 | "highlighting", | 17 | "highlighting", |
17 | "updates.channel", | 18 | "updates.channel", |
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts index da74f03d2..a09531797 100644 --- a/editors/code/src/inlay_hints.ts +++ b/editors/code/src/inlay_hints.ts | |||
@@ -3,13 +3,13 @@ import * as vscode from 'vscode'; | |||
3 | import * as ra from './rust-analyzer-api'; | 3 | import * as ra from './rust-analyzer-api'; |
4 | 4 | ||
5 | import { Ctx, Disposable } from './ctx'; | 5 | import { Ctx, Disposable } from './ctx'; |
6 | import { sendRequestWithRetry, isRustDocument, RustDocument, RustEditor } from './util'; | 6 | import { sendRequestWithRetry, isRustDocument, RustDocument, RustEditor, sleep } from './util'; |
7 | 7 | ||
8 | 8 | ||
9 | export function activateInlayHints(ctx: Ctx) { | 9 | export function activateInlayHints(ctx: Ctx) { |
10 | const maybeUpdater = { | 10 | const maybeUpdater = { |
11 | updater: null as null | HintsUpdater, | 11 | updater: null as null | HintsUpdater, |
12 | onConfigChange() { | 12 | async onConfigChange() { |
13 | if ( | 13 | if ( |
14 | !ctx.config.inlayHints.typeHints && | 14 | !ctx.config.inlayHints.typeHints && |
15 | !ctx.config.inlayHints.parameterHints && | 15 | !ctx.config.inlayHints.parameterHints && |
@@ -17,6 +17,7 @@ export function activateInlayHints(ctx: Ctx) { | |||
17 | ) { | 17 | ) { |
18 | return this.dispose(); | 18 | return this.dispose(); |
19 | } | 19 | } |
20 | await sleep(100); | ||
20 | if (this.updater) { | 21 | if (this.updater) { |
21 | this.updater.syncCacheAndRenderHints(); | 22 | this.updater.syncCacheAndRenderHints(); |
22 | } else { | 23 | } else { |
diff --git a/xtask/tests/tidy-tests/main.rs b/xtask/tests/tidy-tests/main.rs index b3d6ddac9..ead642acc 100644 --- a/xtask/tests/tidy-tests/main.rs +++ b/xtask/tests/tidy-tests/main.rs | |||
@@ -26,6 +26,7 @@ fn check_todo(path: &Path, text: &str) { | |||
26 | // Some of our assists generate `todo!()` so those files are whitelisted. | 26 | // Some of our assists generate `todo!()` so those files are whitelisted. |
27 | "doc_tests/generated.rs", | 27 | "doc_tests/generated.rs", |
28 | "handlers/add_missing_impl_members.rs", | 28 | "handlers/add_missing_impl_members.rs", |
29 | "handlers/add_function.rs", | ||
29 | // To support generating `todo!()` in assists, we have `expr_todo()` in ast::make. | 30 | // To support generating `todo!()` in assists, we have `expr_todo()` in ast::make. |
30 | "ast/make.rs", | 31 | "ast/make.rs", |
31 | ]; | 32 | ]; |