author     Dmitry <[email protected]>   2020-08-18 13:37:22 +0100
committer  Dmitry <[email protected]>   2020-08-18 13:37:22 +0100
commit     e18748ed152989953e39492a6b44f8001267ce5f (patch)
tree       300dc7ce2998d9521319ff76f4df6ec2d165d0a1
parent     73315c9168901ef6d676f017daaa9b4976380c03 (diff)
parent     b8dfc331abbfce6aad0c248c91c57bd9890a668f (diff)

    Merge remote-tracking branch 'rust-analyzer/master'

65 files changed, 859 insertions, 327 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 2deb009ce..fb077e28d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -25,7 +25,7 @@ jobs:
25 | strategy: | 25 | strategy: |
26 | fail-fast: false | 26 | fail-fast: false |
27 | matrix: | 27 | matrix: |
28 | os: [ubuntu-latest, windows-latest] #, macos-latest] | 28 | os: [ubuntu-latest, windows-latest, macos-latest] |
29 | 29 | ||
30 | steps: | 30 | steps: |
31 | - name: Checkout repository | 31 | - name: Checkout repository |
@@ -70,10 +70,6 @@ jobs:
70 | - name: Prepare cache | 70 | - name: Prepare cache |
71 | run: cargo xtask pre-cache | 71 | run: cargo xtask pre-cache |
72 | 72 | ||
73 | - name: Prepare cache 2 | ||
74 | if: matrix.os == 'windows-latest' | ||
75 | run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe | ||
76 | |||
77 | # Weird targets to catch non-portable code | 73 | # Weird targets to catch non-portable code |
78 | rust-cross: | 74 | rust-cross: |
79 | name: Rust Cross | 75 | name: Rust Cross |
diff --git a/Cargo.lock b/Cargo.lock
index 2386c8f3a..ffa385106 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -162,9 +162,9 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
162 | 162 | ||
163 | [[package]] | 163 | [[package]] |
164 | name = "chalk-derive" | 164 | name = "chalk-derive" |
165 | version = "0.21.0" | 165 | version = "0.23.0" |
166 | source = "registry+https://github.com/rust-lang/crates.io-index" | 166 | source = "registry+https://github.com/rust-lang/crates.io-index" |
167 | checksum = "c1df0dbb57d74b4acd20f20fa66ab2acd09776b79eaeb9d8f947b2f3e01c40bf" | 167 | checksum = "c3cb438e961fd7f1183dc5e0bdcfd09253bf9b90592cf665d1ce6787d8a4908f" |
168 | dependencies = [ | 168 | dependencies = [ |
169 | "proc-macro2", | 169 | "proc-macro2", |
170 | "quote", | 170 | "quote", |
@@ -174,9 +174,9 @@ dependencies = [ | |||
174 | 174 | ||
175 | [[package]] | 175 | [[package]] |
176 | name = "chalk-ir" | 176 | name = "chalk-ir" |
177 | version = "0.21.0" | 177 | version = "0.23.0" |
178 | source = "registry+https://github.com/rust-lang/crates.io-index" | 178 | source = "registry+https://github.com/rust-lang/crates.io-index" |
179 | checksum = "44361a25dbdb1dc428f56ad7a3c21ba9ca12f3225c26a47919ff6fcb10a583d4" | 179 | checksum = "bb332abfcb015b148c6fbab39b1d13282745b0f7f312019dd8e138f5f3f0855d" |
180 | dependencies = [ | 180 | dependencies = [ |
181 | "chalk-derive", | 181 | "chalk-derive", |
182 | "lazy_static", | 182 | "lazy_static", |
@@ -184,9 +184,9 @@ dependencies = [ | |||
184 | 184 | ||
185 | [[package]] | 185 | [[package]] |
186 | name = "chalk-recursive" | 186 | name = "chalk-recursive" |
187 | version = "0.21.0" | 187 | version = "0.23.0" |
188 | source = "registry+https://github.com/rust-lang/crates.io-index" | 188 | source = "registry+https://github.com/rust-lang/crates.io-index" |
189 | checksum = "dd89556b98de156d5eaf21077d297cd2198628f10f2df140798ea3a5dd84bc86" | 189 | checksum = "e7c7673f10c5fa1acf7fa07d4f4c5917cbcf161ed3a952d14530c79950de32d2" |
190 | dependencies = [ | 190 | dependencies = [ |
191 | "chalk-derive", | 191 | "chalk-derive", |
192 | "chalk-ir", | 192 | "chalk-ir", |
@@ -197,9 +197,9 @@ dependencies = [ | |||
197 | 197 | ||
198 | [[package]] | 198 | [[package]] |
199 | name = "chalk-solve" | 199 | name = "chalk-solve" |
200 | version = "0.21.0" | 200 | version = "0.23.0" |
201 | source = "registry+https://github.com/rust-lang/crates.io-index" | 201 | source = "registry+https://github.com/rust-lang/crates.io-index" |
202 | checksum = "a886da37a0dc457057d86f78f026f7a09c6d8088aa13f4f4127fdb8dc80119a3" | 202 | checksum = "802de4eff72e5a5d2828e6c07224c74d66949dc6308aff025d0ae2871a11b4eb" |
203 | dependencies = [ | 203 | dependencies = [ |
204 | "chalk-derive", | 204 | "chalk-derive", |
205 | "chalk-ir", | 205 | "chalk-ir", |
@@ -214,9 +214,9 @@ dependencies = [ | |||
214 | 214 | ||
215 | [[package]] | 215 | [[package]] |
216 | name = "chrono" | 216 | name = "chrono" |
217 | version = "0.4.13" | 217 | version = "0.4.15" |
218 | source = "registry+https://github.com/rust-lang/crates.io-index" | 218 | source = "registry+https://github.com/rust-lang/crates.io-index" |
219 | checksum = "c74d84029116787153e02106bf53e66828452a4b325cc8652b788b5967c0a0b6" | 219 | checksum = "942f72db697d8767c22d46a598e01f2d3b475501ea43d0db4f16d90259182d0b" |
220 | dependencies = [ | 220 | dependencies = [ |
221 | "num-integer", | 221 | "num-integer", |
222 | "num-traits", | 222 | "num-traits", |
@@ -765,9 +765,9 @@ dependencies = [ | |||
765 | 765 | ||
766 | [[package]] | 766 | [[package]] |
767 | name = "lsp-server" | 767 | name = "lsp-server" |
768 | version = "0.3.3" | 768 | version = "0.3.4" |
769 | source = "registry+https://github.com/rust-lang/crates.io-index" | 769 | source = "registry+https://github.com/rust-lang/crates.io-index" |
770 | checksum = "53b4ace8ebe5d2aff3687ce0ed507f6020d6a47a7de2b0d3d664ea237ffb0c62" | 770 | checksum = "87fce8851309a325974ec76efe7c9d954d152c9ff4fded6520eb3c96d0aa3a96" |
771 | dependencies = [ | 771 | dependencies = [ |
772 | "crossbeam-channel", | 772 | "crossbeam-channel", |
773 | "log", | 773 | "log", |
@@ -971,9 +971,9 @@ checksum = "1ab52be62400ca80aa00285d25253d7f7c437b7375c4de678f5405d3afe82ca5" | |||
971 | 971 | ||
972 | [[package]] | 972 | [[package]] |
973 | name = "once_cell" | 973 | name = "once_cell" |
974 | version = "1.4.0" | 974 | version = "1.4.1" |
975 | source = "registry+https://github.com/rust-lang/crates.io-index" | 975 | source = "registry+https://github.com/rust-lang/crates.io-index" |
976 | checksum = "0b631f7e854af39a1739f401cf34a8a013dfe09eac4fa4dba91e9768bd28168d" | 976 | checksum = "260e51e7efe62b592207e9e13a68e43692a7a279171d6ba57abd208bf23645ad" |
977 | 977 | ||
978 | [[package]] | 978 | [[package]] |
979 | name = "oorandom" | 979 | name = "oorandom" |
@@ -1036,9 +1036,9 @@ dependencies = [ | |||
1036 | 1036 | ||
1037 | [[package]] | 1037 | [[package]] |
1038 | name = "perf-event-open-sys" | 1038 | name = "perf-event-open-sys" |
1039 | version = "0.3.2" | 1039 | version = "0.3.3" |
1040 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1040 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1041 | checksum = "83e7183862f36d10263d0a1ccaef50fef734ade948bf026afd1bd97355c78273" | 1041 | checksum = "d9ebe2b9ef0cb884ef778c5a533144e348e9839a9fcf67f3d24e1890ac9088d6" |
1042 | dependencies = [ | 1042 | dependencies = [ |
1043 | "libc", | 1043 | "libc", |
1044 | ] | 1044 | ] |
@@ -1097,6 +1097,7 @@ dependencies = [ | |||
1097 | "mbe", | 1097 | "mbe", |
1098 | "memmap", | 1098 | "memmap", |
1099 | "proc_macro_api", | 1099 | "proc_macro_api", |
1100 | "proc_macro_test", | ||
1100 | "serde_derive", | 1101 | "serde_derive", |
1101 | "test_utils", | 1102 | "test_utils", |
1102 | "toolchain", | 1103 | "toolchain", |
@@ -1104,6 +1105,10 @@ dependencies = [ | |||
1104 | ] | 1105 | ] |
1105 | 1106 | ||
1106 | [[package]] | 1107 | [[package]] |
1108 | name = "proc_macro_test" | ||
1109 | version = "0.0.0" | ||
1110 | |||
1111 | [[package]] | ||
1107 | name = "profile" | 1112 | name = "profile" |
1108 | version = "0.0.0" | 1113 | version = "0.0.0" |
1109 | dependencies = [ | 1114 | dependencies = [ |
@@ -1259,9 +1264,9 @@ dependencies = [ | |||
1259 | 1264 | ||
1260 | [[package]] | 1265 | [[package]] |
1261 | name = "rustc-ap-rustc_lexer" | 1266 | name = "rustc-ap-rustc_lexer" |
1262 | version = "671.0.0" | 1267 | version = "673.0.0" |
1263 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1268 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1264 | checksum = "22e1221f3bfa2943c942cf8da319ab2346887f8757778c29c7f1822cd27b521f" | 1269 | checksum = "f6b71fa1285bdefe5fb61e59b63d6cc246abf337f4acafdd620d721bc488e671" |
1265 | dependencies = [ | 1270 | dependencies = [ |
1266 | "unicode-xid", | 1271 | "unicode-xid", |
1267 | ] | 1272 | ] |
@@ -1450,6 +1455,7 @@ dependencies = [ | |||
1450 | "expect", | 1455 | "expect", |
1451 | "hir", | 1456 | "hir", |
1452 | "ide_db", | 1457 | "ide_db", |
1458 | "itertools", | ||
1453 | "rustc-hash", | 1459 | "rustc-hash", |
1454 | "syntax", | 1460 | "syntax", |
1455 | "test_utils", | 1461 | "test_utils", |
@@ -1573,9 +1579,9 @@ dependencies = [ | |||
1573 | 1579 | ||
1574 | [[package]] | 1580 | [[package]] |
1575 | name = "tinyvec" | 1581 | name = "tinyvec" |
1576 | version = "0.3.3" | 1582 | version = "0.3.4" |
1577 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1583 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1578 | checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed" | 1584 | checksum = "238ce071d267c5710f9d31451efec16c5ee22de34df17cc05e56cbc92e967117" |
1579 | 1585 | ||
1580 | [[package]] | 1586 | [[package]] |
1581 | name = "toolchain" | 1587 | name = "toolchain" |
diff --git a/crates/assists/src/ast_transform.rs b/crates/assists/src/ast_transform.rs
index 4c41c16d8..5216862ba 100644
--- a/crates/assists/src/ast_transform.rs
+++ b/crates/assists/src/ast_transform.rs
@@ -7,6 +7,17 @@ use syntax::{
7 | ast::{self, AstNode}, | 7 | ast::{self, AstNode}, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { | ||
11 | SyntaxRewriter::from_fn(|element| match element { | ||
12 | syntax::SyntaxElement::Node(n) => { | ||
13 | let replacement = transformer.get_substitution(&n)?; | ||
14 | Some(replacement.into()) | ||
15 | } | ||
16 | _ => None, | ||
17 | }) | ||
18 | .rewrite_ast(&node) | ||
19 | } | ||
20 | |||
10 | pub trait AstTransform<'a> { | 21 | pub trait AstTransform<'a> { |
11 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode>; | 22 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode>; |
12 | 23 | ||
@@ -107,10 +118,7 @@ impl<'a> SubstituteTypeParams<'a> { | |||
107 | ast::Type::PathType(path_type) => path_type.path()?, | 118 | ast::Type::PathType(path_type) => path_type.path()?, |
108 | _ => return None, | 119 | _ => return None, |
109 | }; | 120 | }; |
110 | // FIXME: use `hir::Path::from_src` instead. | 121 | let resolution = self.source_scope.speculative_resolve(&path)?; |
111 | #[allow(deprecated)] | ||
112 | let path = hir::Path::from_ast(path)?; | ||
113 | let resolution = self.source_scope.resolve_hir_path(&path)?; | ||
114 | match resolution { | 122 | match resolution { |
115 | hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), | 123 | hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), |
116 | _ => None, | 124 | _ => None, |
@@ -146,10 +154,7 @@ impl<'a> QualifyPaths<'a> { | |||
146 | // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway | 154 | // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway |
147 | return None; | 155 | return None; |
148 | } | 156 | } |
149 | // FIXME: use `hir::Path::from_src` instead. | 157 | let resolution = self.source_scope.speculative_resolve(&p)?; |
150 | #[allow(deprecated)] | ||
151 | let hir_path = hir::Path::from_ast(p.clone()); | ||
152 | let resolution = self.source_scope.resolve_hir_path(&hir_path?)?; | ||
153 | match resolution { | 158 | match resolution { |
154 | PathResolution::Def(def) => { | 159 | PathResolution::Def(def) => { |
155 | let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?; | 160 | let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?; |
@@ -175,17 +180,6 @@ impl<'a> QualifyPaths<'a> { | |||
175 | } | 180 | } |
176 | } | 181 | } |
177 | 182 | ||
178 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { | ||
179 | SyntaxRewriter::from_fn(|element| match element { | ||
180 | syntax::SyntaxElement::Node(n) => { | ||
181 | let replacement = transformer.get_substitution(&n)?; | ||
182 | Some(replacement.into()) | ||
183 | } | ||
184 | _ => None, | ||
185 | }) | ||
186 | .rewrite_ast(&node) | ||
187 | } | ||
188 | |||
189 | impl<'a> AstTransform<'a> for QualifyPaths<'a> { | 183 | impl<'a> AstTransform<'a> for QualifyPaths<'a> { |
190 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> { | 184 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> { |
191 | self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) | 185 | self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) |
diff --git a/crates/assists/src/handlers/add_missing_impl_members.rs b/crates/assists/src/handlers/add_missing_impl_members.rs
index 81b61ebf8..83a2ada9a 100644
--- a/crates/assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/assists/src/handlers/add_missing_impl_members.rs
@@ -48,7 +48,6 @@ enum AddMissingImplMembersMode {
48 | // fn foo(&self) -> u32 { | 48 | // fn foo(&self) -> u32 { |
49 | // ${0:todo!()} | 49 | // ${0:todo!()} |
50 | // } | 50 | // } |
51 | // | ||
52 | // } | 51 | // } |
53 | // ``` | 52 | // ``` |
54 | pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 53 | pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
@@ -89,8 +88,8 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) - | |||
89 | // impl Trait for () { | 88 | // impl Trait for () { |
90 | // Type X = (); | 89 | // Type X = (); |
91 | // fn foo(&self) {} | 90 | // fn foo(&self) {} |
92 | // $0fn bar(&self) {} | ||
93 | // | 91 | // |
92 | // $0fn bar(&self) {} | ||
94 | // } | 93 | // } |
95 | // ``` | 94 | // ``` |
96 | pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 95 | pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
@@ -240,15 +239,18 @@ struct S; | |||
240 | 239 | ||
241 | impl Foo for S { | 240 | impl Foo for S { |
242 | fn bar(&self) {} | 241 | fn bar(&self) {} |
242 | |||
243 | $0type Output; | 243 | $0type Output; |
244 | |||
244 | const CONST: usize = 42; | 245 | const CONST: usize = 42; |
246 | |||
245 | fn foo(&self) { | 247 | fn foo(&self) { |
246 | todo!() | 248 | todo!() |
247 | } | 249 | } |
250 | |||
248 | fn baz(&self) { | 251 | fn baz(&self) { |
249 | todo!() | 252 | todo!() |
250 | } | 253 | } |
251 | |||
252 | }"#, | 254 | }"#, |
253 | ); | 255 | ); |
254 | } | 256 | } |
@@ -281,10 +283,10 @@ struct S; | |||
281 | 283 | ||
282 | impl Foo for S { | 284 | impl Foo for S { |
283 | fn bar(&self) {} | 285 | fn bar(&self) {} |
286 | |||
284 | fn foo(&self) { | 287 | fn foo(&self) { |
285 | ${0:todo!()} | 288 | ${0:todo!()} |
286 | } | 289 | } |
287 | |||
288 | }"#, | 290 | }"#, |
289 | ); | 291 | ); |
290 | } | 292 | } |
@@ -599,6 +601,7 @@ trait Foo { | |||
599 | struct S; | 601 | struct S; |
600 | impl Foo for S { | 602 | impl Foo for S { |
601 | $0type Output; | 603 | $0type Output; |
604 | |||
602 | fn foo(&self) { | 605 | fn foo(&self) { |
603 | todo!() | 606 | todo!() |
604 | } | 607 | } |
@@ -708,4 +711,56 @@ impl Tr for () { | |||
708 | }"#, | 711 | }"#, |
709 | ) | 712 | ) |
710 | } | 713 | } |
714 | |||
715 | #[test] | ||
716 | fn test_whitespace_fixup_preserves_bad_tokens() { | ||
717 | check_assist( | ||
718 | add_missing_impl_members, | ||
719 | r#" | ||
720 | trait Tr { | ||
721 | fn foo(); | ||
722 | } | ||
723 | |||
724 | impl Tr for ()<|> { | ||
725 | +++ | ||
726 | }"#, | ||
727 | r#" | ||
728 | trait Tr { | ||
729 | fn foo(); | ||
730 | } | ||
731 | |||
732 | impl Tr for () { | ||
733 | fn foo() { | ||
734 | ${0:todo!()} | ||
735 | } | ||
736 | +++ | ||
737 | }"#, | ||
738 | ) | ||
739 | } | ||
740 | |||
741 | #[test] | ||
742 | fn test_whitespace_fixup_preserves_comments() { | ||
743 | check_assist( | ||
744 | add_missing_impl_members, | ||
745 | r#" | ||
746 | trait Tr { | ||
747 | fn foo(); | ||
748 | } | ||
749 | |||
750 | impl Tr for ()<|> { | ||
751 | // very important | ||
752 | }"#, | ||
753 | r#" | ||
754 | trait Tr { | ||
755 | fn foo(); | ||
756 | } | ||
757 | |||
758 | impl Tr for () { | ||
759 | fn foo() { | ||
760 | ${0:todo!()} | ||
761 | } | ||
762 | // very important | ||
763 | }"#, | ||
764 | ) | ||
765 | } | ||
711 | } | 766 | } |
diff --git a/crates/assists/src/handlers/auto_import.rs b/crates/assists/src/handlers/auto_import.rs
index cce789972..b9ec3f10b 100644
--- a/crates/assists/src/handlers/auto_import.rs
+++ b/crates/assists/src/handlers/auto_import.rs
@@ -53,7 +53,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
53 | |builder| { | 53 | |builder| { |
54 | insert_use_statement( | 54 | insert_use_statement( |
55 | &auto_import_assets.syntax_under_caret, | 55 | &auto_import_assets.syntax_under_caret, |
56 | &import, | 56 | &import.to_string(), |
57 | ctx, | 57 | ctx, |
58 | builder.text_edit_builder(), | 58 | builder.text_edit_builder(), |
59 | ); | 59 | ); |
diff --git a/crates/assists/src/handlers/expand_glob_import.rs b/crates/assists/src/handlers/expand_glob_import.rs
index f690ec343..81d0af2f3 100644
--- a/crates/assists/src/handlers/expand_glob_import.rs
+++ b/crates/assists/src/handlers/expand_glob_import.rs
@@ -1,3 +1,4 @@
1 | use either::Either; | ||
1 | use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope}; | 2 | use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope}; |
2 | use ide_db::{ | 3 | use ide_db::{ |
3 | defs::{classify_name_ref, Definition, NameRefClass}, | 4 | defs::{classify_name_ref, Definition, NameRefClass}, |
@@ -10,8 +11,6 @@ use crate::{ | |||
10 | AssistId, AssistKind, | 11 | AssistId, AssistKind, |
11 | }; | 12 | }; |
12 | 13 | ||
13 | use either::Either; | ||
14 | |||
15 | // Assist: expand_glob_import | 14 | // Assist: expand_glob_import |
16 | // | 15 | // |
17 | // Expands glob imports. | 16 | // Expands glob imports. |
@@ -40,11 +39,15 @@ use either::Either; | |||
40 | pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 39 | pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
41 | let star = ctx.find_token_at_offset(T![*])?; | 40 | let star = ctx.find_token_at_offset(T![*])?; |
42 | let mod_path = find_mod_path(&star)?; | 41 | let mod_path = find_mod_path(&star)?; |
42 | let module = match ctx.sema.resolve_path(&mod_path)? { | ||
43 | PathResolution::Def(ModuleDef::Module(it)) => it, | ||
44 | _ => return None, | ||
45 | }; | ||
43 | 46 | ||
44 | let source_file = ctx.source_file(); | 47 | let source_file = ctx.source_file(); |
45 | let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); | 48 | let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); |
46 | 49 | ||
47 | let defs_in_mod = find_defs_in_mod(ctx, scope, &mod_path)?; | 50 | let defs_in_mod = find_defs_in_mod(ctx, scope, module)?; |
48 | let name_refs_in_source_file = | 51 | let name_refs_in_source_file = |
49 | source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); | 52 | source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); |
50 | let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file); | 53 | let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file); |
@@ -82,17 +85,8 @@ impl Def { | |||
82 | fn find_defs_in_mod( | 85 | fn find_defs_in_mod( |
83 | ctx: &AssistContext, | 86 | ctx: &AssistContext, |
84 | from: SemanticsScope<'_>, | 87 | from: SemanticsScope<'_>, |
85 | path: &ast::Path, | 88 | module: hir::Module, |
86 | ) -> Option<Vec<Def>> { | 89 | ) -> Option<Vec<Def>> { |
87 | let hir_path = ctx.sema.lower_path(&path)?; | ||
88 | let module = if let Some(PathResolution::Def(ModuleDef::Module(module))) = | ||
89 | from.resolve_hir_path_qualifier(&hir_path) | ||
90 | { | ||
91 | module | ||
92 | } else { | ||
93 | return None; | ||
94 | }; | ||
95 | |||
96 | let module_scope = module.scope(ctx.db(), from.module()); | 90 | let module_scope = module.scope(ctx.db(), from.module()); |
97 | 91 | ||
98 | let mut defs = vec![]; | 92 | let mut defs = vec![]; |
diff --git a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
index 4bcdae7ba..d62e06b4a 100644
--- a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -106,7 +106,12 @@ fn insert_import(
106 | if let Some(mut mod_path) = mod_path { | 106 | if let Some(mut mod_path) = mod_path { |
107 | mod_path.segments.pop(); | 107 | mod_path.segments.pop(); |
108 | mod_path.segments.push(variant_hir_name.clone()); | 108 | mod_path.segments.push(variant_hir_name.clone()); |
109 | insert_use_statement(path.syntax(), &mod_path, ctx, builder.text_edit_builder()); | 109 | insert_use_statement( |
110 | path.syntax(), | ||
111 | &mod_path.to_string(), | ||
112 | ctx, | ||
113 | builder.text_edit_builder(), | ||
114 | ); | ||
110 | } | 115 | } |
111 | Some(()) | 116 | Some(()) |
112 | } | 117 | } |
diff --git a/crates/assists/src/handlers/replace_qualified_name_with_use.rs b/crates/assists/src/handlers/replace_qualified_name_with_use.rs
index 011bf1106..470e5f8ff 100644
--- a/crates/assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,5 +1,5 @@
1 | use hir; | 1 | use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SyntaxNode, TextRange}; |
2 | use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode}; | 2 | use test_utils::mark; |
3 | 3 | ||
4 | use crate::{ | 4 | use crate::{ |
5 | utils::{find_insert_use_container, insert_use_statement}, | 5 | utils::{find_insert_use_container, insert_use_statement}, |
@@ -28,12 +28,19 @@ pub(crate) fn replace_qualified_name_with_use( | |||
28 | if path.syntax().ancestors().find_map(ast::Use::cast).is_some() { | 28 | if path.syntax().ancestors().find_map(ast::Use::cast).is_some() { |
29 | return None; | 29 | return None; |
30 | } | 30 | } |
31 | 31 | if path.qualifier().is_none() { | |
32 | let hir_path = ctx.sema.lower_path(&path)?; | 32 | mark::hit!(dont_import_trivial_paths); |
33 | let segments = collect_hir_path_segments(&hir_path)?; | ||
34 | if segments.len() < 2 { | ||
35 | return None; | 33 | return None; |
36 | } | 34 | } |
35 | let path_to_import = path.to_string().clone(); | ||
36 | let path_to_import = match path.segment()?.generic_arg_list() { | ||
37 | Some(generic_args) => { | ||
38 | let generic_args_start = | ||
39 | generic_args.syntax().text_range().start() - path.syntax().text_range().start(); | ||
40 | &path_to_import[TextRange::up_to(generic_args_start)] | ||
41 | } | ||
42 | None => path_to_import.as_str(), | ||
43 | }; | ||
37 | 44 | ||
38 | let target = path.syntax().text_range(); | 45 | let target = path.syntax().text_range(); |
39 | acc.add( | 46 | acc.add( |
@@ -41,12 +48,16 @@ pub(crate) fn replace_qualified_name_with_use( | |||
41 | "Replace qualified path with use", | 48 | "Replace qualified path with use", |
42 | target, | 49 | target, |
43 | |builder| { | 50 | |builder| { |
44 | let path_to_import = hir_path.mod_path().clone(); | ||
45 | let container = match find_insert_use_container(path.syntax(), ctx) { | 51 | let container = match find_insert_use_container(path.syntax(), ctx) { |
46 | Some(c) => c, | 52 | Some(c) => c, |
47 | None => return, | 53 | None => return, |
48 | }; | 54 | }; |
49 | insert_use_statement(path.syntax(), &path_to_import, ctx, builder.text_edit_builder()); | 55 | insert_use_statement( |
56 | path.syntax(), | ||
57 | &path_to_import.to_string(), | ||
58 | ctx, | ||
59 | builder.text_edit_builder(), | ||
60 | ); | ||
50 | 61 | ||
51 | // Now that we've brought the name into scope, re-qualify all paths that could be | 62 | // Now that we've brought the name into scope, re-qualify all paths that could be |
52 | // affected (that is, all paths inside the node we added the `use` to). | 63 | // affected (that is, all paths inside the node we added the `use` to). |
@@ -58,26 +69,6 @@ pub(crate) fn replace_qualified_name_with_use( | |||
58 | ) | 69 | ) |
59 | } | 70 | } |
60 | 71 | ||
61 | fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> { | ||
62 | let mut ps = Vec::<SmolStr>::with_capacity(10); | ||
63 | match path.kind() { | ||
64 | hir::PathKind::Abs => ps.push("".into()), | ||
65 | hir::PathKind::Crate => ps.push("crate".into()), | ||
66 | hir::PathKind::Plain => {} | ||
67 | hir::PathKind::Super(0) => ps.push("self".into()), | ||
68 | hir::PathKind::Super(lvl) => { | ||
69 | let mut chain = "super".to_string(); | ||
70 | for _ in 0..*lvl { | ||
71 | chain += "::super"; | ||
72 | } | ||
73 | ps.push(chain.into()); | ||
74 | } | ||
75 | hir::PathKind::DollarCrate(_) => return None, | ||
76 | } | ||
77 | ps.extend(path.segments().iter().map(|it| it.name.to_string().into())); | ||
78 | Some(ps) | ||
79 | } | ||
80 | |||
81 | /// Adds replacements to `re` that shorten `path` in all descendants of `node`. | 72 | /// Adds replacements to `re` that shorten `path` in all descendants of `node`. |
82 | fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { | 73 | fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { |
83 | for child in node.children() { | 74 | for child in node.children() { |
@@ -467,7 +458,8 @@ impl Debug for Foo { | |||
467 | } | 458 | } |
468 | 459 | ||
469 | #[test] | 460 | #[test] |
470 | fn test_replace_not_applicable_one_segment() { | 461 | fn dont_import_trivial_paths() { |
462 | mark::check!(dont_import_trivial_paths); | ||
471 | check_assist_not_applicable( | 463 | check_assist_not_applicable( |
472 | replace_qualified_name_with_use, | 464 | replace_qualified_name_with_use, |
473 | r" | 465 | r" |
diff --git a/crates/assists/src/lib.rs b/crates/assists/src/lib.rs
index ae90d68a3..c589b08dc 100644
--- a/crates/assists/src/lib.rs
+++ b/crates/assists/src/lib.rs
@@ -66,13 +66,13 @@ pub struct GroupLabel(pub String);
66 | 66 | ||
67 | #[derive(Debug, Clone)] | 67 | #[derive(Debug, Clone)] |
68 | pub struct Assist { | 68 | pub struct Assist { |
69 | id: AssistId, | 69 | pub id: AssistId, |
70 | /// Short description of the assist, as shown in the UI. | 70 | /// Short description of the assist, as shown in the UI. |
71 | label: String, | 71 | label: String, |
72 | group: Option<GroupLabel>, | 72 | pub group: Option<GroupLabel>, |
73 | /// Target ranges are used to sort assists: the smaller the target range, | 73 | /// Target ranges are used to sort assists: the smaller the target range, |
74 | /// the more specific assist is, and so it should be sorted first. | 74 | /// the more specific assist is, and so it should be sorted first. |
75 | target: TextRange, | 75 | pub target: TextRange, |
76 | } | 76 | } |
77 | 77 | ||
78 | #[derive(Debug, Clone)] | 78 | #[derive(Debug, Clone)] |
@@ -82,6 +82,11 @@ pub struct ResolvedAssist { | |||
82 | } | 82 | } |
83 | 83 | ||
84 | impl Assist { | 84 | impl Assist { |
85 | fn new(id: AssistId, label: String, group: Option<GroupLabel>, target: TextRange) -> Assist { | ||
86 | assert!(label.starts_with(char::is_uppercase)); | ||
87 | Assist { id, label, group, target } | ||
88 | } | ||
89 | |||
85 | /// Return all the assists applicable at the given position. | 90 | /// Return all the assists applicable at the given position. |
86 | /// | 91 | /// |
87 | /// Assists are returned in the "unresolved" state, that is only labels are | 92 | /// Assists are returned in the "unresolved" state, that is only labels are |
@@ -114,30 +119,8 @@ impl Assist { | |||
114 | acc.finish_resolved() | 119 | acc.finish_resolved() |
115 | } | 120 | } |
116 | 121 | ||
117 | pub(crate) fn new( | 122 | pub fn label(&self) -> &str { |
118 | id: AssistId, | 123 | self.label.as_str() |
119 | label: String, | ||
120 | group: Option<GroupLabel>, | ||
121 | target: TextRange, | ||
122 | ) -> Assist { | ||
123 | assert!(label.starts_with(|c: char| c.is_uppercase())); | ||
124 | Assist { id, label, group, target } | ||
125 | } | ||
126 | |||
127 | pub fn id(&self) -> AssistId { | ||
128 | self.id | ||
129 | } | ||
130 | |||
131 | pub fn label(&self) -> String { | ||
132 | self.label.clone() | ||
133 | } | ||
134 | |||
135 | pub fn group(&self) -> Option<GroupLabel> { | ||
136 | self.group.clone() | ||
137 | } | ||
138 | |||
139 | pub fn target(&self) -> TextRange { | ||
140 | self.target | ||
141 | } | 124 | } |
142 | } | 125 | } |
143 | 126 | ||
diff --git a/crates/assists/src/tests/generated.rs b/crates/assists/src/tests/generated.rs
index d16e6fb0a..173567003 100644
--- a/crates/assists/src/tests/generated.rs
+++ b/crates/assists/src/tests/generated.rs
@@ -82,8 +82,8 @@ trait Trait {
82 | impl Trait for () { | 82 | impl Trait for () { |
83 | Type X = (); | 83 | Type X = (); |
84 | fn foo(&self) {} | 84 | fn foo(&self) {} |
85 | $0fn bar(&self) {} | ||
86 | 85 | ||
86 | $0fn bar(&self) {} | ||
87 | } | 87 | } |
88 | "#####, | 88 | "#####, |
89 | ) | 89 | ) |
@@ -115,7 +115,6 @@ impl Trait<u32> for () { | |||
115 | fn foo(&self) -> u32 { | 115 | fn foo(&self) -> u32 { |
116 | ${0:todo!()} | 116 | ${0:todo!()} |
117 | } | 117 | } |
118 | |||
119 | } | 118 | } |
120 | "#####, | 119 | "#####, |
121 | ) | 120 | ) |
diff --git a/crates/assists/src/utils/insert_use.rs b/crates/assists/src/utils/insert_use.rs
index 50a62ee82..49096a67c 100644
--- a/crates/assists/src/utils/insert_use.rs
+++ b/crates/assists/src/utils/insert_use.rs
@@ -5,7 +5,6 @@
5 | use std::iter::successors; | 5 | use std::iter::successors; |
6 | 6 | ||
7 | use either::Either; | 7 | use either::Either; |
8 | use hir::{self, ModPath}; | ||
9 | use syntax::{ | 8 | use syntax::{ |
10 | ast::{self, NameOwner, VisibilityOwner}, | 9 | ast::{self, NameOwner, VisibilityOwner}, |
11 | AstNode, AstToken, Direction, SmolStr, | 10 | AstNode, AstToken, Direction, SmolStr, |
@@ -35,11 +34,11 @@ pub(crate) fn find_insert_use_container( | |||
35 | pub(crate) fn insert_use_statement( | 34 | pub(crate) fn insert_use_statement( |
36 | // Ideally the position of the cursor, used to | 35 | // Ideally the position of the cursor, used to |
37 | position: &SyntaxNode, | 36 | position: &SyntaxNode, |
38 | path_to_import: &ModPath, | 37 | path_to_import: &str, |
39 | ctx: &AssistContext, | 38 | ctx: &AssistContext, |
40 | builder: &mut TextEditBuilder, | 39 | builder: &mut TextEditBuilder, |
41 | ) { | 40 | ) { |
42 | let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>(); | 41 | let target = path_to_import.split("::").map(SmolStr::new).collect::<Vec<_>>(); |
43 | let container = find_insert_use_container(position, ctx); | 42 | let container = find_insert_use_container(position, ctx); |
44 | 43 | ||
45 | if let Some(container) = container { | 44 | if let Some(container) = container { |
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 5dc3ae3b1..c442654dd 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -12,6 +12,7 @@ use hir_def::{
12 | docs::Documentation, | 12 | docs::Documentation, |
13 | expr::{BindingAnnotation, Pat, PatId}, | 13 | expr::{BindingAnnotation, Pat, PatId}, |
14 | import_map, | 14 | import_map, |
15 | path::ModPath, | ||
15 | per_ns::PerNs, | 16 | per_ns::PerNs, |
16 | resolver::{HasResolver, Resolver}, | 17 | resolver::{HasResolver, Resolver}, |
17 | src::HasSource as _, | 18 | src::HasSource as _, |
@@ -344,11 +345,7 @@ impl Module { | |||
344 | 345 | ||
345 | /// Finds a path that can be used to refer to the given item from within | 346 | /// Finds a path that can be used to refer to the given item from within |
346 | /// this module, if possible. | 347 | /// this module, if possible. |
347 | pub fn find_use_path( | 348 | pub fn find_use_path(self, db: &dyn DefDatabase, item: impl Into<ItemInNs>) -> Option<ModPath> { |
348 | self, | ||
349 | db: &dyn DefDatabase, | ||
350 | item: impl Into<ItemInNs>, | ||
351 | ) -> Option<hir_def::path::ModPath> { | ||
352 | hir_def::find_path::find_path(db, item.into(), self.into()) | 349 | hir_def::find_path::find_path(db, item.into(), self.into()) |
353 | } | 350 | } |
354 | } | 351 | } |
@@ -1126,7 +1123,7 @@ impl ImplDef { | |||
1126 | .value | 1123 | .value |
1127 | .attrs() | 1124 | .attrs() |
1128 | .filter_map(|it| { | 1125 | .filter_map(|it| { |
1129 | let path = hir_def::path::ModPath::from_src(it.path()?, &hygenic)?; | 1126 | let path = ModPath::from_src(it.path()?, &hygenic)?; |
1130 | if path.as_ident()?.to_string() == "derive" { | 1127 | if path.as_ident()?.to_string() == "derive" { |
1131 | Some(it) | 1128 | Some(it) |
1132 | } else { | 1129 | } else { |
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 4ae2bd085..8961ba8fd 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -48,7 +48,7 @@ pub use hir_def::{
48 | builtin_type::BuiltinType, | 48 | builtin_type::BuiltinType, |
49 | docs::Documentation, | 49 | docs::Documentation, |
50 | nameres::ModuleSource, | 50 | nameres::ModuleSource, |
51 | path::{ModPath, Path, PathKind}, | 51 | path::ModPath, |
52 | type_ref::{Mutability, TypeRef}, | 52 | type_ref::{Mutability, TypeRef}, |
53 | }; | 53 | }; |
54 | pub use hir_expand::{ | 54 | pub use hir_expand::{ |
@@ -60,4 +60,7 @@ pub use hir_ty::display::HirDisplay; | |||
60 | // These are negative re-exports: pub using these names is forbidden, they | 60 | // These are negative re-exports: pub using these names is forbidden, they |
61 | // should remain private to hir internals. | 61 | // should remain private to hir internals. |
62 | #[allow(unused)] | 62 | #[allow(unused)] |
63 | use hir_expand::hygiene::Hygiene; | 63 | use { |
64 | hir_def::path::{Path, PathKind}, | ||
65 | hir_expand::hygiene::Hygiene, | ||
66 | }; | ||
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 3953017c3..c693176fa 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -6,7 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors};
6 | 6 | ||
7 | use base_db::{FileId, FileRange}; | 7 | use base_db::{FileId, FileRange}; |
8 | use hir_def::{ | 8 | use hir_def::{ |
9 | resolver::{self, HasResolver, Resolver}, | 9 | resolver::{self, HasResolver, Resolver, TypeNs}, |
10 | AsMacroCall, FunctionId, TraitId, VariantId, | 10 | AsMacroCall, FunctionId, TraitId, VariantId, |
11 | }; | 11 | }; |
12 | use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; | 12 | use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; |
@@ -22,12 +22,11 @@ use crate::{ | |||
22 | db::HirDatabase, | 22 | db::HirDatabase, |
23 | diagnostics::Diagnostic, | 23 | diagnostics::Diagnostic, |
24 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, | 24 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, |
25 | source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer}, | 25 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, |
26 | AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, | 26 | AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, |
27 | Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef, | 27 | Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef, |
28 | VariantDef, | 28 | VariantDef, |
29 | }; | 29 | }; |
30 | use resolver::TypeNs; | ||
31 | 30 | ||
32 | #[derive(Debug, Clone, PartialEq, Eq)] | 31 | #[derive(Debug, Clone, PartialEq, Eq)] |
33 | pub enum PathResolution { | 32 | pub enum PathResolution { |
@@ -228,10 +227,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
228 | self.imp.resolve_variant(record_lit).map(VariantDef::from) | 227 | self.imp.resolve_variant(record_lit).map(VariantDef::from) |
229 | } | 228 | } |
230 | 229 | ||
231 | pub fn lower_path(&self, path: &ast::Path) -> Option<Path> { | ||
232 | self.imp.lower_path(path) | ||
233 | } | ||
234 | |||
235 | pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { | 230 | pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { |
236 | self.imp.resolve_bind_pat_to_const(pat) | 231 | self.imp.resolve_bind_pat_to_const(pat) |
237 | } | 232 | } |
@@ -467,11 +462,6 @@ impl<'db> SemanticsImpl<'db> { | |||
467 | self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) | 462 | self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) |
468 | } | 463 | } |
469 | 464 | ||
470 | fn lower_path(&self, path: &ast::Path) -> Option<Path> { | ||
471 | let src = self.find_file(path.syntax().clone()); | ||
472 | Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) | ||
473 | } | ||
474 | |||
475 | fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { | 465 | fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { |
476 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) | 466 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) |
477 | } | 467 | } |
@@ -758,28 +748,7 @@ impl<'a> SemanticsScope<'a> { | |||
758 | pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> { | 748 | pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> { |
759 | let hygiene = Hygiene::new(self.db.upcast(), self.file_id); | 749 | let hygiene = Hygiene::new(self.db.upcast(), self.file_id); |
760 | let path = Path::from_src(path.clone(), &hygiene)?; | 750 | let path = Path::from_src(path.clone(), &hygiene)?; |
761 | self.resolve_hir_path(&path) | 751 | resolve_hir_path(self.db, &self.resolver, &path) |
762 | } | ||
763 | |||
764 | pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> { | ||
765 | resolve_hir_path(self.db, &self.resolver, path) | ||
766 | } | ||
767 | |||
768 | /// Resolves a path where we know it is a qualifier of another path. | ||
769 | /// | ||
770 | /// For example, if we have: | ||
771 | /// ``` | ||
772 | /// mod my { | ||
773 | /// pub mod foo { | ||
774 | /// struct Bar; | ||
775 | /// } | ||
776 | /// | ||
777 | /// pub fn foo() {} | ||
778 | /// } | ||
779 | /// ``` | ||
780 | /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. | ||
781 | pub fn resolve_hir_path_qualifier(&self, path: &Path) -> Option<PathResolution> { | ||
782 | resolve_hir_path_qualifier(self.db, &self.resolver, path) | ||
783 | } | 752 | } |
784 | } | 753 | } |
785 | 754 | ||
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 8750584f9..1d13c4f1d 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -13,6 +13,7 @@ use hir_def::{
13 | Body, BodySourceMap, | 13 | Body, BodySourceMap, |
14 | }, | 14 | }, |
15 | expr::{ExprId, Pat, PatId}, | 15 | expr::{ExprId, Pat, PatId}, |
16 | path::{ModPath, Path, PathKind}, | ||
16 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, | 17 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, |
17 | AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, | 18 | AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, |
18 | }; | 19 | }; |
@@ -28,8 +29,7 @@ use syntax::{ | |||
28 | 29 | ||
29 | use crate::{ | 30 | use crate::{ |
30 | db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, | 31 | db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, |
31 | MacroDef, ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, | 32 | MacroDef, ModuleDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, |
32 | TypeParam, | ||
33 | }; | 33 | }; |
34 | use base_db::CrateId; | 34 | use base_db::CrateId; |
35 | 35 | ||
@@ -508,7 +508,7 @@ pub(crate) fn resolve_hir_path( | |||
508 | /// } | 508 | /// } |
509 | /// ``` | 509 | /// ``` |
510 | /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. | 510 | /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. |
511 | pub(crate) fn resolve_hir_path_qualifier( | 511 | fn resolve_hir_path_qualifier( |
512 | db: &dyn HirDatabase, | 512 | db: &dyn HirDatabase, |
513 | resolver: &Resolver, | 513 | resolver: &Resolver, |
514 | path: &Path, | 514 | path: &Path, |
diff --git a/crates/hir_def/src/diagnostics.rs b/crates/hir_def/src/diagnostics.rs
index 2e38a978f..c7723de00 100644
--- a/crates/hir_def/src/diagnostics.rs
+++ b/crates/hir_def/src/diagnostics.rs
@@ -15,6 +15,9 @@ pub struct UnresolvedModule {
15 | } | 15 | } |
16 | 16 | ||
17 | impl Diagnostic for UnresolvedModule { | 17 | impl Diagnostic for UnresolvedModule { |
18 | fn name(&self) -> &'static str { | ||
19 | "unresolved-module" | ||
20 | } | ||
18 | fn message(&self) -> String { | 21 | fn message(&self) -> String { |
19 | "unresolved module".to_string() | 22 | "unresolved module".to_string() |
20 | } | 23 | } |
diff --git a/crates/hir_def/src/path.rs b/crates/hir_def/src/path.rs
index 74d26f08b..99395667d 100644
--- a/crates/hir_def/src/path.rs
+++ b/crates/hir_def/src/path.rs
@@ -154,12 +154,6 @@ pub enum GenericArg {
154 | 154 | ||
155 | impl Path { | 155 | impl Path { |
156 | /// Converts an `ast::Path` to `Path`. Works with use trees. | 156 | /// Converts an `ast::Path` to `Path`. Works with use trees. |
157 | #[deprecated = "Doesn't handle hygiene, don't add new calls, remove old ones"] | ||
158 | pub fn from_ast(path: ast::Path) -> Option<Path> { | ||
159 | lower::lower_path(path, &Hygiene::new_unhygienic()) | ||
160 | } | ||
161 | |||
162 | /// Converts an `ast::Path` to `Path`. Works with use trees. | ||
163 | /// It correctly handles `$crate` based path from macro call. | 157 | /// It correctly handles `$crate` based path from macro call. |
164 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<Path> { | 158 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<Path> { |
165 | lower::lower_path(path, hygiene) | 159 | lower::lower_path(path, hygiene) |
diff --git a/crates/hir_expand/src/diagnostics.rs b/crates/hir_expand/src/diagnostics.rs
index 59d35debe..6c81b2501 100644
--- a/crates/hir_expand/src/diagnostics.rs
+++ b/crates/hir_expand/src/diagnostics.rs
@@ -21,6 +21,7 @@ use syntax::SyntaxNodePtr;
21 | use crate::InFile; | 21 | use crate::InFile; |
22 | 22 | ||
23 | pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { | 23 | pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { |
24 | fn name(&self) -> &'static str; | ||
24 | fn message(&self) -> String; | 25 | fn message(&self) -> String; |
25 | /// Used in highlighting and related purposes | 26 | /// Used in highlighting and related purposes |
26 | fn display_source(&self) -> InFile<SyntaxNodePtr>; | 27 | fn display_source(&self) -> InFile<SyntaxNodePtr>; |
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml
index 83b5013a9..a319b0ce8 100644
--- a/crates/hir_ty/Cargo.toml
+++ b/crates/hir_ty/Cargo.toml
@@ -16,9 +16,9 @@ ena = "0.14.0"
16 | log = "0.4.8" | 16 | log = "0.4.8" |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | scoped-tls = "1" | 18 | scoped-tls = "1" |
19 | chalk-solve = { version = "0.21.0" } | 19 | chalk-solve = { version = "0.23.0" } |
20 | chalk-ir = { version = "0.21.0" } | 20 | chalk-ir = { version = "0.23.0" } |
21 | chalk-recursive = { version = "0.21.0" } | 21 | chalk-recursive = { version = "0.23.0" } |
22 | 22 | ||
23 | stdx = { path = "../stdx" } | 23 | stdx = { path = "../stdx" } |
24 | hir_def = { path = "../hir_def" } | 24 | hir_def = { path = "../hir_def" } |
diff --git a/crates/hir_ty/src/diagnostics.rs b/crates/hir_ty/src/diagnostics.rs
index ae0cf8d09..38fa24ee0 100644
--- a/crates/hir_ty/src/diagnostics.rs
+++ b/crates/hir_ty/src/diagnostics.rs
@@ -32,6 +32,10 @@ pub struct NoSuchField {
32 | } | 32 | } |
33 | 33 | ||
34 | impl Diagnostic for NoSuchField { | 34 | impl Diagnostic for NoSuchField { |
35 | fn name(&self) -> &'static str { | ||
36 | "no-such-field" | ||
37 | } | ||
38 | |||
35 | fn message(&self) -> String { | 39 | fn message(&self) -> String { |
36 | "no such field".to_string() | 40 | "no such field".to_string() |
37 | } | 41 | } |
@@ -54,6 +58,9 @@ pub struct MissingFields { | |||
54 | } | 58 | } |
55 | 59 | ||
56 | impl Diagnostic for MissingFields { | 60 | impl Diagnostic for MissingFields { |
61 | fn name(&self) -> &'static str { | ||
62 | "missing-structure-fields" | ||
63 | } | ||
57 | fn message(&self) -> String { | 64 | fn message(&self) -> String { |
58 | let mut buf = String::from("Missing structure fields:\n"); | 65 | let mut buf = String::from("Missing structure fields:\n"); |
59 | for field in &self.missed_fields { | 66 | for field in &self.missed_fields { |
@@ -87,6 +94,9 @@ pub struct MissingPatFields { | |||
87 | } | 94 | } |
88 | 95 | ||
89 | impl Diagnostic for MissingPatFields { | 96 | impl Diagnostic for MissingPatFields { |
97 | fn name(&self) -> &'static str { | ||
98 | "missing-pat-fields" | ||
99 | } | ||
90 | fn message(&self) -> String { | 100 | fn message(&self) -> String { |
91 | let mut buf = String::from("Missing structure fields:\n"); | 101 | let mut buf = String::from("Missing structure fields:\n"); |
92 | for field in &self.missed_fields { | 102 | for field in &self.missed_fields { |
@@ -117,6 +127,9 @@ pub struct MissingMatchArms { | |||
117 | } | 127 | } |
118 | 128 | ||
119 | impl Diagnostic for MissingMatchArms { | 129 | impl Diagnostic for MissingMatchArms { |
130 | fn name(&self) -> &'static str { | ||
131 | "missing-match-arm" | ||
132 | } | ||
120 | fn message(&self) -> String { | 133 | fn message(&self) -> String { |
121 | String::from("Missing match arm") | 134 | String::from("Missing match arm") |
122 | } | 135 | } |
@@ -135,6 +148,9 @@ pub struct MissingOkInTailExpr { | |||
135 | } | 148 | } |
136 | 149 | ||
137 | impl Diagnostic for MissingOkInTailExpr { | 150 | impl Diagnostic for MissingOkInTailExpr { |
151 | fn name(&self) -> &'static str { | ||
152 | "missing-ok-in-tail-expr" | ||
153 | } | ||
138 | fn message(&self) -> String { | 154 | fn message(&self) -> String { |
139 | "wrap return expression in Ok".to_string() | 155 | "wrap return expression in Ok".to_string() |
140 | } | 156 | } |
@@ -153,6 +169,9 @@ pub struct BreakOutsideOfLoop { | |||
153 | } | 169 | } |
154 | 170 | ||
155 | impl Diagnostic for BreakOutsideOfLoop { | 171 | impl Diagnostic for BreakOutsideOfLoop { |
172 | fn name(&self) -> &'static str { | ||
173 | "break-outside-of-loop" | ||
174 | } | ||
156 | fn message(&self) -> String { | 175 | fn message(&self) -> String { |
157 | "break outside of loop".to_string() | 176 | "break outside of loop".to_string() |
158 | } | 177 | } |
@@ -171,6 +190,9 @@ pub struct MissingUnsafe { | |||
171 | } | 190 | } |
172 | 191 | ||
173 | impl Diagnostic for MissingUnsafe { | 192 | impl Diagnostic for MissingUnsafe { |
193 | fn name(&self) -> &'static str { | ||
194 | "missing-unsafe" | ||
195 | } | ||
174 | fn message(&self) -> String { | 196 | fn message(&self) -> String { |
175 | format!("This operation is unsafe and requires an unsafe function or block") | 197 | format!("This operation is unsafe and requires an unsafe function or block") |
176 | } | 198 | } |
@@ -191,6 +213,9 @@ pub struct MismatchedArgCount { | |||
191 | } | 213 | } |
192 | 214 | ||
193 | impl Diagnostic for MismatchedArgCount { | 215 | impl Diagnostic for MismatchedArgCount { |
216 | fn name(&self) -> &'static str { | ||
217 | "mismatched-arg-count" | ||
218 | } | ||
194 | fn message(&self) -> String { | 219 | fn message(&self) -> String { |
195 | let s = if self.expected == 1 { "" } else { "s" }; | 220 | let s = if self.expected == 1 { "" } else { "s" }; |
196 | format!("Expected {} argument{}, found {}", self.expected, s, self.found) | 221 | format!("Expected {} argument{}, found {}", self.expected, s, self.found) |
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs
index fb76e2e4e..278a4b947 100644
--- a/crates/hir_ty/src/diagnostics/expr.rs
+++ b/crates/hir_ty/src/diagnostics/expr.rs
@@ -223,10 +223,10 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
223 | db.body_with_source_map(self.owner.into()); | 223 | db.body_with_source_map(self.owner.into()); |
224 | 224 | ||
225 | let match_expr_ty = match infer.type_of_expr.get(match_expr) { | 225 | let match_expr_ty = match infer.type_of_expr.get(match_expr) { |
226 | Some(ty) => ty, | ||
227 | // If we can't resolve the type of the match expression | 226 | // If we can't resolve the type of the match expression |
228 | // we cannot perform exhaustiveness checks. | 227 | // we cannot perform exhaustiveness checks. |
229 | None => return, | 228 | None | Some(Ty::Unknown) => return, |
229 | Some(ty) => ty, | ||
230 | }; | 230 | }; |
231 | 231 | ||
232 | let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; | 232 | let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; |
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index 7f007f1d6..5bd03f2ac 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -1335,6 +1335,23 @@ fn panic(a: Category, b: Category) {
1335 | ); | 1335 | ); |
1336 | } | 1336 | } |
1337 | 1337 | ||
1338 | #[test] | ||
1339 | fn unknown_type() { | ||
1340 | check_diagnostics( | ||
1341 | r#" | ||
1342 | enum Option<T> { Some(T), None } | ||
1343 | |||
1344 | fn main() { | ||
1345 | // `Never` is deliberately not defined so that it's an uninferred type. | ||
1346 | match Option::<Never>::None { | ||
1347 | None => (), | ||
1348 | Some(never) => match never {}, | ||
1349 | } | ||
1350 | } | ||
1351 | "#, | ||
1352 | ); | ||
1353 | } | ||
1354 | |||
1338 | mod false_negatives { | 1355 | mod false_negatives { |
1339 | //! The implementation of match checking here is a work in progress. As we roll this out, we | 1356 | //! The implementation of match checking here is a work in progress. As we roll this out, we |
1340 | //! prefer false negatives to false positives (ideally there would be no false positives). This | 1357 | //! prefer false negatives to false positives (ideally there would be no false positives). This |
diff --git a/crates/hir_ty/src/traits.rs b/crates/hir_ty/src/traits.rs
index 1c3abb18f..14cd3a2b4 100644
--- a/crates/hir_ty/src/traits.rs
+++ b/crates/hir_ty/src/traits.rs
@@ -170,11 +170,11 @@ fn solve(
170 | let mut solve = || { | 170 | let mut solve = || { |
171 | if is_chalk_print() { | 171 | if is_chalk_print() { |
172 | let logging_db = LoggingRustIrDatabase::new(context); | 172 | let logging_db = LoggingRustIrDatabase::new(context); |
173 | let solution = solver.solve_limited(&logging_db, goal, should_continue); | 173 | let solution = solver.solve_limited(&logging_db, goal, &should_continue); |
174 | log::debug!("chalk program:\n{}", logging_db); | 174 | log::debug!("chalk program:\n{}", logging_db); |
175 | solution | 175 | solution |
176 | } else { | 176 | } else { |
177 | solver.solve_limited(&context, goal, should_continue) | 177 | solver.solve_limited(&context, goal, &should_continue) |
178 | } | 178 | } |
179 | }; | 179 | }; |
180 | 180 | ||
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index a3ec98178..606a6064b 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -4,7 +4,7 @@
4 | //! macro-expanded files, but we need to present them to the users in terms of | 4 | //! macro-expanded files, but we need to present them to the users in terms of |
5 | //! original files. So we need to map the ranges. | 5 | //! original files. So we need to map the ranges. |
6 | 6 | ||
7 | use std::cell::RefCell; | 7 | use std::{cell::RefCell, collections::HashSet}; |
8 | 8 | ||
9 | use base_db::SourceDatabase; | 9 | use base_db::SourceDatabase; |
10 | use hir::{diagnostics::DiagnosticSinkBuilder, Semantics}; | 10 | use hir::{diagnostics::DiagnosticSinkBuilder, Semantics}; |
@@ -31,6 +31,7 @@ pub(crate) fn diagnostics( | |||
31 | db: &RootDatabase, | 31 | db: &RootDatabase, |
32 | file_id: FileId, | 32 | file_id: FileId, |
33 | enable_experimental: bool, | 33 | enable_experimental: bool, |
34 | disabled_diagnostics: Option<HashSet<String>>, | ||
34 | ) -> Vec<Diagnostic> { | 35 | ) -> Vec<Diagnostic> { |
35 | let _p = profile::span("diagnostics"); | 36 | let _p = profile::span("diagnostics"); |
36 | let sema = Semantics::new(db); | 37 | let sema = Semantics::new(db); |
@@ -39,6 +40,7 @@ pub(crate) fn diagnostics( | |||
39 | 40 | ||
40 | // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. | 41 | // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. |
41 | res.extend(parse.errors().iter().take(128).map(|err| Diagnostic { | 42 | res.extend(parse.errors().iter().take(128).map(|err| Diagnostic { |
43 | name: None, | ||
42 | range: err.range(), | 44 | range: err.range(), |
43 | message: format!("Syntax Error: {}", err), | 45 | message: format!("Syntax Error: {}", err), |
44 | severity: Severity::Error, | 46 | severity: Severity::Error, |
@@ -50,7 +52,7 @@ pub(crate) fn diagnostics( | |||
50 | check_struct_shorthand_initialization(&mut res, file_id, &node); | 52 | check_struct_shorthand_initialization(&mut res, file_id, &node); |
51 | } | 53 | } |
52 | let res = RefCell::new(res); | 54 | let res = RefCell::new(res); |
53 | let mut sink = DiagnosticSinkBuilder::new() | 55 | let mut sink_builder = DiagnosticSinkBuilder::new() |
54 | .on::<hir::diagnostics::UnresolvedModule, _>(|d| { | 56 | .on::<hir::diagnostics::UnresolvedModule, _>(|d| { |
55 | res.borrow_mut().push(diagnostic_with_fix(d, &sema)); | 57 | res.borrow_mut().push(diagnostic_with_fix(d, &sema)); |
56 | }) | 58 | }) |
@@ -64,10 +66,19 @@ pub(crate) fn diagnostics( | |||
64 | res.borrow_mut().push(diagnostic_with_fix(d, &sema)); | 66 | res.borrow_mut().push(diagnostic_with_fix(d, &sema)); |
65 | }) | 67 | }) |
66 | // Only collect experimental diagnostics when they're enabled. | 68 | // Only collect experimental diagnostics when they're enabled. |
67 | .filter(|diag| !diag.is_experimental() || enable_experimental) | 69 | .filter(|diag| !diag.is_experimental() || enable_experimental); |
70 | |||
71 | if let Some(disabled_diagnostics) = disabled_diagnostics { | ||
72 | // Do not collect disabled diagnostics. | ||
73 | sink_builder = sink_builder.filter(move |diag| !disabled_diagnostics.contains(diag.name())); | ||
74 | } | ||
75 | |||
76 | // Finalize the `DiagnosticSink` building process. | ||
77 | let mut sink = sink_builder | ||
68 | // Diagnostics not handled above get no fix and default treatment. | 78 | // Diagnostics not handled above get no fix and default treatment. |
69 | .build(|d| { | 79 | .build(|d| { |
70 | res.borrow_mut().push(Diagnostic { | 80 | res.borrow_mut().push(Diagnostic { |
81 | name: Some(d.name().into()), | ||
71 | message: d.message(), | 82 | message: d.message(), |
72 | range: sema.diagnostics_display_range(d).range, | 83 | range: sema.diagnostics_display_range(d).range, |
73 | severity: Severity::Error, | 84 | severity: Severity::Error, |
@@ -84,6 +95,7 @@ pub(crate) fn diagnostics( | |||
84 | 95 | ||
85 | fn diagnostic_with_fix<D: DiagnosticWithFix>(d: &D, sema: &Semantics<RootDatabase>) -> Diagnostic { | 96 | fn diagnostic_with_fix<D: DiagnosticWithFix>(d: &D, sema: &Semantics<RootDatabase>) -> Diagnostic { |
86 | Diagnostic { | 97 | Diagnostic { |
98 | name: Some(d.name().into()), | ||
87 | range: sema.diagnostics_display_range(d).range, | 99 | range: sema.diagnostics_display_range(d).range, |
88 | message: d.message(), | 100 | message: d.message(), |
89 | severity: Severity::Error, | 101 | severity: Severity::Error, |
@@ -110,6 +122,7 @@ fn check_unnecessary_braces_in_use_statement( | |||
110 | }); | 122 | }); |
111 | 123 | ||
112 | acc.push(Diagnostic { | 124 | acc.push(Diagnostic { |
125 | name: None, | ||
113 | range: use_range, | 126 | range: use_range, |
114 | message: "Unnecessary braces in use statement".to_string(), | 127 | message: "Unnecessary braces in use statement".to_string(), |
115 | severity: Severity::WeakWarning, | 128 | severity: Severity::WeakWarning, |
@@ -156,6 +169,7 @@ fn check_struct_shorthand_initialization( | |||
156 | 169 | ||
157 | let field_range = record_field.syntax().text_range(); | 170 | let field_range = record_field.syntax().text_range(); |
158 | acc.push(Diagnostic { | 171 | acc.push(Diagnostic { |
172 | name: None, | ||
159 | range: field_range, | 173 | range: field_range, |
160 | message: "Shorthand struct initialization".to_string(), | 174 | message: "Shorthand struct initialization".to_string(), |
161 | severity: Severity::WeakWarning, | 175 | severity: Severity::WeakWarning, |
@@ -173,6 +187,7 @@ fn check_struct_shorthand_initialization( | |||
173 | 187 | ||
174 | #[cfg(test)] | 188 | #[cfg(test)] |
175 | mod tests { | 189 | mod tests { |
190 | use std::collections::HashSet; | ||
176 | use stdx::trim_indent; | 191 | use stdx::trim_indent; |
177 | use test_utils::assert_eq_text; | 192 | use test_utils::assert_eq_text; |
178 | 193 | ||
@@ -188,7 +203,8 @@ mod tests { | |||
188 | let after = trim_indent(ra_fixture_after); | 203 | let after = trim_indent(ra_fixture_after); |
189 | 204 | ||
190 | let (analysis, file_position) = analysis_and_position(ra_fixture_before); | 205 | let (analysis, file_position) = analysis_and_position(ra_fixture_before); |
191 | let diagnostic = analysis.diagnostics(file_position.file_id, true).unwrap().pop().unwrap(); | 206 | let diagnostic = |
207 | analysis.diagnostics(file_position.file_id, true, None).unwrap().pop().unwrap(); | ||
192 | let mut fix = diagnostic.fix.unwrap(); | 208 | let mut fix = diagnostic.fix.unwrap(); |
193 | let edit = fix.source_change.source_file_edits.pop().unwrap().edit; | 209 | let edit = fix.source_change.source_file_edits.pop().unwrap().edit; |
194 | let target_file_contents = analysis.file_text(file_position.file_id).unwrap(); | 210 | let target_file_contents = analysis.file_text(file_position.file_id).unwrap(); |
@@ -214,7 +230,7 @@ mod tests { | |||
214 | let ra_fixture_after = &trim_indent(ra_fixture_after); | 230 | let ra_fixture_after = &trim_indent(ra_fixture_after); |
215 | let (analysis, file_pos) = analysis_and_position(ra_fixture_before); | 231 | let (analysis, file_pos) = analysis_and_position(ra_fixture_before); |
216 | let current_file_id = file_pos.file_id; | 232 | let current_file_id = file_pos.file_id; |
217 | let diagnostic = analysis.diagnostics(current_file_id, true).unwrap().pop().unwrap(); | 233 | let diagnostic = analysis.diagnostics(current_file_id, true, None).unwrap().pop().unwrap(); |
218 | let mut fix = diagnostic.fix.unwrap(); | 234 | let mut fix = diagnostic.fix.unwrap(); |
219 | let edit = fix.source_change.source_file_edits.pop().unwrap(); | 235 | let edit = fix.source_change.source_file_edits.pop().unwrap(); |
220 | let changed_file_id = edit.file_id; | 236 | let changed_file_id = edit.file_id; |
@@ -235,14 +251,58 @@ mod tests { | |||
235 | let analysis = mock.analysis(); | 251 | let analysis = mock.analysis(); |
236 | let diagnostics = files | 252 | let diagnostics = files |
237 | .into_iter() | 253 | .into_iter() |
238 | .flat_map(|file_id| analysis.diagnostics(file_id, true).unwrap()) | 254 | .flat_map(|file_id| analysis.diagnostics(file_id, true, None).unwrap()) |
239 | .collect::<Vec<_>>(); | 255 | .collect::<Vec<_>>(); |
240 | assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); | 256 | assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); |
241 | } | 257 | } |
242 | 258 | ||
259 | /// Takes a multi-file input fixture with an annotated cursor position and a list of disabled diagnostics, | ||
260 | /// and checks that the provided diagnostics aren't emitted during analysis. | ||
261 | fn check_disabled_diagnostics(ra_fixture: &str, disabled_diagnostics: &[&'static str]) { | ||
262 | let disabled_diagnostics: HashSet<_> = | ||
263 | disabled_diagnostics.into_iter().map(|diag| diag.to_string()).collect(); | ||
264 | |||
265 | let mock = MockAnalysis::with_files(ra_fixture); | ||
266 | let files = mock.files().map(|(it, _)| it).collect::<Vec<_>>(); | ||
267 | let analysis = mock.analysis(); | ||
268 | |||
269 | let diagnostics = files | ||
270 | .clone() | ||
271 | .into_iter() | ||
272 | .flat_map(|file_id| { | ||
273 | analysis.diagnostics(file_id, true, Some(disabled_diagnostics.clone())).unwrap() | ||
274 | }) | ||
275 | .collect::<Vec<_>>(); | ||
276 | |||
277 | // First, check that a diagnostic is not emitted when it is in the disabled diagnostics list. | ||
278 | for diagnostic in diagnostics { | ||
279 | if let Some(name) = diagnostic.name { | ||
280 | assert!(!disabled_diagnostics.contains(&name), "Diagnostic {} is disabled but was emitted", name); | ||
281 | } | ||
282 | } | ||
283 | |||
284 | // Then, repeat the check without the config, to be sure that these diagnostics | ||
285 | // are emitted at all. | ||
286 | // This keeps the test from going stale if e.g. a diagnostic's name changes: | ||
287 | // without this second run the test would pass simply because a diagnostic with the old name | ||
288 | // no longer exists. | ||
289 | let diagnostics = files | ||
290 | .into_iter() | ||
291 | .flat_map(|file_id| analysis.diagnostics(file_id, true, None).unwrap()) | ||
292 | .collect::<Vec<_>>(); | ||
293 | |||
294 | assert!( | ||
295 | diagnostics | ||
296 | .into_iter() | ||
297 | .filter_map(|diag| diag.name) | ||
298 | .any(|name| disabled_diagnostics.contains(&name)), | ||
299 | "At least one of the diagnostics was not emitted even without config; are the diagnostics names correct?" | ||
300 | ); | ||
301 | } | ||
302 | |||
243 | fn check_expect(ra_fixture: &str, expect: Expect) { | 303 | fn check_expect(ra_fixture: &str, expect: Expect) { |
244 | let (analysis, file_id) = single_file(ra_fixture); | 304 | let (analysis, file_id) = single_file(ra_fixture); |
245 | let diagnostics = analysis.diagnostics(file_id, true).unwrap(); | 305 | let diagnostics = analysis.diagnostics(file_id, true, None).unwrap(); |
246 | expect.assert_debug_eq(&diagnostics) | 306 | expect.assert_debug_eq(&diagnostics) |
247 | } | 307 | } |
248 | 308 | ||
@@ -502,6 +562,9 @@ fn test_fn() { | |||
502 | expect![[r#" | 562 | expect![[r#" |
503 | [ | 563 | [ |
504 | Diagnostic { | 564 | Diagnostic { |
565 | name: Some( | ||
566 | "unresolved-module", | ||
567 | ), | ||
505 | message: "unresolved module", | 568 | message: "unresolved module", |
506 | range: 0..8, | 569 | range: 0..8, |
507 | severity: Error, | 570 | severity: Error, |
@@ -675,4 +738,9 @@ struct Foo { | |||
675 | ", | 738 | ", |
676 | ) | 739 | ) |
677 | } | 740 | } |
741 | |||
742 | #[test] | ||
743 | fn test_disabled_diagnostics() { | ||
744 | check_disabled_diagnostics(r#"mod foo;"#, &["unresolved-module"]); | ||
745 | } | ||
678 | } | 746 | } |
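The new third argument to `Analysis::diagnostics` threads the set of disabled diagnostic names through to the sink builder above. A minimal caller-side sketch, assuming an `analysis` handle and `file_id` are already in scope (only the "unresolved-module" name is taken from the tests; the rest is illustrative):

    use std::collections::HashSet;

    // Suppress `unresolved-module`; passing `None` keeps every diagnostic.
    let disabled: HashSet<String> = std::iter::once("unresolved-module".to_string()).collect();
    let diagnostics = analysis.diagnostics(file_id, true, Some(disabled)).unwrap();
    assert!(diagnostics.iter().all(|d| d.name.as_deref() != Some("unresolved-module")));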
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 002adf915..596bc872d 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs | |||
@@ -43,7 +43,7 @@ pub struct InlayHint { | |||
43 | // rust-analyzer shows additional information inline with the source code. | 43 | // rust-analyzer shows additional information inline with the source code. |
44 | // Editors usually render this using read-only virtual text snippets interspersed with code. | 44 | // Editors usually render this using read-only virtual text snippets interspersed with code. |
45 | // | 45 | // |
46 | // rust-analyzer shows hits for | 46 | // rust-analyzer shows hints for |
47 | // | 47 | // |
48 | // * types of local variables | 48 | // * types of local variables |
49 | // * names of function arguments | 49 | // * names of function arguments |
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index eb6389529..4b797f374 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs | |||
@@ -44,7 +44,7 @@ mod syntax_highlighting; | |||
44 | mod syntax_tree; | 44 | mod syntax_tree; |
45 | mod typing; | 45 | mod typing; |
46 | 46 | ||
47 | use std::sync::Arc; | 47 | use std::{collections::HashSet, sync::Arc}; |
48 | 48 | ||
49 | use base_db::{ | 49 | use base_db::{ |
50 | salsa::{self, ParallelDatabase}, | 50 | salsa::{self, ParallelDatabase}, |
@@ -101,6 +101,7 @@ pub type Cancelable<T> = Result<T, Canceled>; | |||
101 | 101 | ||
102 | #[derive(Debug)] | 102 | #[derive(Debug)] |
103 | pub struct Diagnostic { | 103 | pub struct Diagnostic { |
104 | pub name: Option<String>, | ||
104 | pub message: String, | 105 | pub message: String, |
105 | pub range: TextRange, | 106 | pub range: TextRange, |
106 | pub severity: Severity, | 107 | pub severity: Severity, |
@@ -147,7 +148,7 @@ pub struct AnalysisHost { | |||
147 | } | 148 | } |
148 | 149 | ||
149 | impl AnalysisHost { | 150 | impl AnalysisHost { |
150 | pub fn new(lru_capacity: Option<usize>) -> AnalysisHost { | 151 | pub fn new(lru_capacity: Option<usize>) -> Self { |
151 | AnalysisHost { db: RootDatabase::new(lru_capacity) } | 152 | AnalysisHost { db: RootDatabase::new(lru_capacity) } |
152 | } | 153 | } |
153 | 154 | ||
@@ -496,8 +497,11 @@ impl Analysis { | |||
496 | &self, | 497 | &self, |
497 | file_id: FileId, | 498 | file_id: FileId, |
498 | enable_experimental: bool, | 499 | enable_experimental: bool, |
500 | disabled_diagnostics: Option<HashSet<String>>, | ||
499 | ) -> Cancelable<Vec<Diagnostic>> { | 501 | ) -> Cancelable<Vec<Diagnostic>> { |
500 | self.with_db(|db| diagnostics::diagnostics(db, file_id, enable_experimental)) | 502 | self.with_db(|db| { |
503 | diagnostics::diagnostics(db, file_id, enable_experimental, disabled_diagnostics) | ||
504 | }) | ||
501 | } | 505 | } |
502 | 506 | ||
503 | /// Returns the edit required to rename reference at the position to the new | 507 | /// Returns the edit required to rename reference at the position to the new |
diff --git a/crates/proc_macro_api/src/lib.rs b/crates/proc_macro_api/src/lib.rs index 15db57eb2..d5e87cf7d 100644 --- a/crates/proc_macro_api/src/lib.rs +++ b/crates/proc_macro_api/src/lib.rs | |||
@@ -89,9 +89,8 @@ impl ProcMacroClient { | |||
89 | macros | 89 | macros |
90 | .into_iter() | 90 | .into_iter() |
91 | .filter_map(|(name, kind)| { | 91 | .filter_map(|(name, kind)| { |
92 | // FIXME: Support custom derive only for now. | ||
93 | match kind { | 92 | match kind { |
94 | ProcMacroKind::CustomDerive => { | 93 | ProcMacroKind::CustomDerive | ProcMacroKind::FuncLike => { |
95 | let name = SmolStr::new(&name); | 94 | let name = SmolStr::new(&name); |
96 | let expander: Arc<dyn tt::TokenExpander> = | 95 | let expander: Arc<dyn tt::TokenExpander> = |
97 | Arc::new(ProcMacroProcessExpander { | 96 | Arc::new(ProcMacroProcessExpander { |
@@ -101,7 +100,8 @@ impl ProcMacroClient { | |||
101 | }); | 100 | }); |
102 | Some((name, expander)) | 101 | Some((name, expander)) |
103 | } | 102 | } |
104 | _ => None, | 103 | // FIXME: Attribute macros are currently unsupported. |
104 | ProcMacroKind::Attr => None, | ||
105 | } | 105 | } |
106 | }) | 106 | }) |
107 | .collect() | 107 | .collect() |
diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml index 7171f0808..a468b5560 100644 --- a/crates/proc_macro_srv/Cargo.toml +++ b/crates/proc_macro_srv/Cargo.toml | |||
@@ -21,7 +21,9 @@ test_utils = { path = "../test_utils" } | |||
21 | [dev-dependencies] | 21 | [dev-dependencies] |
22 | cargo_metadata = "0.11.1" | 22 | cargo_metadata = "0.11.1" |
23 | difference = "2.0.0" | 23 | difference = "2.0.0" |
24 | # used as proc macro test target | 24 | |
25 | # used as proc macro test targets | ||
25 | serde_derive = "1.0.106" | 26 | serde_derive = "1.0.106" |
27 | proc_macro_test = { path = "../proc_macro_test" } | ||
26 | 28 | ||
27 | toolchain = { path = "../toolchain" } | 29 | toolchain = { path = "../toolchain" } |
diff --git a/crates/proc_macro_srv/src/tests/mod.rs b/crates/proc_macro_srv/src/tests/mod.rs index 8e6f28abd..1a827cbd7 100644 --- a/crates/proc_macro_srv/src/tests/mod.rs +++ b/crates/proc_macro_srv/src/tests/mod.rs | |||
@@ -35,7 +35,7 @@ SUBTREE $ | |||
35 | 35 | ||
36 | #[test] | 36 | #[test] |
37 | fn test_derive_proc_macro_list() { | 37 | fn test_derive_proc_macro_list() { |
38 | let res = list("serde_derive", "1.0").join("\n"); | 38 | let res = list("serde_derive", "1").join("\n"); |
39 | 39 | ||
40 | assert_eq_text!( | 40 | assert_eq_text!( |
41 | &res, | 41 | &res, |
@@ -43,3 +43,16 @@ fn test_derive_proc_macro_list() { | |||
43 | Deserialize [CustomDerive]"# | 43 | Deserialize [CustomDerive]"# |
44 | ); | 44 | ); |
45 | } | 45 | } |
46 | |||
47 | /// Tests that we find and classify non-derive macros correctly. | ||
48 | #[test] | ||
49 | fn list_test_macros() { | ||
50 | let res = list("proc_macro_test", "0.0.0").join("\n"); | ||
51 | |||
52 | assert_eq_text!( | ||
53 | &res, | ||
54 | r#"function_like_macro [FuncLike] | ||
55 | attribute_macro [Attr] | ||
56 | DummyTrait [CustomDerive]"# | ||
57 | ); | ||
58 | } | ||
diff --git a/crates/proc_macro_srv/src/tests/utils.rs b/crates/proc_macro_srv/src/tests/utils.rs index 5828512d6..36942147d 100644 --- a/crates/proc_macro_srv/src/tests/utils.rs +++ b/crates/proc_macro_srv/src/tests/utils.rs | |||
@@ -13,7 +13,7 @@ mod fixtures { | |||
13 | // Use current project metadata to get the proc-macro dylib path | 13 | // Use current project metadata to get the proc-macro dylib path |
14 | pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf { | 14 | pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf { |
15 | let command = Command::new(toolchain::cargo()) | 15 | let command = Command::new(toolchain::cargo()) |
16 | .args(&["check", "--message-format", "json"]) | 16 | .args(&["check", "--tests", "--message-format", "json"]) |
17 | .output() | 17 | .output() |
18 | .unwrap() | 18 | .unwrap() |
19 | .stdout; | 19 | .stdout; |
diff --git a/crates/proc_macro_test/Cargo.toml b/crates/proc_macro_test/Cargo.toml new file mode 100644 index 000000000..7b0f64f31 --- /dev/null +++ b/crates/proc_macro_test/Cargo.toml | |||
@@ -0,0 +1,10 @@ | |||
1 | [package] | ||
2 | name = "proc_macro_test" | ||
3 | version = "0.0.0" | ||
4 | license = "MIT OR Apache-2.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | edition = "2018" | ||
7 | |||
8 | [lib] | ||
9 | doctest = false | ||
10 | proc-macro = true | ||
diff --git a/crates/proc_macro_test/src/lib.rs b/crates/proc_macro_test/src/lib.rs new file mode 100644 index 000000000..ec2a114a3 --- /dev/null +++ b/crates/proc_macro_test/src/lib.rs | |||
@@ -0,0 +1,18 @@ | |||
1 | //! Exports a few trivial procedural macros for testing. | ||
2 | |||
3 | use proc_macro::TokenStream; | ||
4 | |||
5 | #[proc_macro] | ||
6 | pub fn function_like_macro(args: TokenStream) -> TokenStream { | ||
7 | args | ||
8 | } | ||
9 | |||
10 | #[proc_macro_attribute] | ||
11 | pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream { | ||
12 | item | ||
13 | } | ||
14 | |||
15 | #[proc_macro_derive(DummyTrait)] | ||
16 | pub fn derive_macro(_item: TokenStream) -> TokenStream { | ||
17 | TokenStream::new() | ||
18 | } | ||
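The new `proc_macro_test` crate only exists as a test target for the server; a hypothetical downstream use of its three macros could look like this (each macro either passes its input through or emits nothing, so the expansions are trivial):

    use proc_macro_test::{attribute_macro, function_like_macro, DummyTrait};

    // Expands to `struct Passthrough;` because the macro returns its input unchanged.
    function_like_macro!(struct Passthrough;);

    // The attribute returns the annotated item as-is.
    #[attribute_macro]
    fn annotated() {}

    // The derive emits an empty token stream, so this only marks the struct.
    #[derive(DummyTrait)]
    struct Marked;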
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs index 0f614f9e0..43f0196af 100644 --- a/crates/rust-analyzer/src/cli/analysis_bench.rs +++ b/crates/rust-analyzer/src/cli/analysis_bench.rs | |||
@@ -71,7 +71,7 @@ impl BenchCmd { | |||
71 | match &self.what { | 71 | match &self.what { |
72 | BenchWhat::Highlight { .. } => { | 72 | BenchWhat::Highlight { .. } => { |
73 | let res = do_work(&mut host, file_id, |analysis| { | 73 | let res = do_work(&mut host, file_id, |analysis| { |
74 | analysis.diagnostics(file_id, true).unwrap(); | 74 | analysis.diagnostics(file_id, true, None).unwrap(); |
75 | analysis.highlight_as_html(file_id, false).unwrap() | 75 | analysis.highlight_as_html(file_id, false).unwrap() |
76 | }); | 76 | }); |
77 | if verbosity.is_verbose() { | 77 | if verbosity.is_verbose() { |
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 3371c4fd3..31eb7ff3f 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs | |||
@@ -47,7 +47,7 @@ pub fn diagnostics( | |||
47 | String::from("unknown") | 47 | String::from("unknown") |
48 | }; | 48 | }; |
49 | println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); | 49 | println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); |
50 | for diagnostic in analysis.diagnostics(file_id, true).unwrap() { | 50 | for diagnostic in analysis.diagnostics(file_id, true, None).unwrap() { |
51 | if matches!(diagnostic.severity, Severity::Error) { | 51 | if matches!(diagnostic.severity, Severity::Error) { |
52 | found_error = true; | 52 | found_error = true; |
53 | } | 53 | } |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 33fb5e9c2..44fd7c286 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -7,7 +7,7 @@ | |||
7 | //! configure the server itself, feature flags are passed into analysis, and | 7 | //! configure the server itself, feature flags are passed into analysis, and |
8 | //! tweak things like automatic insertion of `()` in completions. | 8 | //! tweak things like automatic insertion of `()` in completions. |
9 | 9 | ||
10 | use std::{ffi::OsString, path::PathBuf}; | 10 | use std::{collections::HashSet, ffi::OsString, path::PathBuf}; |
11 | 11 | ||
12 | use flycheck::FlycheckConfig; | 12 | use flycheck::FlycheckConfig; |
13 | use ide::{AssistConfig, CompletionConfig, HoverConfig, InlayHintsConfig}; | 13 | use ide::{AssistConfig, CompletionConfig, HoverConfig, InlayHintsConfig}; |
@@ -45,6 +45,14 @@ pub struct Config { | |||
45 | pub with_sysroot: bool, | 45 | pub with_sysroot: bool, |
46 | pub linked_projects: Vec<LinkedProject>, | 46 | pub linked_projects: Vec<LinkedProject>, |
47 | pub root_path: AbsPathBuf, | 47 | pub root_path: AbsPathBuf, |
48 | |||
49 | pub analysis: AnalysisConfig, | ||
50 | } | ||
51 | |||
52 | /// Configuration parameters for the analysis run. | ||
53 | #[derive(Debug, Default, Clone)] | ||
54 | pub struct AnalysisConfig { | ||
55 | pub disabled_diagnostics: HashSet<String>, | ||
48 | } | 56 | } |
49 | 57 | ||
50 | #[derive(Debug, Clone, Eq, PartialEq)] | 58 | #[derive(Debug, Clone, Eq, PartialEq)] |
@@ -176,6 +184,8 @@ impl Config { | |||
176 | hover: HoverConfig::default(), | 184 | hover: HoverConfig::default(), |
177 | linked_projects: Vec::new(), | 185 | linked_projects: Vec::new(), |
178 | root_path, | 186 | root_path, |
187 | |||
188 | analysis: AnalysisConfig::default(), | ||
179 | } | 189 | } |
180 | } | 190 | } |
181 | 191 | ||
@@ -293,6 +303,8 @@ impl Config { | |||
293 | goto_type_def: data.hoverActions_enable && data.hoverActions_gotoTypeDef, | 303 | goto_type_def: data.hoverActions_enable && data.hoverActions_gotoTypeDef, |
294 | }; | 304 | }; |
295 | 305 | ||
306 | self.analysis = AnalysisConfig { disabled_diagnostics: data.analysis_disabledDiagnostics }; | ||
307 | |||
296 | log::info!("Config::update() = {:#?}", self); | 308 | log::info!("Config::update() = {:#?}", self); |
297 | } | 309 | } |
298 | 310 | ||
@@ -357,6 +369,14 @@ impl Config { | |||
357 | self.client_caps.status_notification = get_bool("statusNotification"); | 369 | self.client_caps.status_notification = get_bool("statusNotification"); |
358 | } | 370 | } |
359 | } | 371 | } |
372 | |||
373 | pub fn disabled_diagnostics(&self) -> Option<HashSet<String>> { | ||
374 | if self.analysis.disabled_diagnostics.is_empty() { | ||
375 | None | ||
376 | } else { | ||
377 | Some(self.analysis.disabled_diagnostics.clone()) | ||
378 | } | ||
379 | } | ||
360 | } | 380 | } |
361 | 381 | ||
362 | #[derive(Deserialize)] | 382 | #[derive(Deserialize)] |
@@ -444,5 +464,7 @@ config_data! { | |||
444 | rustfmt_overrideCommand: Option<Vec<String>> = None, | 464 | rustfmt_overrideCommand: Option<Vec<String>> = None, |
445 | 465 | ||
446 | withSysroot: bool = true, | 466 | withSysroot: bool = true, |
467 | |||
468 | analysis_disabledDiagnostics: HashSet<String> = HashSet::new(), | ||
447 | } | 469 | } |
448 | } | 470 | } |
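Judging by the existing `config_data!` naming scheme, the new entry is presumably exposed to clients as `rust-analyzer.analysis.disabledDiagnostics` (an assumption, not confirmed by this diff). Server-side, an empty set collapses to `None`; a standalone sketch of that rule:

    use std::collections::HashSet;

    // Mirrors Config::disabled_diagnostics(): an empty set means "no filtering at all".
    fn disabled(set: &HashSet<String>) -> Option<HashSet<String>> {
        if set.is_empty() { None } else { Some(set.clone()) }
    }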
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 74f73655a..4f77b1b4d 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -272,19 +272,24 @@ pub(crate) fn handle_document_symbol( | |||
272 | parents.push((doc_symbol, symbol.parent)); | 272 | parents.push((doc_symbol, symbol.parent)); |
273 | } | 273 | } |
274 | let mut document_symbols = Vec::new(); | 274 | let mut document_symbols = Vec::new(); |
275 | // Constructs `document_symbols` from `parents`, in order from the end. | ||
275 | while let Some((node, parent)) = parents.pop() { | 276 | while let Some((node, parent)) = parents.pop() { |
276 | match parent { | 277 | match parent { |
277 | None => document_symbols.push(node), | 278 | None => document_symbols.push(node), |
278 | Some(i) => { | 279 | Some(i) => { |
279 | let children = &mut parents[i].0.children; | 280 | parents[i].0.children.get_or_insert_with(Vec::new).push(node); |
280 | if children.is_none() { | ||
281 | *children = Some(Vec::new()); | ||
282 | } | ||
283 | children.as_mut().unwrap().push(node); | ||
284 | } | 281 | } |
285 | } | 282 | } |
286 | } | 283 | } |
287 | 284 | ||
285 | fn reverse(symbols: &mut Vec<DocumentSymbol>) { | ||
286 | for sym in symbols.iter_mut() { | ||
287 | sym.children.as_mut().map(|c| reverse(c)); | ||
288 | } | ||
289 | symbols.reverse(); | ||
290 | } | ||
291 | reverse(&mut document_symbols); | ||
292 | |||
288 | let res = if snap.config.client_caps.hierarchical_symbols { | 293 | let res = if snap.config.client_caps.hierarchical_symbols { |
289 | document_symbols.into() | 294 | document_symbols.into() |
290 | } else { | 295 | } else { |
@@ -770,7 +775,11 @@ fn handle_fixes( | |||
770 | None => {} | 775 | None => {} |
771 | }; | 776 | }; |
772 | 777 | ||
773 | let diagnostics = snap.analysis.diagnostics(file_id, snap.config.experimental_diagnostics)?; | 778 | let diagnostics = snap.analysis.diagnostics( |
779 | file_id, | ||
780 | snap.config.experimental_diagnostics, | ||
781 | snap.config.disabled_diagnostics(), | ||
782 | )?; | ||
774 | 783 | ||
775 | for fix in diagnostics | 784 | for fix in diagnostics |
776 | .into_iter() | 785 | .into_iter() |
@@ -859,10 +868,10 @@ pub(crate) fn handle_resolve_code_action( | |||
859 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); | 868 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); |
860 | 869 | ||
861 | let assists = snap.analysis.resolved_assists(&snap.config.assist, frange)?; | 870 | let assists = snap.analysis.resolved_assists(&snap.config.assist, frange)?; |
862 | let (id_string, index) = split_once(¶ms.id, ':').unwrap(); | 871 | let (id, index) = split_once(¶ms.id, ':').unwrap(); |
863 | let index = index.parse::<usize>().unwrap(); | 872 | let index = index.parse::<usize>().unwrap(); |
864 | let assist = &assists[index]; | 873 | let assist = &assists[index]; |
865 | assert!(assist.assist.id().0 == id_string); | 874 | assert!(assist.assist.id.0 == id); |
866 | Ok(to_proto::resolved_code_action(&snap, assist.clone())?.edit) | 875 | Ok(to_proto::resolved_code_action(&snap, assist.clone())?.edit) |
867 | } | 876 | } |
868 | 877 | ||
@@ -1044,7 +1053,11 @@ pub(crate) fn publish_diagnostics( | |||
1044 | let line_index = snap.analysis.file_line_index(file_id)?; | 1053 | let line_index = snap.analysis.file_line_index(file_id)?; |
1045 | let diagnostics: Vec<Diagnostic> = snap | 1054 | let diagnostics: Vec<Diagnostic> = snap |
1046 | .analysis | 1055 | .analysis |
1047 | .diagnostics(file_id, snap.config.experimental_diagnostics)? | 1056 | .diagnostics( |
1057 | file_id, | ||
1058 | snap.config.experimental_diagnostics, | ||
1059 | snap.config.disabled_diagnostics(), | ||
1060 | )? | ||
1048 | .into_iter() | 1061 | .into_iter() |
1049 | .map(|d| Diagnostic { | 1062 | .map(|d| Diagnostic { |
1050 | range: to_proto::range(&line_index, d.range), | 1063 | range: to_proto::range(&line_index, d.range), |
diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 3976b6529..e1a28b1b4 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs | |||
@@ -237,8 +237,13 @@ pub enum Status { | |||
237 | Invalid, | 237 | Invalid, |
238 | } | 238 | } |
239 | 239 | ||
240 | #[derive(Deserialize, Serialize)] | ||
241 | pub struct StatusParams { | ||
242 | pub status: Status, | ||
243 | } | ||
244 | |||
240 | impl Notification for StatusNotification { | 245 | impl Notification for StatusNotification { |
241 | type Params = Status; | 246 | type Params = StatusParams; |
242 | const METHOD: &'static str = "rust-analyzer/status"; | 247 | const METHOD: &'static str = "rust-analyzer/status"; |
243 | } | 248 | } |
244 | 249 | ||
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index a2cfb4e0d..505505a77 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs | |||
@@ -13,6 +13,7 @@ use crate::{ | |||
13 | lsp_ext, | 13 | lsp_ext, |
14 | main_loop::Task, | 14 | main_loop::Task, |
15 | }; | 15 | }; |
16 | use lsp_ext::StatusParams; | ||
16 | 17 | ||
17 | impl GlobalState { | 18 | impl GlobalState { |
18 | pub(crate) fn update_configuration(&mut self, config: Config) { | 19 | pub(crate) fn update_configuration(&mut self, config: Config) { |
@@ -85,7 +86,9 @@ impl GlobalState { | |||
85 | Status::Invalid => lsp_ext::Status::Invalid, | 86 | Status::Invalid => lsp_ext::Status::Invalid, |
86 | Status::NeedsReload => lsp_ext::Status::NeedsReload, | 87 | Status::NeedsReload => lsp_ext::Status::NeedsReload, |
87 | }; | 88 | }; |
88 | self.send_notification::<lsp_ext::StatusNotification>(lsp_status); | 89 | self.send_notification::<lsp_ext::StatusNotification>(StatusParams { |
90 | status: lsp_status, | ||
91 | }); | ||
89 | } | 92 | } |
90 | } | 93 | } |
91 | pub(crate) fn fetch_workspaces(&mut self) { | 94 | pub(crate) fn fetch_workspaces(&mut self) { |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 8a2cfa2ae..535de2f71 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -704,10 +704,10 @@ pub(crate) fn unresolved_code_action( | |||
704 | index: usize, | 704 | index: usize, |
705 | ) -> Result<lsp_ext::CodeAction> { | 705 | ) -> Result<lsp_ext::CodeAction> { |
706 | let res = lsp_ext::CodeAction { | 706 | let res = lsp_ext::CodeAction { |
707 | title: assist.label(), | 707 | title: assist.label().to_string(), |
708 | id: Some(format!("{}:{}", assist.id().0.to_owned(), index.to_string())), | 708 | id: Some(format!("{}:{}", assist.id.0, index.to_string())), |
709 | group: assist.group().filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0), | 709 | group: assist.group.filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0), |
710 | kind: Some(code_action_kind(assist.id().1)), | 710 | kind: Some(code_action_kind(assist.id.1)), |
711 | edit: None, | 711 | edit: None, |
712 | is_preferred: None, | 712 | is_preferred: None, |
713 | }; | 713 | }; |
diff --git a/crates/rust-analyzer/tests/heavy_tests/main.rs b/crates/rust-analyzer/tests/rust-analyzer/main.rs index 7370505f8..fa315ff8e 100644 --- a/crates/rust-analyzer/tests/heavy_tests/main.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/main.rs | |||
@@ -1,3 +1,13 @@ | |||
1 | //! The most high-level integrated tests for rust-analyzer. | ||
2 | //! | ||
3 | //! These tests run a full LSP event loop, spawn cargo, and process the stdlib from | ||
4 | //! sysroot. For this reason, the tests here are very slow, and should be | ||
5 | //! avoided unless absolutely necessary. | ||
6 | //! | ||
7 | //! In particular, it's fine *not* to test that client & server agree on | ||
8 | //! specific JSON shapes here -- there's little value in such tests, as we can't | ||
9 | //! be sure without a real client anyway. | ||
10 | |||
1 | mod testdir; | 11 | mod testdir; |
2 | mod support; | 12 | mod support; |
3 | 13 | ||
diff --git a/crates/rust-analyzer/tests/heavy_tests/support.rs b/crates/rust-analyzer/tests/rust-analyzer/support.rs index 5bafeba79..5bafeba79 100644 --- a/crates/rust-analyzer/tests/heavy_tests/support.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/support.rs | |||
diff --git a/crates/rust-analyzer/tests/heavy_tests/testdir.rs b/crates/rust-analyzer/tests/rust-analyzer/testdir.rs index 7487e7429..7487e7429 100644 --- a/crates/rust-analyzer/tests/heavy_tests/testdir.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/testdir.rs | |||
diff --git a/crates/ssr/Cargo.toml b/crates/ssr/Cargo.toml index 56c1f7761..7c2090de3 100644 --- a/crates/ssr/Cargo.toml +++ b/crates/ssr/Cargo.toml | |||
@@ -12,6 +12,7 @@ doctest = false | |||
12 | 12 | ||
13 | [dependencies] | 13 | [dependencies] |
14 | rustc-hash = "1.1.0" | 14 | rustc-hash = "1.1.0" |
15 | itertools = "0.9.0" | ||
15 | 16 | ||
16 | text_edit = { path = "../text_edit" } | 17 | text_edit = { path = "../text_edit" } |
17 | syntax = { path = "../syntax" } | 18 | syntax = { path = "../syntax" } |
diff --git a/crates/ssr/src/lib.rs b/crates/ssr/src/lib.rs index 292bd5b9a..ba669fd56 100644 --- a/crates/ssr/src/lib.rs +++ b/crates/ssr/src/lib.rs | |||
@@ -21,7 +21,10 @@ | |||
21 | // code in the `foo` module, we'll insert just `Bar`. | 21 | // code in the `foo` module, we'll insert just `Bar`. |
22 | // | 22 | // |
23 | // Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will | 23 | // Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will |
24 | // match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. | 24 | // match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a |
25 | // placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in | ||
26 | // the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror | ||
27 | // whatever autoderef and autoref was happening implicitly in the matched code. | ||
25 | // | 28 | // |
26 | // The scope of the search / replace will be restricted to the current selection if any, otherwise | 29 | // The scope of the search / replace will be restricted to the current selection if any, otherwise |
27 | // it will apply to the whole workspace. | 30 | // it will apply to the whole workspace. |
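Concretely, using the rule `Foo::foo($a) ==>> bar($a)` from the test added further below (which also defines `Foo` and `bar`), a receiver that relied on implicit adjustments is rewritten with those adjustments spelled out:

    let f = Foo {};
    let fr = &f;
    f.foo();  // rewritten to `bar(&f)`   (autoref made explicit)
    fr.foo(); // rewritten to `bar(&*fr)` (autoderef + autoref made explicit)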
diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs index ffc7202ae..8bb5ced90 100644 --- a/crates/ssr/src/matching.rs +++ b/crates/ssr/src/matching.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | //! process of matching, placeholder values are recorded. | 2 | //! process of matching, placeholder values are recorded. |
3 | 3 | ||
4 | use crate::{ | 4 | use crate::{ |
5 | parsing::{Constraint, NodeKind, Placeholder}, | 5 | parsing::{Constraint, NodeKind, Placeholder, Var}, |
6 | resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, | 6 | resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, |
7 | SsrMatches, | 7 | SsrMatches, |
8 | }; | 8 | }; |
@@ -56,10 +56,6 @@ pub struct Match { | |||
56 | pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>, | 56 | pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>, |
57 | } | 57 | } |
58 | 58 | ||
59 | /// Represents a `$var` in an SSR query. | ||
60 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
61 | pub(crate) struct Var(pub String); | ||
62 | |||
63 | /// Information about a placeholder bound in a match. | 59 | /// Information about a placeholder bound in a match. |
64 | #[derive(Debug)] | 60 | #[derive(Debug)] |
65 | pub(crate) struct PlaceholderMatch { | 61 | pub(crate) struct PlaceholderMatch { |
@@ -69,6 +65,10 @@ pub(crate) struct PlaceholderMatch { | |||
69 | pub(crate) range: FileRange, | 65 | pub(crate) range: FileRange, |
70 | /// More matches, found within `node`. | 66 | /// More matches, found within `node`. |
71 | pub(crate) inner_matches: SsrMatches, | 67 | pub(crate) inner_matches: SsrMatches, |
68 | /// How many times the code that the placeholder matched needed to be dereferenced. Will only be | ||
69 | /// non-zero if the placeholder matched the receiver of a method call. | ||
70 | pub(crate) autoderef_count: usize, | ||
71 | pub(crate) autoref_kind: ast::SelfParamKind, | ||
72 | } | 72 | } |
73 | 73 | ||
74 | #[derive(Debug)] | 74 | #[derive(Debug)] |
@@ -173,7 +173,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
173 | code: &SyntaxNode, | 173 | code: &SyntaxNode, |
174 | ) -> Result<(), MatchFailed> { | 174 | ) -> Result<(), MatchFailed> { |
175 | // Handle placeholders. | 175 | // Handle placeholders. |
176 | if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) { | 176 | if let Some(placeholder) = self.get_placeholder_for_node(pattern) { |
177 | for constraint in &placeholder.constraints { | 177 | for constraint in &placeholder.constraints { |
178 | self.check_constraint(constraint, code)?; | 178 | self.check_constraint(constraint, code)?; |
179 | } | 179 | } |
@@ -183,8 +183,8 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
183 | // probably can't fail range validation, but just to be safe... | 183 | // probably can't fail range validation, but just to be safe... |
184 | self.validate_range(&original_range)?; | 184 | self.validate_range(&original_range)?; |
185 | matches_out.placeholder_values.insert( | 185 | matches_out.placeholder_values.insert( |
186 | Var(placeholder.ident.to_string()), | 186 | placeholder.ident.clone(), |
187 | PlaceholderMatch::new(code, original_range), | 187 | PlaceholderMatch::new(Some(code), original_range), |
188 | ); | 188 | ); |
189 | } | 189 | } |
190 | return Ok(()); | 190 | return Ok(()); |
@@ -487,7 +487,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
487 | } | 487 | } |
488 | if let Phase::Second(match_out) = phase { | 488 | if let Phase::Second(match_out) = phase { |
489 | match_out.placeholder_values.insert( | 489 | match_out.placeholder_values.insert( |
490 | Var(placeholder.ident.to_string()), | 490 | placeholder.ident.clone(), |
491 | PlaceholderMatch::from_range(FileRange { | 491 | PlaceholderMatch::from_range(FileRange { |
492 | file_id: self.sema.original_range(code).file_id, | 492 | file_id: self.sema.original_range(code).file_id, |
493 | range: first_matched_token | 493 | range: first_matched_token |
@@ -536,18 +536,40 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
536 | if pattern_ufcs.function != code_resolved_function { | 536 | if pattern_ufcs.function != code_resolved_function { |
537 | fail_match!("Method call resolved to a different function"); | 537 | fail_match!("Method call resolved to a different function"); |
538 | } | 538 | } |
539 | if code_resolved_function.has_self_param(self.sema.db) { | ||
540 | if let (Some(pattern_type), Some(expr)) = (&pattern_ufcs.qualifier_type, &code.expr()) { | ||
541 | self.check_expr_type(pattern_type, expr)?; | ||
542 | } | ||
543 | } | ||
544 | // Check arguments. | 539 | // Check arguments. |
545 | let mut pattern_args = pattern_ufcs | 540 | let mut pattern_args = pattern_ufcs |
546 | .call_expr | 541 | .call_expr |
547 | .arg_list() | 542 | .arg_list() |
548 | .ok_or_else(|| match_error!("Pattern function call has no args"))? | 543 | .ok_or_else(|| match_error!("Pattern function call has no args"))? |
549 | .args(); | 544 | .args(); |
550 | self.attempt_match_opt(phase, pattern_args.next(), code.expr())?; | 545 | // If the function we're calling takes a self parameter, then we store additional |
546 | // information on the placeholder match about autoderef and autoref. This allows us to use | ||
547 | // the placeholder in a context where autoderef and autoref don't apply. | ||
548 | if code_resolved_function.has_self_param(self.sema.db) { | ||
549 | if let (Some(pattern_type), Some(expr)) = (&pattern_ufcs.qualifier_type, &code.expr()) { | ||
550 | let deref_count = self.check_expr_type(pattern_type, expr)?; | ||
551 | let pattern_receiver = pattern_args.next(); | ||
552 | self.attempt_match_opt(phase, pattern_receiver.clone(), code.expr())?; | ||
553 | if let Phase::Second(match_out) = phase { | ||
554 | if let Some(placeholder_value) = pattern_receiver | ||
555 | .and_then(|n| self.get_placeholder_for_node(n.syntax())) | ||
556 | .and_then(|placeholder| { | ||
557 | match_out.placeholder_values.get_mut(&placeholder.ident) | ||
558 | }) | ||
559 | { | ||
560 | placeholder_value.autoderef_count = deref_count; | ||
561 | placeholder_value.autoref_kind = self | ||
562 | .sema | ||
563 | .resolve_method_call_as_callable(code) | ||
564 | .and_then(|callable| callable.receiver_param(self.sema.db)) | ||
565 | .map(|self_param| self_param.kind()) | ||
566 | .unwrap_or(ast::SelfParamKind::Owned); | ||
567 | } | ||
568 | } | ||
569 | } | ||
570 | } else { | ||
571 | self.attempt_match_opt(phase, pattern_args.next(), code.expr())?; | ||
572 | } | ||
551 | let mut code_args = | 573 | let mut code_args = |
552 | code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args(); | 574 | code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args(); |
553 | loop { | 575 | loop { |
@@ -575,26 +597,35 @@ impl<'db, 'sema> Matcher<'db, 'sema> { | |||
575 | self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax()) | 597 | self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax()) |
576 | } | 598 | } |
577 | 599 | ||
600 | /// Verifies that `expr` matches `pattern_type`, possibly after dereferencing some number of | ||
601 | /// times. Returns the number of times it needed to be dereferenced. | ||
578 | fn check_expr_type( | 602 | fn check_expr_type( |
579 | &self, | 603 | &self, |
580 | pattern_type: &hir::Type, | 604 | pattern_type: &hir::Type, |
581 | expr: &ast::Expr, | 605 | expr: &ast::Expr, |
582 | ) -> Result<(), MatchFailed> { | 606 | ) -> Result<usize, MatchFailed> { |
583 | use hir::HirDisplay; | 607 | use hir::HirDisplay; |
584 | let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| { | 608 | let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| { |
585 | match_error!("Failed to get receiver type for `{}`", expr.syntax().text()) | 609 | match_error!("Failed to get receiver type for `{}`", expr.syntax().text()) |
586 | })?; | 610 | })?; |
587 | if !code_type | 611 | // Temporary needed to make the borrow checker happy. |
612 | let res = code_type | ||
588 | .autoderef(self.sema.db) | 613 | .autoderef(self.sema.db) |
589 | .any(|deref_code_type| *pattern_type == deref_code_type) | 614 | .enumerate() |
590 | { | 615 | .find(|(_, deref_code_type)| pattern_type == deref_code_type) |
591 | fail_match!( | 616 | .map(|(count, _)| count) |
592 | "Pattern type `{}` didn't match code type `{}`", | 617 | .ok_or_else(|| { |
593 | pattern_type.display(self.sema.db), | 618 | match_error!( |
594 | code_type.display(self.sema.db) | 619 | "Pattern type `{}` didn't match code type `{}`", |
595 | ); | 620 | pattern_type.display(self.sema.db), |
596 | } | 621 | code_type.display(self.sema.db) |
597 | Ok(()) | 622 | ) |
623 | }); | ||
624 | res | ||
625 | } | ||
626 | |||
627 | fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> { | ||
628 | self.get_placeholder(&SyntaxElement::Node(node.clone())) | ||
598 | } | 629 | } |
599 | 630 | ||
600 | fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> { | 631 | fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> { |
@@ -676,12 +707,18 @@ fn recording_match_fail_reasons() -> bool { | |||
676 | } | 707 | } |
677 | 708 | ||
678 | impl PlaceholderMatch { | 709 | impl PlaceholderMatch { |
679 | fn new(node: &SyntaxNode, range: FileRange) -> Self { | 710 | fn new(node: Option<&SyntaxNode>, range: FileRange) -> Self { |
680 | Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() } | 711 | Self { |
712 | node: node.cloned(), | ||
713 | range, | ||
714 | inner_matches: SsrMatches::default(), | ||
715 | autoderef_count: 0, | ||
716 | autoref_kind: ast::SelfParamKind::Owned, | ||
717 | } | ||
681 | } | 718 | } |
682 | 719 | ||
683 | fn from_range(range: FileRange) -> Self { | 720 | fn from_range(range: FileRange) -> Self { |
684 | Self { node: None, range, inner_matches: SsrMatches::default() } | 721 | Self::new(None, range) |
685 | } | 722 | } |
686 | } | 723 | } |
687 | 724 | ||
diff --git a/crates/ssr/src/parsing.rs b/crates/ssr/src/parsing.rs index 9570e96e3..05b66dcd7 100644 --- a/crates/ssr/src/parsing.rs +++ b/crates/ssr/src/parsing.rs | |||
@@ -8,7 +8,7 @@ | |||
8 | use crate::errors::bail; | 8 | use crate::errors::bail; |
9 | use crate::{SsrError, SsrPattern, SsrRule}; | 9 | use crate::{SsrError, SsrPattern, SsrRule}; |
10 | use rustc_hash::{FxHashMap, FxHashSet}; | 10 | use rustc_hash::{FxHashMap, FxHashSet}; |
11 | use std::str::FromStr; | 11 | use std::{fmt::Display, str::FromStr}; |
12 | use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; | 12 | use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T}; |
13 | use test_utils::mark; | 13 | use test_utils::mark; |
14 | 14 | ||
@@ -34,12 +34,16 @@ pub(crate) enum PatternElement { | |||
34 | #[derive(Clone, Debug, PartialEq, Eq)] | 34 | #[derive(Clone, Debug, PartialEq, Eq)] |
35 | pub(crate) struct Placeholder { | 35 | pub(crate) struct Placeholder { |
36 | /// The name of this placeholder. e.g. for "$a", this would be "a" | 36 | /// The name of this placeholder. e.g. for "$a", this would be "a" |
37 | pub(crate) ident: SmolStr, | 37 | pub(crate) ident: Var, |
38 | /// A unique name used in place of this placeholder when we parse the pattern as Rust code. | 38 | /// A unique name used in place of this placeholder when we parse the pattern as Rust code. |
39 | stand_in_name: String, | 39 | stand_in_name: String, |
40 | pub(crate) constraints: Vec<Constraint>, | 40 | pub(crate) constraints: Vec<Constraint>, |
41 | } | 41 | } |
42 | 42 | ||
43 | /// Represents a `$var` in an SSR query. | ||
44 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
45 | pub(crate) struct Var(pub String); | ||
46 | |||
43 | #[derive(Clone, Debug, PartialEq, Eq)] | 47 | #[derive(Clone, Debug, PartialEq, Eq)] |
44 | pub(crate) enum Constraint { | 48 | pub(crate) enum Constraint { |
45 | Kind(NodeKind), | 49 | Kind(NodeKind), |
@@ -205,7 +209,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> { | |||
205 | if token.kind == T![$] { | 209 | if token.kind == T![$] { |
206 | let placeholder = parse_placeholder(&mut tokens)?; | 210 | let placeholder = parse_placeholder(&mut tokens)?; |
207 | if !placeholder_names.insert(placeholder.ident.clone()) { | 211 | if !placeholder_names.insert(placeholder.ident.clone()) { |
208 | bail!("Name `{}` repeats more than once", placeholder.ident); | 212 | bail!("Placeholder `{}` repeats more than once", placeholder.ident); |
209 | } | 213 | } |
210 | res.push(PatternElement::Placeholder(placeholder)); | 214 | res.push(PatternElement::Placeholder(placeholder)); |
211 | } else { | 215 | } else { |
@@ -228,7 +232,7 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { | |||
228 | for p in &rule.template.tokens { | 232 | for p in &rule.template.tokens { |
229 | if let PatternElement::Placeholder(placeholder) = p { | 233 | if let PatternElement::Placeholder(placeholder) = p { |
230 | if !defined_placeholders.contains(&placeholder.ident) { | 234 | if !defined_placeholders.contains(&placeholder.ident) { |
231 | undefined.push(format!("${}", placeholder.ident)); | 235 | undefined.push(placeholder.ident.to_string()); |
232 | } | 236 | } |
233 | if !placeholder.constraints.is_empty() { | 237 | if !placeholder.constraints.is_empty() { |
234 | bail!("Replacement placeholders cannot have constraints"); | 238 | bail!("Replacement placeholders cannot have constraints"); |
@@ -344,7 +348,17 @@ impl NodeKind { | |||
344 | 348 | ||
345 | impl Placeholder { | 349 | impl Placeholder { |
346 | fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self { | 350 | fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self { |
347 | Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name } | 351 | Self { |
352 | stand_in_name: format!("__placeholder_{}", name), | ||
353 | constraints, | ||
354 | ident: Var(name.to_string()), | ||
355 | } | ||
356 | } | ||
357 | } | ||
358 | |||
359 | impl Display for Var { | ||
360 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
361 | write!(f, "${}", self.0) | ||
348 | } | 362 | } |
349 | } | 363 | } |
350 | 364 | ||
diff --git a/crates/ssr/src/replacing.rs b/crates/ssr/src/replacing.rs index 8f8fe6149..29284e3f1 100644 --- a/crates/ssr/src/replacing.rs +++ b/crates/ssr/src/replacing.rs | |||
@@ -1,10 +1,11 @@ | |||
1 | //! Code for applying replacement templates for matches that have previously been found. | 1 | //! Code for applying replacement templates for matches that have previously been found. |
2 | 2 | ||
3 | use crate::matching::Var; | ||
4 | use crate::{resolving::ResolvedRule, Match, SsrMatches}; | 3 | use crate::{resolving::ResolvedRule, Match, SsrMatches}; |
4 | use itertools::Itertools; | ||
5 | use rustc_hash::{FxHashMap, FxHashSet}; | 5 | use rustc_hash::{FxHashMap, FxHashSet}; |
6 | use syntax::ast::{self, AstToken}; | 6 | use syntax::ast::{self, AstToken}; |
7 | use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize}; | 7 | use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize}; |
8 | use test_utils::mark; | ||
8 | use text_edit::TextEdit; | 9 | use text_edit::TextEdit; |
9 | 10 | ||
10 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement | 11 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement |
@@ -114,11 +115,33 @@ impl ReplacementRenderer<'_> { | |||
114 | fn render_token(&mut self, token: &SyntaxToken) { | 115 | fn render_token(&mut self, token: &SyntaxToken) { |
115 | if let Some(placeholder) = self.rule.get_placeholder(&token) { | 116 | if let Some(placeholder) = self.rule.get_placeholder(&token) { |
116 | if let Some(placeholder_value) = | 117 | if let Some(placeholder_value) = |
117 | self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string())) | 118 | self.match_info.placeholder_values.get(&placeholder.ident) |
118 | { | 119 | { |
119 | let range = &placeholder_value.range.range; | 120 | let range = &placeholder_value.range.range; |
120 | let mut matched_text = | 121 | let mut matched_text = |
121 | self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); | 122 | self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned(); |
123 | // If a method call is performed directly on the placeholder, then autoderef and | ||
124 | // autoref will apply, so we can substitute whatever the placeholder matched | ||
125 | // directly. If we're not applying a method call, then we need to add explicit | ||
126 | // derefs and refs in order to match whatever was being done implicitly at the match | ||
127 | // site. | ||
128 | if !token_is_method_call_receiver(token) | ||
129 | && (placeholder_value.autoderef_count > 0 | ||
130 | || placeholder_value.autoref_kind != ast::SelfParamKind::Owned) | ||
131 | { | ||
132 | mark::hit!(replace_autoref_autoderef_capture); | ||
133 | let ref_kind = match placeholder_value.autoref_kind { | ||
134 | ast::SelfParamKind::Owned => "", | ||
135 | ast::SelfParamKind::Ref => "&", | ||
136 | ast::SelfParamKind::MutRef => "&mut ", | ||
137 | }; | ||
138 | matched_text = format!( | ||
139 | "{}{}{}", | ||
140 | ref_kind, | ||
141 | "*".repeat(placeholder_value.autoderef_count), | ||
142 | matched_text | ||
143 | ); | ||
144 | } | ||
122 | let edit = matches_to_edit_at_offset( | 145 | let edit = matches_to_edit_at_offset( |
123 | &placeholder_value.inner_matches, | 146 | &placeholder_value.inner_matches, |
124 | self.file_src, | 147 | self.file_src, |
@@ -179,6 +202,29 @@ impl ReplacementRenderer<'_> { | |||
179 | } | 202 | } |
180 | } | 203 | } |
181 | 204 | ||
205 | /// Returns whether token is the receiver of a method call. Note, being within the receiver of a | ||
206 | /// method call doesn't count. e.g. if the token is `$a`, then `$a.foo()` will return true, while | ||
207 | /// `($a + $b).foo()` or `x.foo($a)` will return false. | ||
208 | fn token_is_method_call_receiver(token: &SyntaxToken) -> bool { | ||
209 | use syntax::ast::AstNode; | ||
210 | // Find the first method call among the ancestors of `token`, then check if the only token | ||
211 | // within the receiver is `token`. | ||
212 | if let Some(receiver) = | ||
213 | token.ancestors().find_map(ast::MethodCallExpr::cast).and_then(|call| call.expr()) | ||
214 | { | ||
215 | let tokens = receiver.syntax().descendants_with_tokens().filter_map(|node_or_token| { | ||
216 | match node_or_token { | ||
217 | SyntaxElement::Token(t) => Some(t), | ||
218 | _ => None, | ||
219 | } | ||
220 | }); | ||
221 | if let Some((only_token,)) = tokens.collect_tuple() { | ||
222 | return only_token == *token; | ||
223 | } | ||
224 | } | ||
225 | false | ||
226 | } | ||
227 | |||
182 | fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> { | 228 | fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> { |
183 | use syntax::ast::AstNode; | 229 | use syntax::ast::AstNode; |
184 | if ast::Expr::can_cast(kind) { | 230 | if ast::Expr::can_cast(kind) { |
diff --git a/crates/ssr/src/tests.rs b/crates/ssr/src/tests.rs index 0d0a00090..e45c88864 100644 --- a/crates/ssr/src/tests.rs +++ b/crates/ssr/src/tests.rs | |||
@@ -31,7 +31,7 @@ fn parser_two_delimiters() { | |||
31 | fn parser_repeated_name() { | 31 | fn parser_repeated_name() { |
32 | assert_eq!( | 32 | assert_eq!( |
33 | parse_error_text("foo($a, $a) ==>>"), | 33 | parse_error_text("foo($a, $a) ==>>"), |
34 | "Parse error: Name `a` repeats more than once" | 34 | "Parse error: Placeholder `$a` repeats more than once" |
35 | ); | 35 | ); |
36 | } | 36 | } |
37 | 37 | ||
@@ -1172,3 +1172,110 @@ fn match_trait_method_call() { | |||
1172 | assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]); | 1172 | assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]); |
1173 | assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]); | 1173 | assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]); |
1174 | } | 1174 | } |
1175 | |||
1176 | #[test] | ||
1177 | fn replace_autoref_autoderef_capture() { | ||
1178 | // Here we have several calls to `$a.foo()`. In the first case autoref is applied, in the | ||
1179 | // second, we already have a reference, so it isn't. When $a is used in a context where autoref | ||
1180 | // doesn't apply, we need to prefix it with `&`. Finally, we have some cases where autoderef | ||
1181 | // needs to be applied. | ||
1182 | mark::check!(replace_autoref_autoderef_capture); | ||
1183 | let code = r#" | ||
1184 | struct Foo {} | ||
1185 | impl Foo { | ||
1186 | fn foo(&self) {} | ||
1187 | fn foo2(&self) {} | ||
1188 | } | ||
1189 | fn bar(_: &Foo) {} | ||
1190 | fn main() { | ||
1191 | let f = Foo {}; | ||
1192 | let fr = &f; | ||
1193 | let fr2 = &fr; | ||
1194 | let fr3 = &fr2; | ||
1195 | f.foo(); | ||
1196 | fr.foo(); | ||
1197 | fr2.foo(); | ||
1198 | fr3.foo(); | ||
1199 | } | ||
1200 | "#; | ||
1201 | assert_ssr_transform( | ||
1202 | "Foo::foo($a) ==>> bar($a)", | ||
1203 | code, | ||
1204 | expect![[r#" | ||
1205 | struct Foo {} | ||
1206 | impl Foo { | ||
1207 | fn foo(&self) {} | ||
1208 | fn foo2(&self) {} | ||
1209 | } | ||
1210 | fn bar(_: &Foo) {} | ||
1211 | fn main() { | ||
1212 | let f = Foo {}; | ||
1213 | let fr = &f; | ||
1214 | let fr2 = &fr; | ||
1215 | let fr3 = &fr2; | ||
1216 | bar(&f); | ||
1217 | bar(&*fr); | ||
1218 | bar(&**fr2); | ||
1219 | bar(&***fr3); | ||
1220 | } | ||
1221 | "#]], | ||
1222 | ); | ||
1223 | // If the placeholder is used as the receiver of another method call, then we don't need to | ||
1224 | // explicitly autoderef or autoref. | ||
1225 | assert_ssr_transform( | ||
1226 | "Foo::foo($a) ==>> $a.foo2()", | ||
1227 | code, | ||
1228 | expect![[r#" | ||
1229 | struct Foo {} | ||
1230 | impl Foo { | ||
1231 | fn foo(&self) {} | ||
1232 | fn foo2(&self) {} | ||
1233 | } | ||
1234 | fn bar(_: &Foo) {} | ||
1235 | fn main() { | ||
1236 | let f = Foo {}; | ||
1237 | let fr = &f; | ||
1238 | let fr2 = &fr; | ||
1239 | let fr3 = &fr2; | ||
1240 | f.foo2(); | ||
1241 | fr.foo2(); | ||
1242 | fr2.foo2(); | ||
1243 | fr3.foo2(); | ||
1244 | } | ||
1245 | "#]], | ||
1246 | ); | ||
1247 | } | ||
1248 | |||
1249 | #[test] | ||
1250 | fn replace_autoref_mut() { | ||
1251 | let code = r#" | ||
1252 | struct Foo {} | ||
1253 | impl Foo { | ||
1254 | fn foo(&mut self) {} | ||
1255 | } | ||
1256 | fn bar(_: &mut Foo) {} | ||
1257 | fn main() { | ||
1258 | let mut f = Foo {}; | ||
1259 | f.foo(); | ||
1260 | let fr = &mut f; | ||
1261 | fr.foo(); | ||
1262 | } | ||
1263 | "#; | ||
1264 | assert_ssr_transform( | ||
1265 | "Foo::foo($a) ==>> bar($a)", | ||
1266 | code, | ||
1267 | expect![[r#" | ||
1268 | struct Foo {} | ||
1269 | impl Foo { | ||
1270 | fn foo(&mut self) {} | ||
1271 | } | ||
1272 | fn bar(_: &mut Foo) {} | ||
1273 | fn main() { | ||
1274 | let mut f = Foo {}; | ||
1275 | bar(&mut f); | ||
1276 | let fr = &mut f; | ||
1277 | bar(&mut *fr); | ||
1278 | } | ||
1279 | "#]], | ||
1280 | ); | ||
1281 | } | ||
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 3c5027fe5..265d19288 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs | |||
@@ -17,7 +17,7 @@ pub fn timeit(label: &'static str) -> impl Drop { | |||
17 | 17 | ||
18 | impl Drop for Guard { | 18 | impl Drop for Guard { |
19 | fn drop(&mut self) { | 19 | fn drop(&mut self) { |
20 | eprintln!("{}: {:?}", self.label, self.start.elapsed()) | 20 | eprintln!("{}: {:.2?}", self.label, self.start.elapsed()) |
21 | } | 21 | } |
22 | } | 22 | } |
23 | 23 | ||
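The added precision only changes how the elapsed `std::time::Duration` is rendered; a small sketch of the difference:

    let d = std::time::Duration::from_micros(1_234_567);
    eprintln!("label: {:?}", d);   // label: 1.234567s
    eprintln!("label: {:.2?}", d); // label: 1.23s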
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 47e351f9d..ec3132da8 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml | |||
@@ -13,7 +13,7 @@ doctest = false | |||
13 | [dependencies] | 13 | [dependencies] |
14 | itertools = "0.9.0" | 14 | itertools = "0.9.0" |
15 | rowan = "0.10.0" | 15 | rowan = "0.10.0" |
16 | rustc_lexer = { version = "671.0.0", package = "rustc-ap-rustc_lexer" } | 16 | rustc_lexer = { version = "673.0.0", package = "rustc-ap-rustc_lexer" } |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | arrayvec = "0.5.1" | 18 | arrayvec = "0.5.1" |
19 | once_cell = "1.3.1" | 19 | once_cell = "1.3.1" |
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index 190746e09..060b20966 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs | |||
@@ -91,29 +91,52 @@ impl ast::AssocItemList { | |||
91 | res = make_multiline(res); | 91 | res = make_multiline(res); |
92 | } | 92 | } |
93 | items.into_iter().for_each(|it| res = res.append_item(it)); | 93 | items.into_iter().for_each(|it| res = res.append_item(it)); |
94 | res | 94 | res.fixup_trailing_whitespace().unwrap_or(res) |
95 | } | 95 | } |
96 | 96 | ||
97 | #[must_use] | 97 | #[must_use] |
98 | pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { | 98 | pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { |
99 | let (indent, position) = match self.assoc_items().last() { | 99 | let (indent, position, whitespace) = match self.assoc_items().last() { |
100 | Some(it) => ( | 100 | Some(it) => ( |
101 | leading_indent(it.syntax()).unwrap_or_default().to_string(), | 101 | leading_indent(it.syntax()).unwrap_or_default().to_string(), |
102 | InsertPosition::After(it.syntax().clone().into()), | 102 | InsertPosition::After(it.syntax().clone().into()), |
103 | "\n\n", | ||
103 | ), | 104 | ), |
104 | None => match self.l_curly_token() { | 105 | None => match self.l_curly_token() { |
105 | Some(it) => ( | 106 | Some(it) => ( |
106 | " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), | 107 | " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), |
107 | InsertPosition::After(it.into()), | 108 | InsertPosition::After(it.into()), |
109 | "\n", | ||
108 | ), | 110 | ), |
109 | None => return self.clone(), | 111 | None => return self.clone(), |
110 | }, | 112 | }, |
111 | }; | 113 | }; |
112 | let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); | 114 | let ws = tokens::WsBuilder::new(&format!("{}{}", whitespace, indent)); |
113 | let to_insert: ArrayVec<[SyntaxElement; 2]> = | 115 | let to_insert: ArrayVec<[SyntaxElement; 2]> = |
114 | [ws.ws().into(), item.syntax().clone().into()].into(); | 116 | [ws.ws().into(), item.syntax().clone().into()].into(); |
115 | self.insert_children(position, to_insert) | 117 | self.insert_children(position, to_insert) |
116 | } | 118 | } |
119 | |||
120 | /// Remove extra whitespace between last item and closing curly brace. | ||
121 | fn fixup_trailing_whitespace(&self) -> Option<ast::AssocItemList> { | ||
122 | let first_token_after_items = | ||
123 | self.assoc_items().last()?.syntax().next_sibling_or_token()?; | ||
124 | let last_token_before_curly = self.r_curly_token()?.prev_sibling_or_token()?; | ||
125 | if last_token_before_curly != first_token_after_items { | ||
126 | // there is something more between last item and | ||
127 | // right curly than just whitespace - bail out | ||
128 | return None; | ||
129 | } | ||
130 | let whitespace = | ||
131 | last_token_before_curly.clone().into_token().and_then(ast::Whitespace::cast)?; | ||
132 | let text = whitespace.syntax().text(); | ||
133 | let newline = text.rfind("\n")?; | ||
134 | let keep = tokens::WsBuilder::new(&text[newline..]); | ||
135 | Some(self.replace_children( | ||
136 | first_token_after_items..=last_token_before_curly, | ||
137 | std::iter::once(keep.ws().into()), | ||
138 | )) | ||
139 | } | ||
117 | } | 140 | } |
118 | 141 | ||
119 | impl ast::RecordExprFieldList { | 142 | impl ast::RecordExprFieldList { |
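`fixup_trailing_whitespace` trims the whitespace run between the last associated item and the closing brace down to its final newline plus indentation. A rough string-level sketch of the same idea, assuming nothing about the rowan syntax-tree API:

```rust
// String-level approximation: keep only the text from the last newline of the
// trailing whitespace run up to the closing brace.
fn fixup_trailing_whitespace(body: &str) -> String {
    let close = match body.rfind('}') {
        Some(idx) => idx,
        None => return body.to_string(),
    };
    let before = &body[..close];
    let kept = before.trim_end();
    let ws = &before[kept.len()..];
    match ws.rfind('\n') {
        Some(nl) => format!("{}{}{}", kept, &ws[nl..], &body[close..]),
        None => body.to_string(),
    }
}

fn main() {
    let src = "impl Foo {\n    fn bar() {}\n\n\n}";
    assert_eq!(fixup_trailing_whitespace(src), "impl Foo {\n    fn bar() {}\n}");
}
```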
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 1be01fd88..2e3133449 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md | |||
@@ -412,7 +412,13 @@ Reloads project information (that is, re-executes `cargo metadata`). | |||
412 | 412 | ||
413 | **Method:** `rust-analyzer/status` | 413 | **Method:** `rust-analyzer/status` |
414 | 414 | ||
415 | **Notification:** `"loading" | "ready" | "invalid" | "needsReload"` | 415 | **Notification:** |
416 | |||
417 | ```typescript | ||
418 | interface StatusParams { | ||
419 | status: "loading" | "ready" | "invalid" | "needsReload", | ||
420 | } | ||
421 | ``` | ||
416 | 422 | ||
417 | This notification is sent from server to client. | 423 | This notification is sent from server to client. |
418 | The client can use it to display persistent status to the user (in the modeline). | 424 | The client can use it to display persistent status to the user (in the modeline).
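The notification payload is small enough that the Rust side can mirror it directly; a hypothetical serde model (assuming `serde` and `serde_json`, not the actual rust-analyzer definitions) that serializes to the shape the client expects:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum Status {
    Loading,
    Ready,
    Invalid,
    NeedsReload,
}

#[derive(Serialize, Deserialize)]
struct StatusParams {
    status: Status,
}

fn main() {
    let params = StatusParams { status: Status::NeedsReload };
    assert_eq!(serde_json::to_string(&params).unwrap(), r#"{"status":"needsReload"}"#);
}
```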
diff --git a/docs/dev/style.md b/docs/dev/style.md index 963a6d73d..8effddcda 100644 --- a/docs/dev/style.md +++ b/docs/dev/style.md | |||
@@ -176,6 +176,35 @@ fn frobnicate(walrus: Option<Walrus>) { | |||
176 | } | 176 | } |
177 | ``` | 177 | ``` |
178 | 178 | ||
179 | # Getters & Setters | ||
180 | |||
181 | If a field can have any value without breaking invariants, make the field public. | ||
182 | Conversely, if there is an invariant, document it, enforce it in the "constructor" function, make the field private, and provide a getter. | ||
183 | Never provide setters. | ||
184 | |||
185 | Getters should return borrowed data: | ||
186 | |||
187 | ``` | ||
188 | struct Person { | ||
189 | // Invariant: never empty | ||
190 | first_name: String, | ||
191 | middle_name: Option<String> | ||
192 | } | ||
193 | |||
194 | // Good | ||
195 | impl Person { | ||
196 | fn first_name(&self) -> &str { self.first_name.as_str() } | ||
197 | fn middle_name(&self) -> Option<&str> { self.middle_name.as_deref() } | ||
198 | } | ||
199 | |||
200 | // Not as good | ||
201 | impl Person { | ||
202 | fn first_name(&self) -> String { self.first_name.clone() } | ||
203 | fn middle_name(&self) -> &Option<String> { &self.middle_name } | ||
204 | } | ||
205 | ``` | ||
206 | |||
207 | |||
179 | # Premature Pessimization | 208 | # Premature Pessimization |
180 | 209 | ||
181 | Avoid writing code which is slower than it needs to be. | 210 | Avoid writing code which is slower than it needs to be. |
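The new section says to enforce the invariant in the constructor, but the example only shows getters. A small self-contained sketch tying the two together (the `Person::new` constructor is illustrative, not part of the style guide itself):

```rust
struct Person {
    // Invariant: never empty
    first_name: String,
    middle_name: Option<String>,
}

impl Person {
    fn new(first_name: String, middle_name: Option<String>) -> Person {
        assert!(!first_name.is_empty(), "first_name must not be empty");
        Person { first_name, middle_name }
    }
    fn first_name(&self) -> &str { self.first_name.as_str() }
    fn middle_name(&self) -> Option<&str> { self.middle_name.as_deref() }
}

fn main() {
    let p = Person::new("Ferris".to_string(), None);
    assert_eq!(p.first_name(), "Ferris");
    assert_eq!(p.middle_name(), None);
}
```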
diff --git a/editors/code/.eslintignore b/editors/code/.eslintignore new file mode 100644 index 000000000..3df5c860b --- /dev/null +++ b/editors/code/.eslintignore | |||
@@ -0,0 +1,3 @@ | |||
1 | node_modules | ||
2 | .eslintrc.js | ||
3 | rollup.config.js \ No newline at end of file | ||
diff --git a/editors/code/package.json b/editors/code/package.json index ee5f96bf3..429ff5def 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -609,6 +609,15 @@ | |||
609 | }, | 609 | }, |
610 | "description": "List of warnings that should be displayed with hint severity.\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel.", | 610 | "description": "List of warnings that should be displayed with hint severity.\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel.", |
611 | "default": [] | 611 | "default": [] |
612 | }, | ||
613 | "rust-analyzer.analysis.disabledDiagnostics": { | ||
614 | "type": "array", | ||
615 | "uniqueItems": true, | ||
616 | "items": { | ||
617 | "type": "string" | ||
618 | }, | ||
619 | "description": "List of rust-analyzer diagnostics to disable", | ||
620 | "default": [] | ||
612 | } | 621 | } |
613 | } | 622 | } |
614 | }, | 623 | }, |
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 6e767babf..543f7e02e 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts | |||
@@ -36,7 +36,7 @@ export class Ctx { | |||
36 | 36 | ||
37 | res.pushCleanup(client.start()); | 37 | res.pushCleanup(client.start()); |
38 | await client.onReady(); | 38 | await client.onReady(); |
39 | client.onNotification(ra.status, (status) => res.setStatus(status)); | 39 | client.onNotification(ra.status, (params) => res.setStatus(params.status)); |
40 | return res; | 40 | return res; |
41 | } | 41 | } |
42 | 42 | ||
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts index 494d51c83..8663737a6 100644 --- a/editors/code/src/lsp_ext.ts +++ b/editors/code/src/lsp_ext.ts | |||
@@ -8,7 +8,10 @@ export const analyzerStatus = new lc.RequestType<null, string, void>("rust-analy | |||
8 | export const memoryUsage = new lc.RequestType<null, string, void>("rust-analyzer/memoryUsage"); | 8 | export const memoryUsage = new lc.RequestType<null, string, void>("rust-analyzer/memoryUsage"); |
9 | 9 | ||
10 | export type Status = "loading" | "ready" | "invalid" | "needsReload"; | 10 | export type Status = "loading" | "ready" | "invalid" | "needsReload"; |
11 | export const status = new lc.NotificationType<Status>("rust-analyzer/status"); | 11 | export interface StatusParams { |
12 | status: Status; | ||
13 | } | ||
14 | export const status = new lc.NotificationType<StatusParams>("rust-analyzer/status"); | ||
12 | 15 | ||
13 | export const reloadWorkspace = new lc.RequestType<null, null, void>("rust-analyzer/reloadWorkspace"); | 16 | export const reloadWorkspace = new lc.RequestType<null, null, void>("rust-analyzer/reloadWorkspace"); |
14 | 17 | ||
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index 4b2b614fa..c468468de 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs | |||
@@ -16,7 +16,11 @@ use std::{ | |||
16 | path::{Path, PathBuf}, | 16 | path::{Path, PathBuf}, |
17 | }; | 17 | }; |
18 | 18 | ||
19 | use crate::{not_bash::fs2, project_root, Result}; | 19 | use crate::{ |
20 | ensure_rustfmt, | ||
21 | not_bash::{fs2, pushenv, run}, | ||
22 | project_root, Result, | ||
23 | }; | ||
20 | 24 | ||
21 | pub use self::{ | 25 | pub use self::{ |
22 | gen_assists_docs::{generate_assists_docs, generate_assists_tests}, | 26 | gen_assists_docs::{generate_assists_docs, generate_assists_tests}, |
@@ -71,6 +75,18 @@ fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> { | |||
71 | } | 75 | } |
72 | } | 76 | } |
73 | 77 | ||
78 | const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`"; | ||
79 | |||
80 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | ||
81 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
82 | ensure_rustfmt()?; | ||
83 | let stdout = run!( | ||
84 | "rustfmt --config-path {} --config fn_single_line=true", project_root().join("rustfmt.toml").display(); | ||
85 | <text.to_string().as_bytes() | ||
86 | )?; | ||
87 | Ok(format!("//! {}\n\n{}\n", PREAMBLE, stdout)) | ||
88 | } | ||
89 | |||
74 | fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> { | 90 | fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> { |
75 | do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect() | 91 | do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect() |
76 | } | 92 | } |
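The relocated `reformat` pushes `RUSTUP_TOOLCHAIN=stable`, pipes the generated source through `rustfmt` on stdin via the `run!` macro, and prepends the preamble. A plain-`std` sketch of the same flow, without xtask's helpers and with the `--config-path` argument dropped:

```rust
use std::io::Write;
use std::process::{Command, Stdio};

fn reformat(text: &str) -> std::io::Result<String> {
    // Spawn rustfmt with piped stdin/stdout, feed it the generated code,
    // then prepend the "generated file" preamble to whatever it prints.
    let mut child = Command::new("rustfmt")
        .arg("--config")
        .arg("fn_single_line=true")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;
    child.stdin.take().unwrap().write_all(text.as_bytes())?;
    let out = child.wait_with_output()?;
    let formatted = String::from_utf8_lossy(&out.stdout);
    Ok(format!(
        "//! Generated file, do not edit by hand, see `xtask/src/codegen`\n\n{}",
        formatted
    ))
}

fn main() -> std::io::Result<()> {
    println!("{}", reformat("fn main(){println!(\"hi\");}")?);
    Ok(())
}
```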
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs index 526941f73..4f4968594 100644 --- a/xtask/src/codegen/gen_assists_docs.rs +++ b/xtask/src/codegen/gen_assists_docs.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use std::{fmt, fs, path::Path}; | 3 | use std::{fmt, fs, path::Path}; |
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode}, | 6 | codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, Mode, PREAMBLE}, |
7 | project_root, rust_files, Result, | 7 | project_root, rust_files, Result, |
8 | }; | 8 | }; |
9 | 9 | ||
@@ -15,7 +15,7 @@ pub fn generate_assists_tests(mode: Mode) -> Result<()> { | |||
15 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { | 15 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { |
16 | let assists = Assist::collect()?; | 16 | let assists = Assist::collect()?; |
17 | let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 17 | let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
18 | let contents = contents.trim().to_string() + "\n"; | 18 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
19 | let dst = project_root().join("docs/user/generated_assists.adoc"); | 19 | let dst = project_root().join("docs/user/generated_assists.adoc"); |
20 | codegen::update(&dst, &contents, mode) | 20 | codegen::update(&dst, &contents, mode) |
21 | } | 21 | } |
@@ -134,7 +134,7 @@ r#####" | |||
134 | 134 | ||
135 | buf.push_str(&test) | 135 | buf.push_str(&test) |
136 | } | 136 | } |
137 | let buf = crate::reformat(buf)?; | 137 | let buf = reformat(buf)?; |
138 | codegen::update(&project_root().join(codegen::ASSISTS_TESTS), &buf, mode) | 138 | codegen::update(&project_root().join(codegen::ASSISTS_TESTS), &buf, mode) |
139 | } | 139 | } |
140 | 140 | ||
diff --git a/xtask/src/codegen/gen_feature_docs.rs b/xtask/src/codegen/gen_feature_docs.rs index 31bc3839d..3f0013e82 100644 --- a/xtask/src/codegen/gen_feature_docs.rs +++ b/xtask/src/codegen/gen_feature_docs.rs | |||
@@ -3,14 +3,14 @@ | |||
3 | use std::{fmt, fs, path::PathBuf}; | 3 | use std::{fmt, fs, path::PathBuf}; |
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode}, | 6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode, PREAMBLE}, |
7 | project_root, rust_files, Result, | 7 | project_root, rust_files, Result, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | pub fn generate_feature_docs(mode: Mode) -> Result<()> { | 10 | pub fn generate_feature_docs(mode: Mode) -> Result<()> { |
11 | let features = Feature::collect()?; | 11 | let features = Feature::collect()?; |
12 | let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 12 | let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
13 | let contents = contents.trim().to_string() + "\n"; | 13 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
14 | let dst = project_root().join("docs/user/generated_features.adoc"); | 14 | let dst = project_root().join("docs/user/generated_features.adoc"); |
15 | codegen::update(&dst, &contents, mode)?; | 15 | codegen::update(&dst, &contents, mode)?; |
16 | Ok(()) | 16 | Ok(()) |
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs index dd1f4d6a2..df3ec22c8 100644 --- a/xtask/src/codegen/gen_syntax.rs +++ b/xtask/src/codegen/gen_syntax.rs | |||
@@ -14,7 +14,7 @@ use ungrammar::{rust_grammar, Grammar, Rule}; | |||
14 | 14 | ||
15 | use crate::{ | 15 | use crate::{ |
16 | ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}, | 16 | ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}, |
17 | codegen::{self, update, Mode}, | 17 | codegen::{self, reformat, update, Mode}, |
18 | project_root, Result, | 18 | project_root, Result, |
19 | }; | 19 | }; |
20 | 20 | ||
@@ -61,7 +61,7 @@ fn generate_tokens(grammar: &AstSrc) -> Result<String> { | |||
61 | } | 61 | } |
62 | }); | 62 | }); |
63 | 63 | ||
64 | let pretty = crate::reformat(quote! { | 64 | let pretty = reformat(quote! { |
65 | use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; | 65 | use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; |
66 | #(#tokens)* | 66 | #(#tokens)* |
67 | })? | 67 | })? |
@@ -261,7 +261,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> { | |||
261 | } | 261 | } |
262 | } | 262 | } |
263 | 263 | ||
264 | let pretty = crate::reformat(res)?; | 264 | let pretty = reformat(res)?; |
265 | Ok(pretty) | 265 | Ok(pretty) |
266 | } | 266 | } |
267 | 267 | ||
@@ -383,7 +383,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> { | |||
383 | } | 383 | } |
384 | }; | 384 | }; |
385 | 385 | ||
386 | crate::reformat(ast) | 386 | reformat(ast) |
387 | } | 387 | } |
388 | 388 | ||
389 | fn to_upper_snake_case(s: &str) -> String { | 389 | fn to_upper_snake_case(s: &str) -> String { |
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index 807ef587c..e790d995f 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -3,14 +3,15 @@ | |||
3 | //! See https://github.com/matklad/cargo-xtask/ | 3 | //! See https://github.com/matklad/cargo-xtask/ |
4 | 4 | ||
5 | pub mod not_bash; | 5 | pub mod not_bash; |
6 | pub mod codegen; | ||
7 | mod ast_src; | ||
8 | |||
6 | pub mod install; | 9 | pub mod install; |
7 | pub mod release; | 10 | pub mod release; |
8 | pub mod dist; | 11 | pub mod dist; |
9 | pub mod pre_commit; | 12 | pub mod pre_commit; |
10 | pub mod metrics; | 13 | pub mod metrics; |
11 | 14 | pub mod pre_cache; | |
12 | pub mod codegen; | ||
13 | mod ast_src; | ||
14 | 15 | ||
15 | use std::{ | 16 | use std::{ |
16 | env, | 17 | env, |
@@ -21,7 +22,7 @@ use walkdir::{DirEntry, WalkDir}; | |||
21 | 22 | ||
22 | use crate::{ | 23 | use crate::{ |
23 | codegen::Mode, | 24 | codegen::Mode, |
24 | not_bash::{fs2, pushd, pushenv, rm_rf}, | 25 | not_bash::{pushd, pushenv}, |
25 | }; | 26 | }; |
26 | 27 | ||
27 | pub use anyhow::{bail, Context as _, Result}; | 28 | pub use anyhow::{bail, Context as _, Result}; |
@@ -62,17 +63,6 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> { | |||
62 | Ok(()) | 63 | Ok(()) |
63 | } | 64 | } |
64 | 65 | ||
65 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | ||
66 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
67 | ensure_rustfmt()?; | ||
68 | let stdout = run!( | ||
69 | "rustfmt --config-path {} --config fn_single_line=true", project_root().join("rustfmt.toml").display(); | ||
70 | <text.to_string().as_bytes() | ||
71 | )?; | ||
72 | let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`"; | ||
73 | Ok(format!("//! {}\n\n{}\n", preamble, stdout)) | ||
74 | } | ||
75 | |||
76 | fn ensure_rustfmt() -> Result<()> { | 66 | fn ensure_rustfmt() -> Result<()> { |
77 | let out = run!("rustfmt --version")?; | 67 | let out = run!("rustfmt --version")?; |
78 | if !out.contains("stable") { | 68 | if !out.contains("stable") { |
@@ -119,42 +109,6 @@ pub fn run_fuzzer() -> Result<()> { | |||
119 | Ok(()) | 109 | Ok(()) |
120 | } | 110 | } |
121 | 111 | ||
122 | /// Cleans the `./target` dir after the build such that only | ||
123 | /// dependencies are cached on CI. | ||
124 | pub fn run_pre_cache() -> Result<()> { | ||
125 | let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); | ||
126 | if !slow_tests_cookie.exists() { | ||
127 | panic!("slow tests were skipped on CI!") | ||
128 | } | ||
129 | rm_rf(slow_tests_cookie)?; | ||
130 | |||
131 | for entry in Path::new("./target/debug").read_dir()? { | ||
132 | let entry = entry?; | ||
133 | if entry.file_type().map(|it| it.is_file()).ok() == Some(true) { | ||
134 | // Can't delete yourself on windows :-( | ||
135 | if !entry.path().ends_with("xtask.exe") { | ||
136 | rm_rf(&entry.path())? | ||
137 | } | ||
138 | } | ||
139 | } | ||
140 | |||
141 | fs2::remove_file("./target/.rustc_info.json")?; | ||
142 | let to_delete = ["hir", "heavy_test", "xtask", "ide", "rust-analyzer"]; | ||
143 | for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { | ||
144 | for entry in Path::new(dir).read_dir()? { | ||
145 | let entry = entry?; | ||
146 | if to_delete.iter().any(|&it| entry.path().display().to_string().contains(it)) { | ||
147 | // Can't delete yourself on windows :-( | ||
148 | if !entry.path().ends_with("xtask.exe") { | ||
149 | rm_rf(&entry.path())? | ||
150 | } | ||
151 | } | ||
152 | } | ||
153 | } | ||
154 | |||
155 | Ok(()) | ||
156 | } | ||
157 | |||
158 | fn is_release_tag(tag: &str) -> bool { | 112 | fn is_release_tag(tag: &str) -> bool { |
159 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) | 113 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) |
160 | } | 114 | } |
diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 71caff248..c4a15f4bd 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs | |||
@@ -17,9 +17,10 @@ use xtask::{ | |||
17 | install::{ClientOpt, InstallCmd, Malloc, ServerOpt}, | 17 | install::{ClientOpt, InstallCmd, Malloc, ServerOpt}, |
18 | metrics::MetricsCmd, | 18 | metrics::MetricsCmd, |
19 | not_bash::pushd, | 19 | not_bash::pushd, |
20 | pre_cache::PreCacheCmd, | ||
20 | pre_commit, project_root, | 21 | pre_commit, project_root, |
21 | release::{PromoteCmd, ReleaseCmd}, | 22 | release::{PromoteCmd, ReleaseCmd}, |
22 | run_clippy, run_fuzzer, run_pre_cache, run_rustfmt, Result, | 23 | run_clippy, run_fuzzer, run_rustfmt, Result, |
23 | }; | 24 | }; |
24 | 25 | ||
25 | fn main() -> Result<()> { | 26 | fn main() -> Result<()> { |
@@ -101,7 +102,7 @@ FLAGS: | |||
101 | } | 102 | } |
102 | "pre-cache" => { | 103 | "pre-cache" => { |
103 | args.finish()?; | 104 | args.finish()?; |
104 | run_pre_cache() | 105 | PreCacheCmd.run() |
105 | } | 106 | } |
106 | "release" => { | 107 | "release" => { |
107 | let dry_run = args.contains("--dry-run"); | 108 | let dry_run = args.contains("--dry-run"); |
diff --git a/xtask/src/pre_cache.rs b/xtask/src/pre_cache.rs new file mode 100644 index 000000000..47ba6ba24 --- /dev/null +++ b/xtask/src/pre_cache.rs | |||
@@ -0,0 +1,80 @@ | |||
1 | use std::{ | ||
2 | fs::FileType, | ||
3 | path::{Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use anyhow::Result; | ||
7 | |||
8 | use crate::not_bash::{fs2, rm_rf}; | ||
9 | |||
10 | pub struct PreCacheCmd; | ||
11 | |||
12 | impl PreCacheCmd { | ||
13 | /// Cleans the `./target` dir after the build such that only | ||
14 | /// dependencies are cached on CI. | ||
15 | pub fn run(self) -> Result<()> { | ||
16 | let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); | ||
17 | if !slow_tests_cookie.exists() { | ||
18 | panic!("slow tests were skipped on CI!") | ||
19 | } | ||
20 | rm_rf(slow_tests_cookie)?; | ||
21 | |||
22 | for path in read_dir("./target/debug", FileType::is_file)? { | ||
23 | // Can't delete yourself on windows :-( | ||
24 | if !path.ends_with("xtask.exe") { | ||
25 | rm_rf(&path)? | ||
26 | } | ||
27 | } | ||
28 | |||
29 | fs2::remove_file("./target/.rustc_info.json")?; | ||
30 | |||
31 | let to_delete = read_dir("./crates", FileType::is_dir)? | ||
32 | .into_iter() | ||
33 | .map(|path| path.file_name().unwrap().to_string_lossy().replace('-', "_")) | ||
34 | .collect::<Vec<_>>(); | ||
35 | |||
36 | for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { | ||
37 | for path in read_dir(dir, |_file_type| true)? { | ||
38 | if path.ends_with("xtask.exe") { | ||
39 | continue; | ||
40 | } | ||
41 | let file_name = path.file_name().unwrap().to_string_lossy(); | ||
42 | let (stem, _) = match rsplit_once(&file_name, '-') { | ||
43 | Some(it) => it, | ||
44 | None => { | ||
45 | rm_rf(path)?; | ||
46 | continue; | ||
47 | } | ||
48 | }; | ||
49 | let stem = stem.replace('-', "_"); | ||
50 | if to_delete.contains(&stem) { | ||
51 | rm_rf(path)?; | ||
52 | } | ||
53 | } | ||
54 | } | ||
55 | |||
56 | Ok(()) | ||
57 | } | ||
58 | } | ||
59 | fn read_dir(path: impl AsRef<Path>, cond: impl Fn(&FileType) -> bool) -> Result<Vec<PathBuf>> { | ||
60 | read_dir_impl(path.as_ref(), &cond) | ||
61 | } | ||
62 | |||
63 | fn read_dir_impl(path: &Path, cond: &dyn Fn(&FileType) -> bool) -> Result<Vec<PathBuf>> { | ||
64 | let mut res = Vec::new(); | ||
65 | for entry in path.read_dir()? { | ||
66 | let entry = entry?; | ||
67 | let file_type = entry.file_type()?; | ||
68 | if cond(&file_type) { | ||
69 | res.push(entry.path()) | ||
70 | } | ||
71 | } | ||
72 | Ok(res) | ||
73 | } | ||
74 | |||
75 | fn rsplit_once(haystack: &str, delim: char) -> Option<(&str, &str)> { | ||
76 | let mut split = haystack.rsplitn(2, delim); | ||
77 | let suffix = split.next()?; | ||
78 | let prefix = split.next()?; | ||
79 | Some((prefix, suffix)) | ||
80 | } | ||
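`rsplit_once` is a local helper here since `str::rsplit_once` was not yet available on stable at the time. A quick check of how it carves fingerprint-style names into crate stem and hash (the helper is copied only so the example is self-contained):

```rust
fn rsplit_once(haystack: &str, delim: char) -> Option<(&str, &str)> {
    let mut split = haystack.rsplitn(2, delim);
    let suffix = split.next()?;
    let prefix = split.next()?;
    Some((prefix, suffix))
}

fn main() {
    // Fingerprint entries look like "<crate_stem>-<hash>"; the stem is what
    // gets compared against the workspace crate names collected above.
    assert_eq!(rsplit_once("rust_analyzer-1a2b3c", '-'), Some(("rust_analyzer", "1a2b3c")));
    assert_eq!(rsplit_once("no_delimiter", '-'), None);
}
```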
diff --git a/xtask/tests/tidy.rs b/xtask/tests/tidy.rs index ca9749ed4..bec3c630b 100644 --- a/xtask/tests/tidy.rs +++ b/xtask/tests/tidy.rs | |||
@@ -82,7 +82,7 @@ MIT/Apache-2.0 | |||
82 | MIT/Apache-2.0 AND BSD-2-Clause | 82 | MIT/Apache-2.0 AND BSD-2-Clause |
83 | Unlicense OR MIT | 83 | Unlicense OR MIT |
84 | Unlicense/MIT | 84 | Unlicense/MIT |
85 | Zlib | 85 | Zlib OR Apache-2.0 OR MIT |
86 | " | 86 | " |
87 | .lines() | 87 | .lines() |
88 | .filter(|it| !it.is_empty()) | 88 | .filter(|it| !it.is_empty()) |