54 files changed, 496 insertions, 297 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 2deb009ce..fb077e28d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -25,7 +25,7 @@ jobs: | |||
25 | strategy: | 25 | strategy: |
26 | fail-fast: false | 26 | fail-fast: false |
27 | matrix: | 27 | matrix: |
28 | os: [ubuntu-latest, windows-latest] #, macos-latest] | 28 | os: [ubuntu-latest, windows-latest, macos-latest] |
29 | 29 | ||
30 | steps: | 30 | steps: |
31 | - name: Checkout repository | 31 | - name: Checkout repository |
@@ -70,10 +70,6 @@ jobs: | |||
70 | - name: Prepare cache | 70 | - name: Prepare cache |
71 | run: cargo xtask pre-cache | 71 | run: cargo xtask pre-cache |
72 | 72 | ||
73 | - name: Prepare cache 2 | ||
74 | if: matrix.os == 'windows-latest' | ||
75 | run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe | ||
76 | |||
77 | # Weird targets to catch non-portable code | 73 | # Weird targets to catch non-portable code |
78 | rust-cross: | 74 | rust-cross: |
79 | name: Rust Cross | 75 | name: Rust Cross |
diff --git a/Cargo.lock b/Cargo.lock
index c1f1c07a2..ffa385106 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -162,9 +162,9 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" | |||
162 | 162 | ||
163 | [[package]] | 163 | [[package]] |
164 | name = "chalk-derive" | 164 | name = "chalk-derive" |
165 | version = "0.21.0" | 165 | version = "0.23.0" |
166 | source = "registry+https://github.com/rust-lang/crates.io-index" | 166 | source = "registry+https://github.com/rust-lang/crates.io-index" |
167 | checksum = "c1df0dbb57d74b4acd20f20fa66ab2acd09776b79eaeb9d8f947b2f3e01c40bf" | 167 | checksum = "c3cb438e961fd7f1183dc5e0bdcfd09253bf9b90592cf665d1ce6787d8a4908f" |
168 | dependencies = [ | 168 | dependencies = [ |
169 | "proc-macro2", | 169 | "proc-macro2", |
170 | "quote", | 170 | "quote", |
@@ -174,9 +174,9 @@ dependencies = [ | |||
174 | 174 | ||
175 | [[package]] | 175 | [[package]] |
176 | name = "chalk-ir" | 176 | name = "chalk-ir" |
177 | version = "0.21.0" | 177 | version = "0.23.0" |
178 | source = "registry+https://github.com/rust-lang/crates.io-index" | 178 | source = "registry+https://github.com/rust-lang/crates.io-index" |
179 | checksum = "44361a25dbdb1dc428f56ad7a3c21ba9ca12f3225c26a47919ff6fcb10a583d4" | 179 | checksum = "bb332abfcb015b148c6fbab39b1d13282745b0f7f312019dd8e138f5f3f0855d" |
180 | dependencies = [ | 180 | dependencies = [ |
181 | "chalk-derive", | 181 | "chalk-derive", |
182 | "lazy_static", | 182 | "lazy_static", |
@@ -184,9 +184,9 @@ dependencies = [ | |||
184 | 184 | ||
185 | [[package]] | 185 | [[package]] |
186 | name = "chalk-recursive" | 186 | name = "chalk-recursive" |
187 | version = "0.21.0" | 187 | version = "0.23.0" |
188 | source = "registry+https://github.com/rust-lang/crates.io-index" | 188 | source = "registry+https://github.com/rust-lang/crates.io-index" |
189 | checksum = "dd89556b98de156d5eaf21077d297cd2198628f10f2df140798ea3a5dd84bc86" | 189 | checksum = "e7c7673f10c5fa1acf7fa07d4f4c5917cbcf161ed3a952d14530c79950de32d2" |
190 | dependencies = [ | 190 | dependencies = [ |
191 | "chalk-derive", | 191 | "chalk-derive", |
192 | "chalk-ir", | 192 | "chalk-ir", |
@@ -197,9 +197,9 @@ dependencies = [ | |||
197 | 197 | ||
198 | [[package]] | 198 | [[package]] |
199 | name = "chalk-solve" | 199 | name = "chalk-solve" |
200 | version = "0.21.0" | 200 | version = "0.23.0" |
201 | source = "registry+https://github.com/rust-lang/crates.io-index" | 201 | source = "registry+https://github.com/rust-lang/crates.io-index" |
202 | checksum = "a886da37a0dc457057d86f78f026f7a09c6d8088aa13f4f4127fdb8dc80119a3" | 202 | checksum = "802de4eff72e5a5d2828e6c07224c74d66949dc6308aff025d0ae2871a11b4eb" |
203 | dependencies = [ | 203 | dependencies = [ |
204 | "chalk-derive", | 204 | "chalk-derive", |
205 | "chalk-ir", | 205 | "chalk-ir", |
@@ -214,9 +214,9 @@ dependencies = [ | |||
214 | 214 | ||
215 | [[package]] | 215 | [[package]] |
216 | name = "chrono" | 216 | name = "chrono" |
217 | version = "0.4.13" | 217 | version = "0.4.15" |
218 | source = "registry+https://github.com/rust-lang/crates.io-index" | 218 | source = "registry+https://github.com/rust-lang/crates.io-index" |
219 | checksum = "c74d84029116787153e02106bf53e66828452a4b325cc8652b788b5967c0a0b6" | 219 | checksum = "942f72db697d8767c22d46a598e01f2d3b475501ea43d0db4f16d90259182d0b" |
220 | dependencies = [ | 220 | dependencies = [ |
221 | "num-integer", | 221 | "num-integer", |
222 | "num-traits", | 222 | "num-traits", |
@@ -765,9 +765,9 @@ dependencies = [ | |||
765 | 765 | ||
766 | [[package]] | 766 | [[package]] |
767 | name = "lsp-server" | 767 | name = "lsp-server" |
768 | version = "0.3.3" | 768 | version = "0.3.4" |
769 | source = "registry+https://github.com/rust-lang/crates.io-index" | 769 | source = "registry+https://github.com/rust-lang/crates.io-index" |
770 | checksum = "53b4ace8ebe5d2aff3687ce0ed507f6020d6a47a7de2b0d3d664ea237ffb0c62" | 770 | checksum = "87fce8851309a325974ec76efe7c9d954d152c9ff4fded6520eb3c96d0aa3a96" |
771 | dependencies = [ | 771 | dependencies = [ |
772 | "crossbeam-channel", | 772 | "crossbeam-channel", |
773 | "log", | 773 | "log", |
@@ -971,9 +971,9 @@ checksum = "1ab52be62400ca80aa00285d25253d7f7c437b7375c4de678f5405d3afe82ca5" | |||
971 | 971 | ||
972 | [[package]] | 972 | [[package]] |
973 | name = "once_cell" | 973 | name = "once_cell" |
974 | version = "1.4.0" | 974 | version = "1.4.1" |
975 | source = "registry+https://github.com/rust-lang/crates.io-index" | 975 | source = "registry+https://github.com/rust-lang/crates.io-index" |
976 | checksum = "0b631f7e854af39a1739f401cf34a8a013dfe09eac4fa4dba91e9768bd28168d" | 976 | checksum = "260e51e7efe62b592207e9e13a68e43692a7a279171d6ba57abd208bf23645ad" |
977 | 977 | ||
978 | [[package]] | 978 | [[package]] |
979 | name = "oorandom" | 979 | name = "oorandom" |
@@ -1036,9 +1036,9 @@ dependencies = [ | |||
1036 | 1036 | ||
1037 | [[package]] | 1037 | [[package]] |
1038 | name = "perf-event-open-sys" | 1038 | name = "perf-event-open-sys" |
1039 | version = "0.3.2" | 1039 | version = "0.3.3" |
1040 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1040 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1041 | checksum = "83e7183862f36d10263d0a1ccaef50fef734ade948bf026afd1bd97355c78273" | 1041 | checksum = "d9ebe2b9ef0cb884ef778c5a533144e348e9839a9fcf67f3d24e1890ac9088d6" |
1042 | dependencies = [ | 1042 | dependencies = [ |
1043 | "libc", | 1043 | "libc", |
1044 | ] | 1044 | ] |
@@ -1097,6 +1097,7 @@ dependencies = [ | |||
1097 | "mbe", | 1097 | "mbe", |
1098 | "memmap", | 1098 | "memmap", |
1099 | "proc_macro_api", | 1099 | "proc_macro_api", |
1100 | "proc_macro_test", | ||
1100 | "serde_derive", | 1101 | "serde_derive", |
1101 | "test_utils", | 1102 | "test_utils", |
1102 | "toolchain", | 1103 | "toolchain", |
@@ -1104,6 +1105,10 @@ dependencies = [ | |||
1104 | ] | 1105 | ] |
1105 | 1106 | ||
1106 | [[package]] | 1107 | [[package]] |
1108 | name = "proc_macro_test" | ||
1109 | version = "0.0.0" | ||
1110 | |||
1111 | [[package]] | ||
1107 | name = "profile" | 1112 | name = "profile" |
1108 | version = "0.0.0" | 1113 | version = "0.0.0" |
1109 | dependencies = [ | 1114 | dependencies = [ |
@@ -1259,9 +1264,9 @@ dependencies = [ | |||
1259 | 1264 | ||
1260 | [[package]] | 1265 | [[package]] |
1261 | name = "rustc-ap-rustc_lexer" | 1266 | name = "rustc-ap-rustc_lexer" |
1262 | version = "671.0.0" | 1267 | version = "673.0.0" |
1263 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1268 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1264 | checksum = "22e1221f3bfa2943c942cf8da319ab2346887f8757778c29c7f1822cd27b521f" | 1269 | checksum = "f6b71fa1285bdefe5fb61e59b63d6cc246abf337f4acafdd620d721bc488e671" |
1265 | dependencies = [ | 1270 | dependencies = [ |
1266 | "unicode-xid", | 1271 | "unicode-xid", |
1267 | ] | 1272 | ] |
@@ -1574,9 +1579,9 @@ dependencies = [ | |||
1574 | 1579 | ||
1575 | [[package]] | 1580 | [[package]] |
1576 | name = "tinyvec" | 1581 | name = "tinyvec" |
1577 | version = "0.3.3" | 1582 | version = "0.3.4" |
1578 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1583 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1579 | checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed" | 1584 | checksum = "238ce071d267c5710f9d31451efec16c5ee22de34df17cc05e56cbc92e967117" |
1580 | 1585 | ||
1581 | [[package]] | 1586 | [[package]] |
1582 | name = "toolchain" | 1587 | name = "toolchain" |
diff --git a/crates/assists/src/ast_transform.rs b/crates/assists/src/ast_transform.rs
index 4c41c16d8..5216862ba 100644
--- a/crates/assists/src/ast_transform.rs
+++ b/crates/assists/src/ast_transform.rs
@@ -7,6 +7,17 @@ use syntax::{ | |||
7 | ast::{self, AstNode}, | 7 | ast::{self, AstNode}, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { | ||
11 | SyntaxRewriter::from_fn(|element| match element { | ||
12 | syntax::SyntaxElement::Node(n) => { | ||
13 | let replacement = transformer.get_substitution(&n)?; | ||
14 | Some(replacement.into()) | ||
15 | } | ||
16 | _ => None, | ||
17 | }) | ||
18 | .rewrite_ast(&node) | ||
19 | } | ||
20 | |||
10 | pub trait AstTransform<'a> { | 21 | pub trait AstTransform<'a> { |
11 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode>; | 22 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode>; |
12 | 23 | ||
@@ -107,10 +118,7 @@ impl<'a> SubstituteTypeParams<'a> { | |||
107 | ast::Type::PathType(path_type) => path_type.path()?, | 118 | ast::Type::PathType(path_type) => path_type.path()?, |
108 | _ => return None, | 119 | _ => return None, |
109 | }; | 120 | }; |
110 | // FIXME: use `hir::Path::from_src` instead. | 121 | let resolution = self.source_scope.speculative_resolve(&path)?; |
111 | #[allow(deprecated)] | ||
112 | let path = hir::Path::from_ast(path)?; | ||
113 | let resolution = self.source_scope.resolve_hir_path(&path)?; | ||
114 | match resolution { | 122 | match resolution { |
115 | hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), | 123 | hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), |
116 | _ => None, | 124 | _ => None, |
@@ -146,10 +154,7 @@ impl<'a> QualifyPaths<'a> { | |||
146 | // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway | 154 | // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway |
147 | return None; | 155 | return None; |
148 | } | 156 | } |
149 | // FIXME: use `hir::Path::from_src` instead. | 157 | let resolution = self.source_scope.speculative_resolve(&p)?; |
150 | #[allow(deprecated)] | ||
151 | let hir_path = hir::Path::from_ast(p.clone()); | ||
152 | let resolution = self.source_scope.resolve_hir_path(&hir_path?)?; | ||
153 | match resolution { | 158 | match resolution { |
154 | PathResolution::Def(def) => { | 159 | PathResolution::Def(def) => { |
155 | let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?; | 160 | let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?; |
@@ -175,17 +180,6 @@ impl<'a> QualifyPaths<'a> { | |||
175 | } | 180 | } |
176 | } | 181 | } |
177 | 182 | ||
178 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { | ||
179 | SyntaxRewriter::from_fn(|element| match element { | ||
180 | syntax::SyntaxElement::Node(n) => { | ||
181 | let replacement = transformer.get_substitution(&n)?; | ||
182 | Some(replacement.into()) | ||
183 | } | ||
184 | _ => None, | ||
185 | }) | ||
186 | .rewrite_ast(&node) | ||
187 | } | ||
188 | |||
189 | impl<'a> AstTransform<'a> for QualifyPaths<'a> { | 183 | impl<'a> AstTransform<'a> for QualifyPaths<'a> { |
190 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> { | 184 | fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> { |
191 | self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) | 185 | self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) |
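In `ast_transform.rs` above, the deprecated `hir::Path::from_ast` plus `resolve_hir_path` pair is replaced by a single `SemanticsScope::speculative_resolve` call on the `ast::Path`. A minimal sketch of the substitution step as it reads after this patch; the free-function framing and names are illustrative, not part of the diff:

```rust
use hir::{PathResolution, SemanticsScope, TypeParam};
use rustc_hash::FxHashMap;
use syntax::{ast, AstNode, SyntaxNode};

// Distilled from `SubstituteTypeParams::get_substitution`: resolve the path
// speculatively and, if it names a type parameter we are substituting,
// return the replacement type's syntax node.
fn substitute_type_param(
    scope: &SemanticsScope<'_>,
    substs: &FxHashMap<TypeParam, ast::Type>,
    node: &SyntaxNode,
) -> Option<SyntaxNode> {
    let path = ast::Path::cast(node.clone())?;
    match scope.speculative_resolve(&path)? {
        PathResolution::TypeParam(tp) => Some(substs.get(&tp)?.syntax().clone()),
        _ => None,
    }
}
```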
diff --git a/crates/assists/src/handlers/add_missing_impl_members.rs b/crates/assists/src/handlers/add_missing_impl_members.rs
index 81b61ebf8..83a2ada9a 100644
--- a/crates/assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/assists/src/handlers/add_missing_impl_members.rs
@@ -48,7 +48,6 @@ enum AddMissingImplMembersMode { | |||
48 | // fn foo(&self) -> u32 { | 48 | // fn foo(&self) -> u32 { |
49 | // ${0:todo!()} | 49 | // ${0:todo!()} |
50 | // } | 50 | // } |
51 | // | ||
52 | // } | 51 | // } |
53 | // ``` | 52 | // ``` |
54 | pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 53 | pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
@@ -89,8 +88,8 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) - | |||
89 | // impl Trait for () { | 88 | // impl Trait for () { |
90 | // Type X = (); | 89 | // Type X = (); |
91 | // fn foo(&self) {} | 90 | // fn foo(&self) {} |
92 | // $0fn bar(&self) {} | ||
93 | // | 91 | // |
92 | // $0fn bar(&self) {} | ||
94 | // } | 93 | // } |
95 | // ``` | 94 | // ``` |
96 | pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 95 | pub(crate) fn add_missing_default_members(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
@@ -240,15 +239,18 @@ struct S; | |||
240 | 239 | ||
241 | impl Foo for S { | 240 | impl Foo for S { |
242 | fn bar(&self) {} | 241 | fn bar(&self) {} |
242 | |||
243 | $0type Output; | 243 | $0type Output; |
244 | |||
244 | const CONST: usize = 42; | 245 | const CONST: usize = 42; |
246 | |||
245 | fn foo(&self) { | 247 | fn foo(&self) { |
246 | todo!() | 248 | todo!() |
247 | } | 249 | } |
250 | |||
248 | fn baz(&self) { | 251 | fn baz(&self) { |
249 | todo!() | 252 | todo!() |
250 | } | 253 | } |
251 | |||
252 | }"#, | 254 | }"#, |
253 | ); | 255 | ); |
254 | } | 256 | } |
@@ -281,10 +283,10 @@ struct S; | |||
281 | 283 | ||
282 | impl Foo for S { | 284 | impl Foo for S { |
283 | fn bar(&self) {} | 285 | fn bar(&self) {} |
286 | |||
284 | fn foo(&self) { | 287 | fn foo(&self) { |
285 | ${0:todo!()} | 288 | ${0:todo!()} |
286 | } | 289 | } |
287 | |||
288 | }"#, | 290 | }"#, |
289 | ); | 291 | ); |
290 | } | 292 | } |
@@ -599,6 +601,7 @@ trait Foo { | |||
599 | struct S; | 601 | struct S; |
600 | impl Foo for S { | 602 | impl Foo for S { |
601 | $0type Output; | 603 | $0type Output; |
604 | |||
602 | fn foo(&self) { | 605 | fn foo(&self) { |
603 | todo!() | 606 | todo!() |
604 | } | 607 | } |
@@ -708,4 +711,56 @@ impl Tr for () { | |||
708 | }"#, | 711 | }"#, |
709 | ) | 712 | ) |
710 | } | 713 | } |
714 | |||
715 | #[test] | ||
716 | fn test_whitespace_fixup_preserves_bad_tokens() { | ||
717 | check_assist( | ||
718 | add_missing_impl_members, | ||
719 | r#" | ||
720 | trait Tr { | ||
721 | fn foo(); | ||
722 | } | ||
723 | |||
724 | impl Tr for ()<|> { | ||
725 | +++ | ||
726 | }"#, | ||
727 | r#" | ||
728 | trait Tr { | ||
729 | fn foo(); | ||
730 | } | ||
731 | |||
732 | impl Tr for () { | ||
733 | fn foo() { | ||
734 | ${0:todo!()} | ||
735 | } | ||
736 | +++ | ||
737 | }"#, | ||
738 | ) | ||
739 | } | ||
740 | |||
741 | #[test] | ||
742 | fn test_whitespace_fixup_preserves_comments() { | ||
743 | check_assist( | ||
744 | add_missing_impl_members, | ||
745 | r#" | ||
746 | trait Tr { | ||
747 | fn foo(); | ||
748 | } | ||
749 | |||
750 | impl Tr for ()<|> { | ||
751 | // very important | ||
752 | }"#, | ||
753 | r#" | ||
754 | trait Tr { | ||
755 | fn foo(); | ||
756 | } | ||
757 | |||
758 | impl Tr for () { | ||
759 | fn foo() { | ||
760 | ${0:todo!()} | ||
761 | } | ||
762 | // very important | ||
763 | }"#, | ||
764 | ) | ||
765 | } | ||
711 | } | 766 | } |
diff --git a/crates/assists/src/handlers/auto_import.rs b/crates/assists/src/handlers/auto_import.rs
index cce789972..b9ec3f10b 100644
--- a/crates/assists/src/handlers/auto_import.rs
+++ b/crates/assists/src/handlers/auto_import.rs
@@ -53,7 +53,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> | |||
53 | |builder| { | 53 | |builder| { |
54 | insert_use_statement( | 54 | insert_use_statement( |
55 | &auto_import_assets.syntax_under_caret, | 55 | &auto_import_assets.syntax_under_caret, |
56 | &import, | 56 | &import.to_string(), |
57 | ctx, | 57 | ctx, |
58 | builder.text_edit_builder(), | 58 | builder.text_edit_builder(), |
59 | ); | 59 | ); |
diff --git a/crates/assists/src/handlers/expand_glob_import.rs b/crates/assists/src/handlers/expand_glob_import.rs
index f690ec343..81d0af2f3 100644
--- a/crates/assists/src/handlers/expand_glob_import.rs
+++ b/crates/assists/src/handlers/expand_glob_import.rs
@@ -1,3 +1,4 @@ | |||
1 | use either::Either; | ||
1 | use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope}; | 2 | use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope}; |
2 | use ide_db::{ | 3 | use ide_db::{ |
3 | defs::{classify_name_ref, Definition, NameRefClass}, | 4 | defs::{classify_name_ref, Definition, NameRefClass}, |
@@ -10,8 +11,6 @@ use crate::{ | |||
10 | AssistId, AssistKind, | 11 | AssistId, AssistKind, |
11 | }; | 12 | }; |
12 | 13 | ||
13 | use either::Either; | ||
14 | |||
15 | // Assist: expand_glob_import | 14 | // Assist: expand_glob_import |
16 | // | 15 | // |
17 | // Expands glob imports. | 16 | // Expands glob imports. |
@@ -40,11 +39,15 @@ use either::Either; | |||
40 | pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 39 | pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
41 | let star = ctx.find_token_at_offset(T![*])?; | 40 | let star = ctx.find_token_at_offset(T![*])?; |
42 | let mod_path = find_mod_path(&star)?; | 41 | let mod_path = find_mod_path(&star)?; |
42 | let module = match ctx.sema.resolve_path(&mod_path)? { | ||
43 | PathResolution::Def(ModuleDef::Module(it)) => it, | ||
44 | _ => return None, | ||
45 | }; | ||
43 | 46 | ||
44 | let source_file = ctx.source_file(); | 47 | let source_file = ctx.source_file(); |
45 | let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); | 48 | let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset()); |
46 | 49 | ||
47 | let defs_in_mod = find_defs_in_mod(ctx, scope, &mod_path)?; | 50 | let defs_in_mod = find_defs_in_mod(ctx, scope, module)?; |
48 | let name_refs_in_source_file = | 51 | let name_refs_in_source_file = |
49 | source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); | 52 | source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect(); |
50 | let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file); | 53 | let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file); |
@@ -82,17 +85,8 @@ impl Def { | |||
82 | fn find_defs_in_mod( | 85 | fn find_defs_in_mod( |
83 | ctx: &AssistContext, | 86 | ctx: &AssistContext, |
84 | from: SemanticsScope<'_>, | 87 | from: SemanticsScope<'_>, |
85 | path: &ast::Path, | 88 | module: hir::Module, |
86 | ) -> Option<Vec<Def>> { | 89 | ) -> Option<Vec<Def>> { |
87 | let hir_path = ctx.sema.lower_path(&path)?; | ||
88 | let module = if let Some(PathResolution::Def(ModuleDef::Module(module))) = | ||
89 | from.resolve_hir_path_qualifier(&hir_path) | ||
90 | { | ||
91 | module | ||
92 | } else { | ||
93 | return None; | ||
94 | }; | ||
95 | |||
96 | let module_scope = module.scope(ctx.db(), from.module()); | 90 | let module_scope = module.scope(ctx.db(), from.module()); |
97 | 91 | ||
98 | let mut defs = vec![]; | 92 | let mut defs = vec![]; |
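Instead of lowering the path and calling `resolve_hir_path_qualifier`, `expand_glob_import` now resolves the `ast::Path` up front with `Semantics::resolve_path` and keeps only the module case, so the rest of the assist works with a plain `hir::Module`. Roughly (the helper and its `RootDatabase` parameterization are illustrative):

```rust
use hir::{Module, ModuleDef, PathResolution, Semantics};
use ide_db::RootDatabase;
use syntax::ast;

// Bail out unless the qualifier of the `*` import actually resolves to a module.
fn resolve_to_module(sema: &Semantics<RootDatabase>, path: &ast::Path) -> Option<Module> {
    match sema.resolve_path(path)? {
        PathResolution::Def(ModuleDef::Module(it)) => Some(it),
        _ => None,
    }
}
```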
diff --git a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
index 4bcdae7ba..d62e06b4a 100644
--- a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -106,7 +106,12 @@ fn insert_import( | |||
106 | if let Some(mut mod_path) = mod_path { | 106 | if let Some(mut mod_path) = mod_path { |
107 | mod_path.segments.pop(); | 107 | mod_path.segments.pop(); |
108 | mod_path.segments.push(variant_hir_name.clone()); | 108 | mod_path.segments.push(variant_hir_name.clone()); |
109 | insert_use_statement(path.syntax(), &mod_path, ctx, builder.text_edit_builder()); | 109 | insert_use_statement( |
110 | path.syntax(), | ||
111 | &mod_path.to_string(), | ||
112 | ctx, | ||
113 | builder.text_edit_builder(), | ||
114 | ); | ||
110 | } | 115 | } |
111 | Some(()) | 116 | Some(()) |
112 | } | 117 | } |
diff --git a/crates/assists/src/handlers/replace_qualified_name_with_use.rs b/crates/assists/src/handlers/replace_qualified_name_with_use.rs
index 011bf1106..470e5f8ff 100644
--- a/crates/assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,5 +1,5 @@ | |||
1 | use hir; | 1 | use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SyntaxNode, TextRange}; |
2 | use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode}; | 2 | use test_utils::mark; |
3 | 3 | ||
4 | use crate::{ | 4 | use crate::{ |
5 | utils::{find_insert_use_container, insert_use_statement}, | 5 | utils::{find_insert_use_container, insert_use_statement}, |
@@ -28,12 +28,19 @@ pub(crate) fn replace_qualified_name_with_use( | |||
28 | if path.syntax().ancestors().find_map(ast::Use::cast).is_some() { | 28 | if path.syntax().ancestors().find_map(ast::Use::cast).is_some() { |
29 | return None; | 29 | return None; |
30 | } | 30 | } |
31 | 31 | if path.qualifier().is_none() { | |
32 | let hir_path = ctx.sema.lower_path(&path)?; | 32 | mark::hit!(dont_import_trivial_paths); |
33 | let segments = collect_hir_path_segments(&hir_path)?; | ||
34 | if segments.len() < 2 { | ||
35 | return None; | 33 | return None; |
36 | } | 34 | } |
35 | let path_to_import = path.to_string().clone(); | ||
36 | let path_to_import = match path.segment()?.generic_arg_list() { | ||
37 | Some(generic_args) => { | ||
38 | let generic_args_start = | ||
39 | generic_args.syntax().text_range().start() - path.syntax().text_range().start(); | ||
40 | &path_to_import[TextRange::up_to(generic_args_start)] | ||
41 | } | ||
42 | None => path_to_import.as_str(), | ||
43 | }; | ||
37 | 44 | ||
38 | let target = path.syntax().text_range(); | 45 | let target = path.syntax().text_range(); |
39 | acc.add( | 46 | acc.add( |
@@ -41,12 +48,16 @@ pub(crate) fn replace_qualified_name_with_use( | |||
41 | "Replace qualified path with use", | 48 | "Replace qualified path with use", |
42 | target, | 49 | target, |
43 | |builder| { | 50 | |builder| { |
44 | let path_to_import = hir_path.mod_path().clone(); | ||
45 | let container = match find_insert_use_container(path.syntax(), ctx) { | 51 | let container = match find_insert_use_container(path.syntax(), ctx) { |
46 | Some(c) => c, | 52 | Some(c) => c, |
47 | None => return, | 53 | None => return, |
48 | }; | 54 | }; |
49 | insert_use_statement(path.syntax(), &path_to_import, ctx, builder.text_edit_builder()); | 55 | insert_use_statement( |
56 | path.syntax(), | ||
57 | &path_to_import.to_string(), | ||
58 | ctx, | ||
59 | builder.text_edit_builder(), | ||
60 | ); | ||
50 | 61 | ||
51 | // Now that we've brought the name into scope, re-qualify all paths that could be | 62 | // Now that we've brought the name into scope, re-qualify all paths that could be |
52 | // affected (that is, all paths inside the node we added the `use` to). | 63 | // affected (that is, all paths inside the node we added the `use` to). |
@@ -58,26 +69,6 @@ pub(crate) fn replace_qualified_name_with_use( | |||
58 | ) | 69 | ) |
59 | } | 70 | } |
60 | 71 | ||
61 | fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> { | ||
62 | let mut ps = Vec::<SmolStr>::with_capacity(10); | ||
63 | match path.kind() { | ||
64 | hir::PathKind::Abs => ps.push("".into()), | ||
65 | hir::PathKind::Crate => ps.push("crate".into()), | ||
66 | hir::PathKind::Plain => {} | ||
67 | hir::PathKind::Super(0) => ps.push("self".into()), | ||
68 | hir::PathKind::Super(lvl) => { | ||
69 | let mut chain = "super".to_string(); | ||
70 | for _ in 0..*lvl { | ||
71 | chain += "::super"; | ||
72 | } | ||
73 | ps.push(chain.into()); | ||
74 | } | ||
75 | hir::PathKind::DollarCrate(_) => return None, | ||
76 | } | ||
77 | ps.extend(path.segments().iter().map(|it| it.name.to_string().into())); | ||
78 | Some(ps) | ||
79 | } | ||
80 | |||
81 | /// Adds replacements to `re` that shorten `path` in all descendants of `node`. | 72 | /// Adds replacements to `re` that shorten `path` in all descendants of `node`. |
82 | fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { | 73 | fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { |
83 | for child in node.children() { | 74 | for child in node.children() { |
@@ -467,7 +458,8 @@ impl Debug for Foo { | |||
467 | } | 458 | } |
468 | 459 | ||
469 | #[test] | 460 | #[test] |
470 | fn test_replace_not_applicable_one_segment() { | 461 | fn dont_import_trivial_paths() { |
462 | mark::check!(dont_import_trivial_paths); | ||
471 | check_assist_not_applicable( | 463 | check_assist_not_applicable( |
472 | replace_qualified_name_with_use, | 464 | replace_qualified_name_with_use, |
473 | r" | 465 | r" |
diff --git a/crates/assists/src/lib.rs b/crates/assists/src/lib.rs
index ae90d68a3..c589b08dc 100644
--- a/crates/assists/src/lib.rs
+++ b/crates/assists/src/lib.rs
@@ -66,13 +66,13 @@ pub struct GroupLabel(pub String); | |||
66 | 66 | ||
67 | #[derive(Debug, Clone)] | 67 | #[derive(Debug, Clone)] |
68 | pub struct Assist { | 68 | pub struct Assist { |
69 | id: AssistId, | 69 | pub id: AssistId, |
70 | /// Short description of the assist, as shown in the UI. | 70 | /// Short description of the assist, as shown in the UI. |
71 | label: String, | 71 | label: String, |
72 | group: Option<GroupLabel>, | 72 | pub group: Option<GroupLabel>, |
73 | /// Target ranges are used to sort assists: the smaller the target range, | 73 | /// Target ranges are used to sort assists: the smaller the target range, |
74 | /// the more specific assist is, and so it should be sorted first. | 74 | /// the more specific assist is, and so it should be sorted first. |
75 | target: TextRange, | 75 | pub target: TextRange, |
76 | } | 76 | } |
77 | 77 | ||
78 | #[derive(Debug, Clone)] | 78 | #[derive(Debug, Clone)] |
@@ -82,6 +82,11 @@ pub struct ResolvedAssist { | |||
82 | } | 82 | } |
83 | 83 | ||
84 | impl Assist { | 84 | impl Assist { |
85 | fn new(id: AssistId, label: String, group: Option<GroupLabel>, target: TextRange) -> Assist { | ||
86 | assert!(label.starts_with(char::is_uppercase)); | ||
87 | Assist { id, label, group, target } | ||
88 | } | ||
89 | |||
85 | /// Return all the assists applicable at the given position. | 90 | /// Return all the assists applicable at the given position. |
86 | /// | 91 | /// |
87 | /// Assists are returned in the "unresolved" state, that is only labels are | 92 | /// Assists are returned in the "unresolved" state, that is only labels are |
@@ -114,30 +119,8 @@ impl Assist { | |||
114 | acc.finish_resolved() | 119 | acc.finish_resolved() |
115 | } | 120 | } |
116 | 121 | ||
117 | pub(crate) fn new( | 122 | pub fn label(&self) -> &str { |
118 | id: AssistId, | 123 | self.label.as_str() |
119 | label: String, | ||
120 | group: Option<GroupLabel>, | ||
121 | target: TextRange, | ||
122 | ) -> Assist { | ||
123 | assert!(label.starts_with(|c: char| c.is_uppercase())); | ||
124 | Assist { id, label, group, target } | ||
125 | } | ||
126 | |||
127 | pub fn id(&self) -> AssistId { | ||
128 | self.id | ||
129 | } | ||
130 | |||
131 | pub fn label(&self) -> String { | ||
132 | self.label.clone() | ||
133 | } | ||
134 | |||
135 | pub fn group(&self) -> Option<GroupLabel> { | ||
136 | self.group.clone() | ||
137 | } | ||
138 | |||
139 | pub fn target(&self) -> TextRange { | ||
140 | self.target | ||
141 | } | 124 | } |
142 | } | 125 | } |
143 | 126 | ||
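With the accessors gone, downstream code reads the now-public `id`, `group`, and `target` fields directly and borrows the label; the `handle_resolve_code_action` change further down is one such caller. A hypothetical consumer, for illustration only:

```rust
use assists::Assist;

// `id` is the assist id tuple (string id plus kind), `target` is the range
// the assist applies to; `label()` now hands out a `&str` rather than a
// freshly cloned `String`.
fn describe(assist: &Assist) -> String {
    format!("{} ({}) at {:?}", assist.label(), assist.id.0, assist.target)
}
```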
diff --git a/crates/assists/src/tests/generated.rs b/crates/assists/src/tests/generated.rs
index d16e6fb0a..173567003 100644
--- a/crates/assists/src/tests/generated.rs
+++ b/crates/assists/src/tests/generated.rs
@@ -82,8 +82,8 @@ trait Trait { | |||
82 | impl Trait for () { | 82 | impl Trait for () { |
83 | Type X = (); | 83 | Type X = (); |
84 | fn foo(&self) {} | 84 | fn foo(&self) {} |
85 | $0fn bar(&self) {} | ||
86 | 85 | ||
86 | $0fn bar(&self) {} | ||
87 | } | 87 | } |
88 | "#####, | 88 | "#####, |
89 | ) | 89 | ) |
@@ -115,7 +115,6 @@ impl Trait<u32> for () { | |||
115 | fn foo(&self) -> u32 { | 115 | fn foo(&self) -> u32 { |
116 | ${0:todo!()} | 116 | ${0:todo!()} |
117 | } | 117 | } |
118 | |||
119 | } | 118 | } |
120 | "#####, | 119 | "#####, |
121 | ) | 120 | ) |
diff --git a/crates/assists/src/utils/insert_use.rs b/crates/assists/src/utils/insert_use.rs
index 50a62ee82..49096a67c 100644
--- a/crates/assists/src/utils/insert_use.rs
+++ b/crates/assists/src/utils/insert_use.rs
@@ -5,7 +5,6 @@ | |||
5 | use std::iter::successors; | 5 | use std::iter::successors; |
6 | 6 | ||
7 | use either::Either; | 7 | use either::Either; |
8 | use hir::{self, ModPath}; | ||
9 | use syntax::{ | 8 | use syntax::{ |
10 | ast::{self, NameOwner, VisibilityOwner}, | 9 | ast::{self, NameOwner, VisibilityOwner}, |
11 | AstNode, AstToken, Direction, SmolStr, | 10 | AstNode, AstToken, Direction, SmolStr, |
@@ -35,11 +34,11 @@ pub(crate) fn find_insert_use_container( | |||
35 | pub(crate) fn insert_use_statement( | 34 | pub(crate) fn insert_use_statement( |
36 | // Ideally the position of the cursor, used to | 35 | // Ideally the position of the cursor, used to |
37 | position: &SyntaxNode, | 36 | position: &SyntaxNode, |
38 | path_to_import: &ModPath, | 37 | path_to_import: &str, |
39 | ctx: &AssistContext, | 38 | ctx: &AssistContext, |
40 | builder: &mut TextEditBuilder, | 39 | builder: &mut TextEditBuilder, |
41 | ) { | 40 | ) { |
42 | let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>(); | 41 | let target = path_to_import.split("::").map(SmolStr::new).collect::<Vec<_>>(); |
43 | let container = find_insert_use_container(position, ctx); | 42 | let container = find_insert_use_container(position, ctx); |
44 | 43 | ||
45 | if let Some(container) = container { | 44 | if let Some(container) = container { |
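Since `insert_use_statement` now takes the import path as plain text, callers format whatever they have, a `hir::ModPath` included, before passing it in; `auto_import` and `extract_struct_from_enum_variant` above do exactly this. A sketch of such a call site inside the assists crate (the helper and the exact import paths are illustrative; they mirror the handler files above):

```rust
use crate::{assist_context::AssistContext, utils::insert_use_statement};
use syntax::SyntaxNode;
use text_edit::TextEditBuilder;

fn add_import_for(
    anchor: &SyntaxNode,
    mod_path: &hir::ModPath,
    ctx: &AssistContext,
    builder: &mut TextEditBuilder,
) {
    // `ModPath` implements `Display`, so this yields e.g. `foo::bar::Baz`.
    insert_use_statement(anchor, &mod_path.to_string(), ctx, builder);
}
```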
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 5dc3ae3b1..c442654dd 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -12,6 +12,7 @@ use hir_def::{ | |||
12 | docs::Documentation, | 12 | docs::Documentation, |
13 | expr::{BindingAnnotation, Pat, PatId}, | 13 | expr::{BindingAnnotation, Pat, PatId}, |
14 | import_map, | 14 | import_map, |
15 | path::ModPath, | ||
15 | per_ns::PerNs, | 16 | per_ns::PerNs, |
16 | resolver::{HasResolver, Resolver}, | 17 | resolver::{HasResolver, Resolver}, |
17 | src::HasSource as _, | 18 | src::HasSource as _, |
@@ -344,11 +345,7 @@ impl Module { | |||
344 | 345 | ||
345 | /// Finds a path that can be used to refer to the given item from within | 346 | /// Finds a path that can be used to refer to the given item from within |
346 | /// this module, if possible. | 347 | /// this module, if possible. |
347 | pub fn find_use_path( | 348 | pub fn find_use_path(self, db: &dyn DefDatabase, item: impl Into<ItemInNs>) -> Option<ModPath> { |
348 | self, | ||
349 | db: &dyn DefDatabase, | ||
350 | item: impl Into<ItemInNs>, | ||
351 | ) -> Option<hir_def::path::ModPath> { | ||
352 | hir_def::find_path::find_path(db, item.into(), self.into()) | 349 | hir_def::find_path::find_path(db, item.into(), self.into()) |
353 | } | 350 | } |
354 | } | 351 | } |
@@ -1126,7 +1123,7 @@ impl ImplDef { | |||
1126 | .value | 1123 | .value |
1127 | .attrs() | 1124 | .attrs() |
1128 | .filter_map(|it| { | 1125 | .filter_map(|it| { |
1129 | let path = hir_def::path::ModPath::from_src(it.path()?, &hygenic)?; | 1126 | let path = ModPath::from_src(it.path()?, &hygenic)?; |
1130 | if path.as_ident()?.to_string() == "derive" { | 1127 | if path.as_ident()?.to_string() == "derive" { |
1131 | Some(it) | 1128 | Some(it) |
1132 | } else { | 1129 | } else { |
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 4ae2bd085..8961ba8fd 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -48,7 +48,7 @@ pub use hir_def::{ | |||
48 | builtin_type::BuiltinType, | 48 | builtin_type::BuiltinType, |
49 | docs::Documentation, | 49 | docs::Documentation, |
50 | nameres::ModuleSource, | 50 | nameres::ModuleSource, |
51 | path::{ModPath, Path, PathKind}, | 51 | path::ModPath, |
52 | type_ref::{Mutability, TypeRef}, | 52 | type_ref::{Mutability, TypeRef}, |
53 | }; | 53 | }; |
54 | pub use hir_expand::{ | 54 | pub use hir_expand::{ |
@@ -60,4 +60,7 @@ pub use hir_ty::display::HirDisplay; | |||
60 | // These are negative re-exports: pub using these names is forbidden, they | 60 | // These are negative re-exports: pub using these names is forbidden, they |
61 | // should remain private to hir internals. | 61 | // should remain private to hir internals. |
62 | #[allow(unused)] | 62 | #[allow(unused)] |
63 | use hir_expand::hygiene::Hygiene; | 63 | use { |
64 | hir_def::path::{Path, PathKind}, | ||
65 | hir_expand::hygiene::Hygiene, | ||
66 | }; | ||
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index d8beac98a..c693176fa 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -6,7 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors}; | |||
6 | 6 | ||
7 | use base_db::{FileId, FileRange}; | 7 | use base_db::{FileId, FileRange}; |
8 | use hir_def::{ | 8 | use hir_def::{ |
9 | resolver::{self, HasResolver, Resolver}, | 9 | resolver::{self, HasResolver, Resolver, TypeNs}, |
10 | AsMacroCall, FunctionId, TraitId, VariantId, | 10 | AsMacroCall, FunctionId, TraitId, VariantId, |
11 | }; | 11 | }; |
12 | use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; | 12 | use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; |
@@ -22,12 +22,11 @@ use crate::{ | |||
22 | db::HirDatabase, | 22 | db::HirDatabase, |
23 | diagnostics::Diagnostic, | 23 | diagnostics::Diagnostic, |
24 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, | 24 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, |
25 | source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer}, | 25 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, |
26 | AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, | 26 | AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, |
27 | Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef, | 27 | Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef, |
28 | VariantDef, | 28 | VariantDef, |
29 | }; | 29 | }; |
30 | use resolver::TypeNs; | ||
31 | 30 | ||
32 | #[derive(Debug, Clone, PartialEq, Eq)] | 31 | #[derive(Debug, Clone, PartialEq, Eq)] |
33 | pub enum PathResolution { | 32 | pub enum PathResolution { |
@@ -112,14 +111,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
112 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | 111 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { |
113 | self.imp.expand(macro_call) | 112 | self.imp.expand(macro_call) |
114 | } | 113 | } |
115 | 114 | pub fn speculative_expand( | |
116 | pub fn expand_hypothetical( | ||
117 | &self, | 115 | &self, |
118 | actual_macro_call: &ast::MacroCall, | 116 | actual_macro_call: &ast::MacroCall, |
119 | hypothetical_args: &ast::TokenTree, | 117 | hypothetical_args: &ast::TokenTree, |
120 | token_to_map: SyntaxToken, | 118 | token_to_map: SyntaxToken, |
121 | ) -> Option<(SyntaxNode, SyntaxToken)> { | 119 | ) -> Option<(SyntaxNode, SyntaxToken)> { |
122 | self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map) | 120 | self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map) |
123 | } | 121 | } |
124 | 122 | ||
125 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | 123 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { |
@@ -229,10 +227,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
229 | self.imp.resolve_variant(record_lit).map(VariantDef::from) | 227 | self.imp.resolve_variant(record_lit).map(VariantDef::from) |
230 | } | 228 | } |
231 | 229 | ||
232 | pub fn lower_path(&self, path: &ast::Path) -> Option<Path> { | ||
233 | self.imp.lower_path(path) | ||
234 | } | ||
235 | |||
236 | pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { | 230 | pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { |
237 | self.imp.resolve_bind_pat_to_const(pat) | 231 | self.imp.resolve_bind_pat_to_const(pat) |
238 | } | 232 | } |
@@ -311,7 +305,7 @@ impl<'db> SemanticsImpl<'db> { | |||
311 | Some(node) | 305 | Some(node) |
312 | } | 306 | } |
313 | 307 | ||
314 | fn expand_hypothetical( | 308 | fn speculative_expand( |
315 | &self, | 309 | &self, |
316 | actual_macro_call: &ast::MacroCall, | 310 | actual_macro_call: &ast::MacroCall, |
317 | hypothetical_args: &ast::TokenTree, | 311 | hypothetical_args: &ast::TokenTree, |
@@ -468,11 +462,6 @@ impl<'db> SemanticsImpl<'db> { | |||
468 | self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) | 462 | self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) |
469 | } | 463 | } |
470 | 464 | ||
471 | fn lower_path(&self, path: &ast::Path) -> Option<Path> { | ||
472 | let src = self.find_file(path.syntax().clone()); | ||
473 | Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) | ||
474 | } | ||
475 | |||
476 | fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { | 465 | fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { |
477 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) | 466 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) |
478 | } | 467 | } |
@@ -756,31 +745,10 @@ impl<'a> SemanticsScope<'a> { | |||
756 | 745 | ||
757 | /// Resolve a path as-if it was written at the given scope. This is | 746 | /// Resolve a path as-if it was written at the given scope. This is |
758 | /// necessary a heuristic, as it doesn't take hygiene into account. | 747 | /// necessary a heuristic, as it doesn't take hygiene into account. |
759 | pub fn resolve_hypothetical(&self, path: &ast::Path) -> Option<PathResolution> { | 748 | pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> { |
760 | let hygiene = Hygiene::new(self.db.upcast(), self.file_id); | 749 | let hygiene = Hygiene::new(self.db.upcast(), self.file_id); |
761 | let path = Path::from_src(path.clone(), &hygiene)?; | 750 | let path = Path::from_src(path.clone(), &hygiene)?; |
762 | self.resolve_hir_path(&path) | 751 | resolve_hir_path(self.db, &self.resolver, &path) |
763 | } | ||
764 | |||
765 | pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> { | ||
766 | resolve_hir_path(self.db, &self.resolver, path) | ||
767 | } | ||
768 | |||
769 | /// Resolves a path where we know it is a qualifier of another path. | ||
770 | /// | ||
771 | /// For example, if we have: | ||
772 | /// ``` | ||
773 | /// mod my { | ||
774 | /// pub mod foo { | ||
775 | /// struct Bar; | ||
776 | /// } | ||
777 | /// | ||
778 | /// pub fn foo() {} | ||
779 | /// } | ||
780 | /// ``` | ||
781 | /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. | ||
782 | pub fn resolve_hir_path_qualifier(&self, path: &Path) -> Option<PathResolution> { | ||
783 | resolve_hir_path_qualifier(self.db, &self.resolver, path) | ||
784 | } | 752 | } |
785 | } | 753 | } |
786 | 754 | ||
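`expand_hypothetical` and `resolve_hypothetical` are now `speculative_expand` and `speculative_resolve`; the completion change below is the in-tree caller of the former, and the assist changes above switch to the scope-level `SemanticsScope::speculative_resolve`. A hedged sketch of how a caller drives the speculative expansion (the helper and its `RootDatabase` parameterization are illustrative):

```rust
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{ast, SyntaxNode, SyntaxToken};

// Re-expand an existing macro call with substitute arguments and map a token
// of interest (e.g. a fake completion identifier) into the new expansion.
fn expand_with_fake_args(
    sema: &Semantics<RootDatabase>,
    call: &ast::MacroCall,
    fake_args: &ast::TokenTree,
    token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    sema.speculative_expand(call, fake_args, token_to_map)
}
```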
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 8750584f9..1d13c4f1d 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -13,6 +13,7 @@ use hir_def::{ | |||
13 | Body, BodySourceMap, | 13 | Body, BodySourceMap, |
14 | }, | 14 | }, |
15 | expr::{ExprId, Pat, PatId}, | 15 | expr::{ExprId, Pat, PatId}, |
16 | path::{ModPath, Path, PathKind}, | ||
16 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, | 17 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, |
17 | AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, | 18 | AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, |
18 | }; | 19 | }; |
@@ -28,8 +29,7 @@ use syntax::{ | |||
28 | 29 | ||
29 | use crate::{ | 30 | use crate::{ |
30 | db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, | 31 | db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, |
31 | MacroDef, ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, | 32 | MacroDef, ModuleDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, |
32 | TypeParam, | ||
33 | }; | 33 | }; |
34 | use base_db::CrateId; | 34 | use base_db::CrateId; |
35 | 35 | ||
@@ -508,7 +508,7 @@ pub(crate) fn resolve_hir_path( | |||
508 | /// } | 508 | /// } |
509 | /// ``` | 509 | /// ``` |
510 | /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. | 510 | /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. |
511 | pub(crate) fn resolve_hir_path_qualifier( | 511 | fn resolve_hir_path_qualifier( |
512 | db: &dyn HirDatabase, | 512 | db: &dyn HirDatabase, |
513 | resolver: &Resolver, | 513 | resolver: &Resolver, |
514 | path: &Path, | 514 | path: &Path, |
diff --git a/crates/hir_def/src/path.rs b/crates/hir_def/src/path.rs
index 74d26f08b..99395667d 100644
--- a/crates/hir_def/src/path.rs
+++ b/crates/hir_def/src/path.rs
@@ -154,12 +154,6 @@ pub enum GenericArg { | |||
154 | 154 | ||
155 | impl Path { | 155 | impl Path { |
156 | /// Converts an `ast::Path` to `Path`. Works with use trees. | 156 | /// Converts an `ast::Path` to `Path`. Works with use trees. |
157 | #[deprecated = "Doesn't handle hygiene, don't add new calls, remove old ones"] | ||
158 | pub fn from_ast(path: ast::Path) -> Option<Path> { | ||
159 | lower::lower_path(path, &Hygiene::new_unhygienic()) | ||
160 | } | ||
161 | |||
162 | /// Converts an `ast::Path` to `Path`. Works with use trees. | ||
163 | /// It correctly handles `$crate` based path from macro call. | 157 | /// It correctly handles `$crate` based path from macro call. |
164 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<Path> { | 158 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<Path> { |
165 | lower::lower_path(path, hygiene) | 159 | lower::lower_path(path, hygiene) |
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml
index 83b5013a9..a319b0ce8 100644
--- a/crates/hir_ty/Cargo.toml
+++ b/crates/hir_ty/Cargo.toml
@@ -16,9 +16,9 @@ ena = "0.14.0" | |||
16 | log = "0.4.8" | 16 | log = "0.4.8" |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | scoped-tls = "1" | 18 | scoped-tls = "1" |
19 | chalk-solve = { version = "0.21.0" } | 19 | chalk-solve = { version = "0.23.0" } |
20 | chalk-ir = { version = "0.21.0" } | 20 | chalk-ir = { version = "0.23.0" } |
21 | chalk-recursive = { version = "0.21.0" } | 21 | chalk-recursive = { version = "0.23.0" } |
22 | 22 | ||
23 | stdx = { path = "../stdx" } | 23 | stdx = { path = "../stdx" } |
24 | hir_def = { path = "../hir_def" } | 24 | hir_def = { path = "../hir_def" } |
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs
index fb76e2e4e..278a4b947 100644
--- a/crates/hir_ty/src/diagnostics/expr.rs
+++ b/crates/hir_ty/src/diagnostics/expr.rs
@@ -223,10 +223,10 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
223 | db.body_with_source_map(self.owner.into()); | 223 | db.body_with_source_map(self.owner.into()); |
224 | 224 | ||
225 | let match_expr_ty = match infer.type_of_expr.get(match_expr) { | 225 | let match_expr_ty = match infer.type_of_expr.get(match_expr) { |
226 | Some(ty) => ty, | ||
227 | // If we can't resolve the type of the match expression | 226 | // If we can't resolve the type of the match expression |
228 | // we cannot perform exhaustiveness checks. | 227 | // we cannot perform exhaustiveness checks. |
229 | None => return, | 228 | None | Some(Ty::Unknown) => return, |
229 | Some(ty) => ty, | ||
230 | }; | 230 | }; |
231 | 231 | ||
232 | let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; | 232 | let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db }; |
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index 7f007f1d6..5bd03f2ac 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -1335,6 +1335,23 @@ fn panic(a: Category, b: Category) { | |||
1335 | ); | 1335 | ); |
1336 | } | 1336 | } |
1337 | 1337 | ||
1338 | #[test] | ||
1339 | fn unknown_type() { | ||
1340 | check_diagnostics( | ||
1341 | r#" | ||
1342 | enum Option<T> { Some(T), None } | ||
1343 | |||
1344 | fn main() { | ||
1345 | // `Never` is deliberately not defined so that it's an uninferred type. | ||
1346 | match Option::<Never>::None { | ||
1347 | None => (), | ||
1348 | Some(never) => match never {}, | ||
1349 | } | ||
1350 | } | ||
1351 | "#, | ||
1352 | ); | ||
1353 | } | ||
1354 | |||
1338 | mod false_negatives { | 1355 | mod false_negatives { |
1339 | //! The implementation of match checking here is a work in progress. As we roll this out, we | 1356 | //! The implementation of match checking here is a work in progress. As we roll this out, we |
1340 | //! prefer false negatives to false positives (ideally there would be no false positives). This | 1357 | //! prefer false negatives to false positives (ideally there would be no false positives). This |
diff --git a/crates/hir_ty/src/traits.rs b/crates/hir_ty/src/traits.rs
index 255323717..14cd3a2b4 100644
--- a/crates/hir_ty/src/traits.rs
+++ b/crates/hir_ty/src/traits.rs
@@ -3,7 +3,7 @@ use std::sync::Arc; | |||
3 | 3 | ||
4 | use base_db::CrateId; | 4 | use base_db::CrateId; |
5 | use chalk_ir::cast::Cast; | 5 | use chalk_ir::cast::Cast; |
6 | use chalk_solve::Solver; | 6 | use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver}; |
7 | use hir_def::{lang_item::LangItemTarget, TraitId}; | 7 | use hir_def::{lang_item::LangItemTarget, TraitId}; |
8 | 8 | ||
9 | use crate::{db::HirDatabase, DebruijnIndex, Substs}; | 9 | use crate::{db::HirDatabase, DebruijnIndex, Substs}; |
@@ -166,16 +166,25 @@ fn solve( | |||
166 | } | 166 | } |
167 | remaining > 0 | 167 | remaining > 0 |
168 | }; | 168 | }; |
169 | |||
169 | let mut solve = || { | 170 | let mut solve = || { |
170 | let solution = solver.solve_limited(&context, goal, should_continue); | 171 | if is_chalk_print() { |
171 | log::debug!("solve({:?}) => {:?}", goal, solution); | 172 | let logging_db = LoggingRustIrDatabase::new(context); |
172 | solution | 173 | let solution = solver.solve_limited(&logging_db, goal, &should_continue); |
174 | log::debug!("chalk program:\n{}", logging_db); | ||
175 | solution | ||
176 | } else { | ||
177 | solver.solve_limited(&context, goal, &should_continue) | ||
178 | } | ||
173 | }; | 179 | }; |
180 | |||
174 | // don't set the TLS for Chalk unless Chalk debugging is active, to make | 181 | // don't set the TLS for Chalk unless Chalk debugging is active, to make |
175 | // extra sure we only use it for debugging | 182 | // extra sure we only use it for debugging |
176 | let solution = | 183 | let solution = |
177 | if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() }; | 184 | if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() }; |
178 | 185 | ||
186 | log::debug!("solve({:?}) => {:?}", goal, solution); | ||
187 | |||
179 | solution | 188 | solution |
180 | } | 189 | } |
181 | 190 | ||
@@ -183,6 +192,10 @@ fn is_chalk_debug() -> bool { | |||
183 | std::env::var("CHALK_DEBUG").is_ok() | 192 | std::env::var("CHALK_DEBUG").is_ok() |
184 | } | 193 | } |
185 | 194 | ||
195 | fn is_chalk_print() -> bool { | ||
196 | std::env::var("CHALK_PRINT").is_ok() | ||
197 | } | ||
198 | |||
186 | fn solution_from_chalk( | 199 | fn solution_from_chalk( |
187 | db: &dyn HirDatabase, | 200 | db: &dyn HirDatabase, |
188 | solution: chalk_solve::Solution<Interner>, | 201 | solution: chalk_solve::Solution<Interner>, |
diff --git a/crates/hir_ty/src/traits/chalk.rs b/crates/hir_ty/src/traits/chalk.rs
index b33653417..17c83b6a4 100644
--- a/crates/hir_ty/src/traits/chalk.rs
+++ b/crates/hir_ty/src/traits/chalk.rs
@@ -240,20 +240,23 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
240 | Substs::empty().to_chalk(self.db) | 240 | Substs::empty().to_chalk(self.db) |
241 | } | 241 | } |
242 | 242 | ||
243 | fn trait_name(&self, _trait_id: chalk_ir::TraitId<Interner>) -> String { | 243 | fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String { |
244 | unimplemented!() | 244 | let id = from_chalk(self.db, trait_id); |
245 | self.db.trait_data(id).name.to_string() | ||
245 | } | 246 | } |
246 | fn adt_name(&self, _struct_id: chalk_ir::AdtId<Interner>) -> String { | 247 | // FIXME: lookup names |
247 | unimplemented!() | 248 | fn adt_name(&self, struct_id: chalk_ir::AdtId<Interner>) -> String { |
249 | let datum = self.db.struct_datum(self.krate, struct_id); | ||
250 | format!("{:?}", datum.name(&Interner)) | ||
248 | } | 251 | } |
249 | fn assoc_type_name(&self, _assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String { | 252 | fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String { |
250 | unimplemented!() | 253 | format!("Assoc_{}", assoc_ty_id.0) |
251 | } | 254 | } |
252 | fn opaque_type_name(&self, _opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String { | 255 | fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String { |
253 | unimplemented!() | 256 | format!("Opaque_{}", opaque_ty_id.0) |
254 | } | 257 | } |
255 | fn fn_def_name(&self, _fn_def_id: chalk_ir::FnDefId<Interner>) -> String { | 258 | fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String { |
256 | unimplemented!() | 259 | format!("fn_{}", fn_def_id.0) |
257 | } | 260 | } |
258 | } | 261 | } |
259 | 262 | ||
diff --git a/crates/ide/src/completion/completion_context.rs b/crates/ide/src/completion/completion_context.rs
index 3857dce67..85456a66f 100644
--- a/crates/ide/src/completion/completion_context.rs
+++ b/crates/ide/src/completion/completion_context.rs
@@ -185,7 +185,7 @@ impl<'a> CompletionContext<'a> { | |||
185 | }; | 185 | }; |
186 | if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( | 186 | if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( |
187 | ctx.sema.expand(&actual_macro_call), | 187 | ctx.sema.expand(&actual_macro_call), |
188 | ctx.sema.expand_hypothetical( | 188 | ctx.sema.speculative_expand( |
189 | &actual_macro_call, | 189 | &actual_macro_call, |
190 | &hypothetical_args, | 190 | &hypothetical_args, |
191 | fake_ident_token, | 191 | fake_ident_token, |
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 002adf915..596bc872d 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -43,7 +43,7 @@ pub struct InlayHint { | |||
43 | // rust-analyzer shows additional information inline with the source code. | 43 | // rust-analyzer shows additional information inline with the source code. |
44 | // Editors usually render this using read-only virtual text snippets interspersed with code. | 44 | // Editors usually render this using read-only virtual text snippets interspersed with code. |
45 | // | 45 | // |
46 | // rust-analyzer shows hits for | 46 | // rust-analyzer shows hints for |
47 | // | 47 | // |
48 | // * types of local variables | 48 | // * types of local variables |
49 | // * names of function arguments | 49 | // * names of function arguments |
diff --git a/crates/proc_macro_api/src/lib.rs b/crates/proc_macro_api/src/lib.rs
index 15db57eb2..d5e87cf7d 100644
--- a/crates/proc_macro_api/src/lib.rs
+++ b/crates/proc_macro_api/src/lib.rs
@@ -89,9 +89,8 @@ impl ProcMacroClient { | |||
89 | macros | 89 | macros |
90 | .into_iter() | 90 | .into_iter() |
91 | .filter_map(|(name, kind)| { | 91 | .filter_map(|(name, kind)| { |
92 | // FIXME: Support custom derive only for now. | ||
93 | match kind { | 92 | match kind { |
94 | ProcMacroKind::CustomDerive => { | 93 | ProcMacroKind::CustomDerive | ProcMacroKind::FuncLike => { |
95 | let name = SmolStr::new(&name); | 94 | let name = SmolStr::new(&name); |
96 | let expander: Arc<dyn tt::TokenExpander> = | 95 | let expander: Arc<dyn tt::TokenExpander> = |
97 | Arc::new(ProcMacroProcessExpander { | 96 | Arc::new(ProcMacroProcessExpander { |
@@ -101,7 +100,8 @@ impl ProcMacroClient { | |||
101 | }); | 100 | }); |
102 | Some((name, expander)) | 101 | Some((name, expander)) |
103 | } | 102 | } |
104 | _ => None, | 103 | // FIXME: Attribute macro are currently unsupported. |
104 | ProcMacroKind::Attr => None, | ||
105 | } | 105 | } |
106 | }) | 106 | }) |
107 | .collect() | 107 | .collect() |
diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml
index 7171f0808..a468b5560 100644
--- a/crates/proc_macro_srv/Cargo.toml
+++ b/crates/proc_macro_srv/Cargo.toml
@@ -21,7 +21,9 @@ test_utils = { path = "../test_utils" } | |||
21 | [dev-dependencies] | 21 | [dev-dependencies] |
22 | cargo_metadata = "0.11.1" | 22 | cargo_metadata = "0.11.1" |
23 | difference = "2.0.0" | 23 | difference = "2.0.0" |
24 | # used as proc macro test target | 24 | |
25 | # used as proc macro test targets | ||
25 | serde_derive = "1.0.106" | 26 | serde_derive = "1.0.106" |
27 | proc_macro_test = { path = "../proc_macro_test" } | ||
26 | 28 | ||
27 | toolchain = { path = "../toolchain" } | 29 | toolchain = { path = "../toolchain" } |
diff --git a/crates/proc_macro_srv/src/tests/mod.rs b/crates/proc_macro_srv/src/tests/mod.rs
index 8e6f28abd..1a827cbd7 100644
--- a/crates/proc_macro_srv/src/tests/mod.rs
+++ b/crates/proc_macro_srv/src/tests/mod.rs
@@ -35,7 +35,7 @@ SUBTREE $ | |||
35 | 35 | ||
36 | #[test] | 36 | #[test] |
37 | fn test_derive_proc_macro_list() { | 37 | fn test_derive_proc_macro_list() { |
38 | let res = list("serde_derive", "1.0").join("\n"); | 38 | let res = list("serde_derive", "1").join("\n"); |
39 | 39 | ||
40 | assert_eq_text!( | 40 | assert_eq_text!( |
41 | &res, | 41 | &res, |
@@ -43,3 +43,16 @@ fn test_derive_proc_macro_list() { | |||
43 | Deserialize [CustomDerive]"# | 43 | Deserialize [CustomDerive]"# |
44 | ); | 44 | ); |
45 | } | 45 | } |
46 | |||
47 | /// Tests that we find and classify non-derive macros correctly. | ||
48 | #[test] | ||
49 | fn list_test_macros() { | ||
50 | let res = list("proc_macro_test", "0.0.0").join("\n"); | ||
51 | |||
52 | assert_eq_text!( | ||
53 | &res, | ||
54 | r#"function_like_macro [FuncLike] | ||
55 | attribute_macro [Attr] | ||
56 | DummyTrait [CustomDerive]"# | ||
57 | ); | ||
58 | } | ||
diff --git a/crates/proc_macro_srv/src/tests/utils.rs b/crates/proc_macro_srv/src/tests/utils.rs
index 5828512d6..36942147d 100644
--- a/crates/proc_macro_srv/src/tests/utils.rs
+++ b/crates/proc_macro_srv/src/tests/utils.rs
@@ -13,7 +13,7 @@ mod fixtures { | |||
13 | // Use current project metadata to get the proc-macro dylib path | 13 | // Use current project metadata to get the proc-macro dylib path |
14 | pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf { | 14 | pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf { |
15 | let command = Command::new(toolchain::cargo()) | 15 | let command = Command::new(toolchain::cargo()) |
16 | .args(&["check", "--message-format", "json"]) | 16 | .args(&["check", "--tests", "--message-format", "json"]) |
17 | .output() | 17 | .output() |
18 | .unwrap() | 18 | .unwrap() |
19 | .stdout; | 19 | .stdout; |
diff --git a/crates/proc_macro_test/Cargo.toml b/crates/proc_macro_test/Cargo.toml new file mode 100644 index 000000000..7b0f64f31 --- /dev/null +++ b/crates/proc_macro_test/Cargo.toml | |||
@@ -0,0 +1,10 @@ | |||
1 | [package] | ||
2 | name = "proc_macro_test" | ||
3 | version = "0.0.0" | ||
4 | license = "MIT OR Apache-2.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | edition = "2018" | ||
7 | |||
8 | [lib] | ||
9 | doctest = false | ||
10 | proc-macro = true | ||
diff --git a/crates/proc_macro_test/src/lib.rs b/crates/proc_macro_test/src/lib.rs new file mode 100644 index 000000000..ec2a114a3 --- /dev/null +++ b/crates/proc_macro_test/src/lib.rs | |||
@@ -0,0 +1,18 @@ | |||
1 | //! Exports a few trivial procedural macros for testing. | ||
2 | |||
3 | use proc_macro::TokenStream; | ||
4 | |||
5 | #[proc_macro] | ||
6 | pub fn function_like_macro(args: TokenStream) -> TokenStream { | ||
7 | args | ||
8 | } | ||
9 | |||
10 | #[proc_macro_attribute] | ||
11 | pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream { | ||
12 | item | ||
13 | } | ||
14 | |||
15 | #[proc_macro_derive(DummyTrait)] | ||
16 | pub fn derive_macro(_item: TokenStream) -> TokenStream { | ||
17 | TokenStream::new() | ||
18 | } | ||
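For orientation, a hedged sketch (editorial illustration, not part of the diff) of how a crate depending on `proc_macro_test` could exercise all three macros; since each macro is a pass-through or a no-op, every item below compiles to exactly what is written.

```rust
// Hypothetical downstream usage of the test macros defined above.
use proc_macro_test::{attribute_macro, function_like_macro, DummyTrait};

// The function-like macro returns its input, so this expands to `struct Passthrough;`.
function_like_macro!(struct Passthrough;);

// The attribute macro returns the annotated item unchanged.
#[attribute_macro]
fn annotated() {}

// The derive macro emits an empty token stream, so only the struct itself remains.
#[derive(DummyTrait)]
struct Unit;
```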
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 74f73655a..e05ffc768 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -859,10 +859,10 @@ pub(crate) fn handle_resolve_code_action( | |||
859 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); | 859 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); |
860 | 860 | ||
861 | let assists = snap.analysis.resolved_assists(&snap.config.assist, frange)?; | 861 | let assists = snap.analysis.resolved_assists(&snap.config.assist, frange)?; |
862 | let (id_string, index) = split_once(¶ms.id, ':').unwrap(); | 862 | let (id, index) = split_once(¶ms.id, ':').unwrap(); |
863 | let index = index.parse::<usize>().unwrap(); | 863 | let index = index.parse::<usize>().unwrap(); |
864 | let assist = &assists[index]; | 864 | let assist = &assists[index]; |
865 | assert!(assist.assist.id().0 == id_string); | 865 | assert!(assist.assist.id.0 == id); |
866 | Ok(to_proto::resolved_code_action(&snap, assist.clone())?.edit) | 866 | Ok(to_proto::resolved_code_action(&snap, assist.clone())?.edit) |
867 | } | 867 | } |
868 | 868 | ||
diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 3976b6529..e1a28b1b4 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs | |||
@@ -237,8 +237,13 @@ pub enum Status { | |||
237 | Invalid, | 237 | Invalid, |
238 | } | 238 | } |
239 | 239 | ||
240 | #[derive(Deserialize, Serialize)] | ||
241 | pub struct StatusParams { | ||
242 | pub status: Status, | ||
243 | } | ||
244 | |||
240 | impl Notification for StatusNotification { | 245 | impl Notification for StatusNotification { |
241 | type Params = Status; | 246 | type Params = StatusParams; |
242 | const METHOD: &'static str = "rust-analyzer/status"; | 247 | const METHOD: &'static str = "rust-analyzer/status"; |
243 | } | 248 | } |
244 | 249 | ||
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index a2cfb4e0d..505505a77 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs | |||
@@ -13,6 +13,7 @@ use crate::{ | |||
13 | lsp_ext, | 13 | lsp_ext, |
14 | main_loop::Task, | 14 | main_loop::Task, |
15 | }; | 15 | }; |
16 | use lsp_ext::StatusParams; | ||
16 | 17 | ||
17 | impl GlobalState { | 18 | impl GlobalState { |
18 | pub(crate) fn update_configuration(&mut self, config: Config) { | 19 | pub(crate) fn update_configuration(&mut self, config: Config) { |
@@ -85,7 +86,9 @@ impl GlobalState { | |||
85 | Status::Invalid => lsp_ext::Status::Invalid, | 86 | Status::Invalid => lsp_ext::Status::Invalid, |
86 | Status::NeedsReload => lsp_ext::Status::NeedsReload, | 87 | Status::NeedsReload => lsp_ext::Status::NeedsReload, |
87 | }; | 88 | }; |
88 | self.send_notification::<lsp_ext::StatusNotification>(lsp_status); | 89 | self.send_notification::<lsp_ext::StatusNotification>(StatusParams { |
90 | status: lsp_status, | ||
91 | }); | ||
89 | } | 92 | } |
90 | } | 93 | } |
91 | pub(crate) fn fetch_workspaces(&mut self) { | 94 | pub(crate) fn fetch_workspaces(&mut self) { |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 8a2cfa2ae..535de2f71 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -704,10 +704,10 @@ pub(crate) fn unresolved_code_action( | |||
704 | index: usize, | 704 | index: usize, |
705 | ) -> Result<lsp_ext::CodeAction> { | 705 | ) -> Result<lsp_ext::CodeAction> { |
706 | let res = lsp_ext::CodeAction { | 706 | let res = lsp_ext::CodeAction { |
707 | title: assist.label(), | 707 | title: assist.label().to_string(), |
708 | id: Some(format!("{}:{}", assist.id().0.to_owned(), index.to_string())), | 708 | id: Some(format!("{}:{}", assist.id.0, index.to_string())), |
709 | group: assist.group().filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0), | 709 | group: assist.group.filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0), |
710 | kind: Some(code_action_kind(assist.id().1)), | 710 | kind: Some(code_action_kind(assist.id.1)), |
711 | edit: None, | 711 | edit: None, |
712 | is_preferred: None, | 712 | is_preferred: None, |
713 | }; | 713 | }; |
diff --git a/crates/rust-analyzer/tests/heavy_tests/main.rs b/crates/rust-analyzer/tests/rust-analyzer/main.rs index 7370505f8..fa315ff8e 100644 --- a/crates/rust-analyzer/tests/heavy_tests/main.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/main.rs | |||
@@ -1,3 +1,13 @@ | |||
1 | //! The most high-level integrated tests for rust-analyzer. | ||
2 | //! | ||
3 | //! This tests run a full LSP event loop, spawn cargo and process stdlib from | ||
4 | //! sysroot. For this reason, the tests here are very slow, and should be | ||
5 | //! avoided unless absolutely necessary. | ||
6 | //! | ||
7 | //! In particular, it's fine *not* to test that client & server agree on | ||
8 | //! specific JSON shapes here -- there's little value in such tests, as we can't | ||
9 | //! be sure without a real client anyway. | ||
10 | |||
1 | mod testdir; | 11 | mod testdir; |
2 | mod support; | 12 | mod support; |
3 | 13 | ||
diff --git a/crates/rust-analyzer/tests/heavy_tests/support.rs b/crates/rust-analyzer/tests/rust-analyzer/support.rs index 5bafeba79..5bafeba79 100644 --- a/crates/rust-analyzer/tests/heavy_tests/support.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/support.rs | |||
diff --git a/crates/rust-analyzer/tests/heavy_tests/testdir.rs b/crates/rust-analyzer/tests/rust-analyzer/testdir.rs index 7487e7429..7487e7429 100644 --- a/crates/rust-analyzer/tests/heavy_tests/testdir.rs +++ b/crates/rust-analyzer/tests/rust-analyzer/testdir.rs | |||
diff --git a/crates/ssr/src/resolving.rs b/crates/ssr/src/resolving.rs index 4441fb426..b932132d5 100644 --- a/crates/ssr/src/resolving.rs +++ b/crates/ssr/src/resolving.rs | |||
@@ -212,13 +212,13 @@ impl<'db> ResolutionScope<'db> { | |||
212 | // First try resolving the whole path. This will work for things like | 212 | // First try resolving the whole path. This will work for things like |
213 | // `std::collections::HashMap`, but will fail for things like | 213 | // `std::collections::HashMap`, but will fail for things like |
214 | // `std::collections::HashMap::new`. | 214 | // `std::collections::HashMap::new`. |
215 | if let Some(resolution) = self.scope.resolve_hypothetical(&path) { | 215 | if let Some(resolution) = self.scope.speculative_resolve(&path) { |
216 | return Some(resolution); | 216 | return Some(resolution); |
217 | } | 217 | } |
218 | // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if | 218 | // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if |
219 | // that succeeds, then iterate through the candidates on the resolved type with the provided | 219 | // that succeeds, then iterate through the candidates on the resolved type with the provided |
220 | // name. | 220 | // name. |
221 | let resolved_qualifier = self.scope.resolve_hypothetical(&path.qualifier()?)?; | 221 | let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?; |
222 | if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { | 222 | if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { |
223 | let name = path.segment()?.name_ref()?; | 223 | let name = path.segment()?.name_ref()?; |
224 | adt.ty(self.scope.db).iterate_path_candidates( | 224 | adt.ty(self.scope.db).iterate_path_candidates( |
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 47e351f9d..ec3132da8 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml | |||
@@ -13,7 +13,7 @@ doctest = false | |||
13 | [dependencies] | 13 | [dependencies] |
14 | itertools = "0.9.0" | 14 | itertools = "0.9.0" |
15 | rowan = "0.10.0" | 15 | rowan = "0.10.0" |
16 | rustc_lexer = { version = "671.0.0", package = "rustc-ap-rustc_lexer" } | 16 | rustc_lexer = { version = "673.0.0", package = "rustc-ap-rustc_lexer" } |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | arrayvec = "0.5.1" | 18 | arrayvec = "0.5.1" |
19 | once_cell = "1.3.1" | 19 | once_cell = "1.3.1" |
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index 190746e09..060b20966 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs | |||
@@ -91,29 +91,52 @@ impl ast::AssocItemList { | |||
91 | res = make_multiline(res); | 91 | res = make_multiline(res); |
92 | } | 92 | } |
93 | items.into_iter().for_each(|it| res = res.append_item(it)); | 93 | items.into_iter().for_each(|it| res = res.append_item(it)); |
94 | res | 94 | res.fixup_trailing_whitespace().unwrap_or(res) |
95 | } | 95 | } |
96 | 96 | ||
97 | #[must_use] | 97 | #[must_use] |
98 | pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { | 98 | pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { |
99 | let (indent, position) = match self.assoc_items().last() { | 99 | let (indent, position, whitespace) = match self.assoc_items().last() { |
100 | Some(it) => ( | 100 | Some(it) => ( |
101 | leading_indent(it.syntax()).unwrap_or_default().to_string(), | 101 | leading_indent(it.syntax()).unwrap_or_default().to_string(), |
102 | InsertPosition::After(it.syntax().clone().into()), | 102 | InsertPosition::After(it.syntax().clone().into()), |
103 | "\n\n", | ||
103 | ), | 104 | ), |
104 | None => match self.l_curly_token() { | 105 | None => match self.l_curly_token() { |
105 | Some(it) => ( | 106 | Some(it) => ( |
106 | " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), | 107 | " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), |
107 | InsertPosition::After(it.into()), | 108 | InsertPosition::After(it.into()), |
109 | "\n", | ||
108 | ), | 110 | ), |
109 | None => return self.clone(), | 111 | None => return self.clone(), |
110 | }, | 112 | }, |
111 | }; | 113 | }; |
112 | let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); | 114 | let ws = tokens::WsBuilder::new(&format!("{}{}", whitespace, indent)); |
113 | let to_insert: ArrayVec<[SyntaxElement; 2]> = | 115 | let to_insert: ArrayVec<[SyntaxElement; 2]> = |
114 | [ws.ws().into(), item.syntax().clone().into()].into(); | 116 | [ws.ws().into(), item.syntax().clone().into()].into(); |
115 | self.insert_children(position, to_insert) | 117 | self.insert_children(position, to_insert) |
116 | } | 118 | } |
119 | |||
120 | /// Remove extra whitespace between last item and closing curly brace. | ||
121 | fn fixup_trailing_whitespace(&self) -> Option<ast::AssocItemList> { | ||
122 | let first_token_after_items = | ||
123 | self.assoc_items().last()?.syntax().next_sibling_or_token()?; | ||
124 | let last_token_before_curly = self.r_curly_token()?.prev_sibling_or_token()?; | ||
125 | if last_token_before_curly != first_token_after_items { | ||
126 | // there is something more between last item and | ||
127 | // right curly than just whitespace - bail out | ||
128 | return None; | ||
129 | } | ||
130 | let whitespace = | ||
131 | last_token_before_curly.clone().into_token().and_then(ast::Whitespace::cast)?; | ||
132 | let text = whitespace.syntax().text(); | ||
133 | let newline = text.rfind("\n")?; | ||
134 | let keep = tokens::WsBuilder::new(&text[newline..]); | ||
135 | Some(self.replace_children( | ||
136 | first_token_after_items..=last_token_before_curly, | ||
137 | std::iter::once(keep.ws().into()), | ||
138 | )) | ||
139 | } | ||
117 | } | 140 | } |
118 | 141 | ||
119 | impl ast::RecordExprFieldList { | 142 | impl ast::RecordExprFieldList { |
diff --git a/docs/dev/README.md b/docs/dev/README.md index ad18217f1..36edddc70 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -165,6 +165,11 @@ In general, API is centered around UI concerns -- the result of the call is what | |||
165 | The results are 100% Rust specific though. | 165 | The results are 100% Rust specific though. |
166 | Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. | 166 | Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. |
167 | 167 | ||
168 | ## CI | ||
169 | |||
170 | CI does not just test rust-analyzer: CI is a core part of rust-analyzer, and is maintained to an above-average standard of quality. | ||
171 | CI is reproducible -- it can only be broken by changes to files in this repository; any dependence on externalities is a bug. | ||
172 | |||
168 | # Code Style & Review Process | 173 | # Code Style & Review Process |
169 | 174 | ||
170 | Do see [./style.md](./style.md). | 175 | Do see [./style.md](./style.md). |
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 1be01fd88..2e3133449 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md | |||
@@ -412,7 +412,13 @@ Reloads project information (that is, re-executes `cargo metadata`). | |||
412 | 412 | ||
413 | **Method:** `rust-analyzer/status` | 413 | **Method:** `rust-analyzer/status` |
414 | 414 | ||
415 | **Notification:** `"loading" | "ready" | "invalid" | "needsReload"` | 415 | **Notification:** |
416 | |||
417 | ```typescript | ||
418 | interface StatusParams { | ||
419 | status: "loading" | "ready" | "invalid" | "needsReload", | ||
420 | } | ||
421 | ``` | ||
416 | 422 | ||
417 | This notification is sent from server to client. | 423 | This notification is sent from server to client. |
418 | The client can use it to display persistent status to the user (in modeline). | 424 | The client can use it to display persistent status to the user (in modeline). |
diff --git a/docs/dev/style.md b/docs/dev/style.md index 3bbab6da9..8effddcda 100644 --- a/docs/dev/style.md +++ b/docs/dev/style.md | |||
@@ -65,7 +65,7 @@ There are many benefits to this: | |||
65 | It also makes sense to format snippets more compactly (for example, by placing enum definitions like `enum E { Foo, Bar }` on a single line), | 65 | It also makes sense to format snippets more compactly (for example, by placing enum definitions like `enum E { Foo, Bar }` on a single line), |
66 | as long as they are still readable. | 66 | as long as they are still readable. |
67 | 67 | ||
68 | ## Order of Imports | 68 | # Order of Imports |
69 | 69 | ||
70 | Separate import groups with blank lines. | 70 | Separate import groups with blank lines. |
71 | Use one `use` per crate. | 71 | Use one `use` per crate. |
@@ -91,7 +91,7 @@ use super::{} | |||
91 | Module declarations come before the imports. | 91 | Module declarations come before the imports. |
92 | Order them in "suggested reading order" for a person new to the code base. | 92 | Order them in "suggested reading order" for a person new to the code base. |
93 | 93 | ||
94 | ## Import Style | 94 | # Import Style |
95 | 95 | ||
96 | Qualify items from `hir` and `ast`. | 96 | Qualify items from `hir` and `ast`. |
97 | 97 | ||
@@ -112,7 +112,7 @@ Avoid local `use MyEnum::*` imports. | |||
112 | 112 | ||
113 | Prefer `use crate::foo::bar` to `use super::bar`. | 113 | Prefer `use crate::foo::bar` to `use super::bar`. |
114 | 114 | ||
115 | ## Order of Items | 115 | # Order of Items |
116 | 116 | ||
117 | Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on. | 117 | Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on. |
118 | People read things from top to bottom, so place most important things first. | 118 | People read things from top to bottom, so place most important things first. |
@@ -143,7 +143,7 @@ struct Foo { | |||
143 | } | 143 | } |
144 | ``` | 144 | ``` |
145 | 145 | ||
146 | ## Variable Naming | 146 | # Variable Naming |
147 | 147 | ||
148 | Use boring and long names for local variables ([yay code completion](https://github.com/rust-analyzer/rust-analyzer/pull/4162#discussion_r417130973)). | 148 | Use boring and long names for local variables ([yay code completion](https://github.com/rust-analyzer/rust-analyzer/pull/4162#discussion_r417130973)). |
149 | The default name is a lowercased name of the type: `global_state: GlobalState`. | 149 | The default name is a lowercased name of the type: `global_state: GlobalState`. |
@@ -151,12 +151,12 @@ Avoid ad-hoc acronyms and contractions, but use the ones that exist consistently | |||
151 | The default name for "result of the function" local variable is `res`. | 151 | The default name for "result of the function" local variable is `res`. |
152 | The default name for "I don't really care about the name" variable is `it`. | 152 | The default name for "I don't really care about the name" variable is `it`. |
153 | 153 | ||
154 | ## Collection types | 154 | # Collection types |
155 | 155 | ||
156 | Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`. | 156 | Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`. |
157 | They use a hasher that's slightly faster and using them consistently will reduce code size by some small amount. | 157 | They use a hasher that's slightly faster and using them consistently will reduce code size by some small amount. |
158 | 158 | ||
159 | ## Preconditions | 159 | # Preconditions |
160 | 160 | ||
161 | Express function preconditions in types and force the caller to provide them (rather than checking in callee): | 161 | Express function preconditions in types and force the caller to provide them (rather than checking in callee): |
162 | 162 | ||
@@ -176,7 +176,36 @@ fn frobnicate(walrus: Option<Walrus>) { | |||
176 | } | 176 | } |
177 | ``` | 177 | ``` |
178 | 178 | ||
179 | ## Premature Pessimization | 179 | # Getters & Setters |
180 | |||
181 | If a field can have any value without breaking invariants, make the field public. | ||
182 | Conversely, if there is an invariant, document it, enforce it in the "constructor" function, make the field private, and provide a getter. | ||
183 | Never provide setters. | ||
184 | |||
185 | Getters should return borrowed data: | ||
186 | |||
187 | ``` | ||
188 | struct Person { | ||
189 | // Invariant: never empty | ||
190 | first_name: String, | ||
191 | middle_name: Option<String> | ||
192 | } | ||
193 | |||
194 | // Good | ||
195 | impl Person { | ||
196 | fn first_name(&self) -> &str { self.first_name.as_str() } | ||
197 | fn middle_name(&self) -> Option<&str> { self.middle_name.as_deref() } | ||
198 | } | ||
199 | |||
200 | // Not as good | ||
201 | impl Person { | ||
202 | fn first_name(&self) -> String { self.first_name.clone() } | ||
203 | fn middle_name(&self) -> &Option<String> { &self.middle_name } | ||
204 | } | ||
205 | ``` | ||
206 | |||
207 | |||
208 | # Premature Pessimization | ||
180 | 209 | ||
181 | Avoid writing code which is slower than it needs to be. | 210 | Avoid writing code which is slower than it needs to be. |
182 | Don't allocate a `Vec` where an iterator would do, don't allocate strings needlessly. | 211 | Don't allocate a `Vec` where an iterator would do, don't allocate strings needlessly. |
@@ -197,12 +226,12 @@ if words.len() != 2 { | |||
197 | } | 226 | } |
198 | ``` | 227 | ``` |
199 | 228 | ||
200 | ## Documentation | 229 | # Documentation |
201 | 230 | ||
202 | For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines. | 231 | For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines. |
203 | If the line is too long, you want to split the sentence in two :-) | 232 | If the line is too long, you want to split the sentence in two :-) |
204 | 233 | ||
205 | ## Commit Style | 234 | # Commit Style |
206 | 235 | ||
207 | We don't have specific rules around git history hygiene. | 236 | We don't have specific rules around git history hygiene. |
208 | Maintaining clean git history is encouraged, but not enforced. | 237 | Maintaining clean git history is encouraged, but not enforced. |
diff --git a/editors/code/.eslintignore b/editors/code/.eslintignore new file mode 100644 index 000000000..3df5c860b --- /dev/null +++ b/editors/code/.eslintignore | |||
@@ -0,0 +1,3 @@ | |||
1 | node_modules | ||
2 | .eslintrc.js | ||
3 | rollup.config.js \ No newline at end of file | ||
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 6e767babf..543f7e02e 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts | |||
@@ -36,7 +36,7 @@ export class Ctx { | |||
36 | 36 | ||
37 | res.pushCleanup(client.start()); | 37 | res.pushCleanup(client.start()); |
38 | await client.onReady(); | 38 | await client.onReady(); |
39 | client.onNotification(ra.status, (status) => res.setStatus(status)); | 39 | client.onNotification(ra.status, (params) => res.setStatus(params.status)); |
40 | return res; | 40 | return res; |
41 | } | 41 | } |
42 | 42 | ||
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts index 494d51c83..8663737a6 100644 --- a/editors/code/src/lsp_ext.ts +++ b/editors/code/src/lsp_ext.ts | |||
@@ -8,7 +8,10 @@ export const analyzerStatus = new lc.RequestType<null, string, void>("rust-analy | |||
8 | export const memoryUsage = new lc.RequestType<null, string, void>("rust-analyzer/memoryUsage"); | 8 | export const memoryUsage = new lc.RequestType<null, string, void>("rust-analyzer/memoryUsage"); |
9 | 9 | ||
10 | export type Status = "loading" | "ready" | "invalid" | "needsReload"; | 10 | export type Status = "loading" | "ready" | "invalid" | "needsReload"; |
11 | export const status = new lc.NotificationType<Status>("rust-analyzer/status"); | 11 | export interface StatusParams { |
12 | status: Status; | ||
13 | } | ||
14 | export const status = new lc.NotificationType<StatusParams>("rust-analyzer/status"); | ||
12 | 15 | ||
13 | export const reloadWorkspace = new lc.RequestType<null, null, void>("rust-analyzer/reloadWorkspace"); | 16 | export const reloadWorkspace = new lc.RequestType<null, null, void>("rust-analyzer/reloadWorkspace"); |
14 | 17 | ||
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 1a1140b04..e9edbdd10 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml | |||
@@ -18,3 +18,4 @@ quote = "1.0.2" | |||
18 | ungrammar = "1.1.1" | 18 | ungrammar = "1.1.1" |
19 | walkdir = "2.3.1" | 19 | walkdir = "2.3.1" |
20 | write-json = "0.1.0" | 20 | write-json = "0.1.0" |
21 | # Avoid adding more dependencies to this crate | ||
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index 78a84f68d..98acd7fa6 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs | |||
@@ -15,7 +15,11 @@ use std::{ | |||
15 | path::{Path, PathBuf}, | 15 | path::{Path, PathBuf}, |
16 | }; | 16 | }; |
17 | 17 | ||
18 | use crate::{not_bash::fs2, project_root, Result}; | 18 | use crate::{ |
19 | ensure_rustfmt, | ||
20 | not_bash::{fs2, pushenv, run}, | ||
21 | project_root, Result, | ||
22 | }; | ||
19 | 23 | ||
20 | pub use self::{ | 24 | pub use self::{ |
21 | gen_assists_docs::{generate_assists_docs, generate_assists_tests}, | 25 | gen_assists_docs::{generate_assists_docs, generate_assists_tests}, |
@@ -62,6 +66,18 @@ fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> { | |||
62 | } | 66 | } |
63 | } | 67 | } |
64 | 68 | ||
69 | const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`"; | ||
70 | |||
71 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | ||
72 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
73 | ensure_rustfmt()?; | ||
74 | let stdout = run!( | ||
75 | "rustfmt --config-path {} --config fn_single_line=true", project_root().join("rustfmt.toml").display(); | ||
76 | <text.to_string().as_bytes() | ||
77 | )?; | ||
78 | Ok(format!("//! {}\n\n{}\n", PREAMBLE, stdout)) | ||
79 | } | ||
80 | |||
65 | fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> { | 81 | fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> { |
66 | do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect() | 82 | do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect() |
67 | } | 83 | } |
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs index 526941f73..4f4968594 100644 --- a/xtask/src/codegen/gen_assists_docs.rs +++ b/xtask/src/codegen/gen_assists_docs.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use std::{fmt, fs, path::Path}; | 3 | use std::{fmt, fs, path::Path}; |
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode}, | 6 | codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, Mode, PREAMBLE}, |
7 | project_root, rust_files, Result, | 7 | project_root, rust_files, Result, |
8 | }; | 8 | }; |
9 | 9 | ||
@@ -15,7 +15,7 @@ pub fn generate_assists_tests(mode: Mode) -> Result<()> { | |||
15 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { | 15 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { |
16 | let assists = Assist::collect()?; | 16 | let assists = Assist::collect()?; |
17 | let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 17 | let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
18 | let contents = contents.trim().to_string() + "\n"; | 18 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
19 | let dst = project_root().join("docs/user/generated_assists.adoc"); | 19 | let dst = project_root().join("docs/user/generated_assists.adoc"); |
20 | codegen::update(&dst, &contents, mode) | 20 | codegen::update(&dst, &contents, mode) |
21 | } | 21 | } |
@@ -134,7 +134,7 @@ r#####" | |||
134 | 134 | ||
135 | buf.push_str(&test) | 135 | buf.push_str(&test) |
136 | } | 136 | } |
137 | let buf = crate::reformat(buf)?; | 137 | let buf = reformat(buf)?; |
138 | codegen::update(&project_root().join(codegen::ASSISTS_TESTS), &buf, mode) | 138 | codegen::update(&project_root().join(codegen::ASSISTS_TESTS), &buf, mode) |
139 | } | 139 | } |
140 | 140 | ||
diff --git a/xtask/src/codegen/gen_feature_docs.rs b/xtask/src/codegen/gen_feature_docs.rs index 31bc3839d..3f0013e82 100644 --- a/xtask/src/codegen/gen_feature_docs.rs +++ b/xtask/src/codegen/gen_feature_docs.rs | |||
@@ -3,14 +3,14 @@ | |||
3 | use std::{fmt, fs, path::PathBuf}; | 3 | use std::{fmt, fs, path::PathBuf}; |
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode}, | 6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode, PREAMBLE}, |
7 | project_root, rust_files, Result, | 7 | project_root, rust_files, Result, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | pub fn generate_feature_docs(mode: Mode) -> Result<()> { | 10 | pub fn generate_feature_docs(mode: Mode) -> Result<()> { |
11 | let features = Feature::collect()?; | 11 | let features = Feature::collect()?; |
12 | let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 12 | let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
13 | let contents = contents.trim().to_string() + "\n"; | 13 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
14 | let dst = project_root().join("docs/user/generated_features.adoc"); | 14 | let dst = project_root().join("docs/user/generated_features.adoc"); |
15 | codegen::update(&dst, &contents, mode)?; | 15 | codegen::update(&dst, &contents, mode)?; |
16 | Ok(()) | 16 | Ok(()) |
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs index dd1f4d6a2..df3ec22c8 100644 --- a/xtask/src/codegen/gen_syntax.rs +++ b/xtask/src/codegen/gen_syntax.rs | |||
@@ -14,7 +14,7 @@ use ungrammar::{rust_grammar, Grammar, Rule}; | |||
14 | 14 | ||
15 | use crate::{ | 15 | use crate::{ |
16 | ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}, | 16 | ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}, |
17 | codegen::{self, update, Mode}, | 17 | codegen::{self, reformat, update, Mode}, |
18 | project_root, Result, | 18 | project_root, Result, |
19 | }; | 19 | }; |
20 | 20 | ||
@@ -61,7 +61,7 @@ fn generate_tokens(grammar: &AstSrc) -> Result<String> { | |||
61 | } | 61 | } |
62 | }); | 62 | }); |
63 | 63 | ||
64 | let pretty = crate::reformat(quote! { | 64 | let pretty = reformat(quote! { |
65 | use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; | 65 | use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; |
66 | #(#tokens)* | 66 | #(#tokens)* |
67 | })? | 67 | })? |
@@ -261,7 +261,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> { | |||
261 | } | 261 | } |
262 | } | 262 | } |
263 | 263 | ||
264 | let pretty = crate::reformat(res)?; | 264 | let pretty = reformat(res)?; |
265 | Ok(pretty) | 265 | Ok(pretty) |
266 | } | 266 | } |
267 | 267 | ||
@@ -383,7 +383,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> { | |||
383 | } | 383 | } |
384 | }; | 384 | }; |
385 | 385 | ||
386 | crate::reformat(ast) | 386 | reformat(ast) |
387 | } | 387 | } |
388 | 388 | ||
389 | fn to_upper_snake_case(s: &str) -> String { | 389 | fn to_upper_snake_case(s: &str) -> String { |
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index 807ef587c..e790d995f 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -3,14 +3,15 @@ | |||
3 | //! See https://github.com/matklad/cargo-xtask/ | 3 | //! See https://github.com/matklad/cargo-xtask/ |
4 | 4 | ||
5 | pub mod not_bash; | 5 | pub mod not_bash; |
6 | pub mod codegen; | ||
7 | mod ast_src; | ||
8 | |||
6 | pub mod install; | 9 | pub mod install; |
7 | pub mod release; | 10 | pub mod release; |
8 | pub mod dist; | 11 | pub mod dist; |
9 | pub mod pre_commit; | 12 | pub mod pre_commit; |
10 | pub mod metrics; | 13 | pub mod metrics; |
11 | 14 | pub mod pre_cache; | |
12 | pub mod codegen; | ||
13 | mod ast_src; | ||
14 | 15 | ||
15 | use std::{ | 16 | use std::{ |
16 | env, | 17 | env, |
@@ -21,7 +22,7 @@ use walkdir::{DirEntry, WalkDir}; | |||
21 | 22 | ||
22 | use crate::{ | 23 | use crate::{ |
23 | codegen::Mode, | 24 | codegen::Mode, |
24 | not_bash::{fs2, pushd, pushenv, rm_rf}, | 25 | not_bash::{pushd, pushenv}, |
25 | }; | 26 | }; |
26 | 27 | ||
27 | pub use anyhow::{bail, Context as _, Result}; | 28 | pub use anyhow::{bail, Context as _, Result}; |
@@ -62,17 +63,6 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> { | |||
62 | Ok(()) | 63 | Ok(()) |
63 | } | 64 | } |
64 | 65 | ||
65 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | ||
66 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
67 | ensure_rustfmt()?; | ||
68 | let stdout = run!( | ||
69 | "rustfmt --config-path {} --config fn_single_line=true", project_root().join("rustfmt.toml").display(); | ||
70 | <text.to_string().as_bytes() | ||
71 | )?; | ||
72 | let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`"; | ||
73 | Ok(format!("//! {}\n\n{}\n", preamble, stdout)) | ||
74 | } | ||
75 | |||
76 | fn ensure_rustfmt() -> Result<()> { | 66 | fn ensure_rustfmt() -> Result<()> { |
77 | let out = run!("rustfmt --version")?; | 67 | let out = run!("rustfmt --version")?; |
78 | if !out.contains("stable") { | 68 | if !out.contains("stable") { |
@@ -119,42 +109,6 @@ pub fn run_fuzzer() -> Result<()> { | |||
119 | Ok(()) | 109 | Ok(()) |
120 | } | 110 | } |
121 | 111 | ||
122 | /// Cleans the `./target` dir after the build such that only | ||
123 | /// dependencies are cached on CI. | ||
124 | pub fn run_pre_cache() -> Result<()> { | ||
125 | let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); | ||
126 | if !slow_tests_cookie.exists() { | ||
127 | panic!("slow tests were skipped on CI!") | ||
128 | } | ||
129 | rm_rf(slow_tests_cookie)?; | ||
130 | |||
131 | for entry in Path::new("./target/debug").read_dir()? { | ||
132 | let entry = entry?; | ||
133 | if entry.file_type().map(|it| it.is_file()).ok() == Some(true) { | ||
134 | // Can't delete yourself on windows :-( | ||
135 | if !entry.path().ends_with("xtask.exe") { | ||
136 | rm_rf(&entry.path())? | ||
137 | } | ||
138 | } | ||
139 | } | ||
140 | |||
141 | fs2::remove_file("./target/.rustc_info.json")?; | ||
142 | let to_delete = ["hir", "heavy_test", "xtask", "ide", "rust-analyzer"]; | ||
143 | for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { | ||
144 | for entry in Path::new(dir).read_dir()? { | ||
145 | let entry = entry?; | ||
146 | if to_delete.iter().any(|&it| entry.path().display().to_string().contains(it)) { | ||
147 | // Can't delete yourself on windows :-( | ||
148 | if !entry.path().ends_with("xtask.exe") { | ||
149 | rm_rf(&entry.path())? | ||
150 | } | ||
151 | } | ||
152 | } | ||
153 | } | ||
154 | |||
155 | Ok(()) | ||
156 | } | ||
157 | |||
158 | fn is_release_tag(tag: &str) -> bool { | 112 | fn is_release_tag(tag: &str) -> bool { |
159 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) | 113 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) |
160 | } | 114 | } |
diff --git a/xtask/src/main.rs b/xtask/src/main.rs index b69b884e5..fb38fdc92 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs | |||
@@ -17,9 +17,10 @@ use xtask::{ | |||
17 | install::{ClientOpt, InstallCmd, Malloc, ServerOpt}, | 17 | install::{ClientOpt, InstallCmd, Malloc, ServerOpt}, |
18 | metrics::MetricsCmd, | 18 | metrics::MetricsCmd, |
19 | not_bash::pushd, | 19 | not_bash::pushd, |
20 | pre_cache::PreCacheCmd, | ||
20 | pre_commit, project_root, | 21 | pre_commit, project_root, |
21 | release::{PromoteCmd, ReleaseCmd}, | 22 | release::{PromoteCmd, ReleaseCmd}, |
22 | run_clippy, run_fuzzer, run_pre_cache, run_rustfmt, Result, | 23 | run_clippy, run_fuzzer, run_rustfmt, Result, |
23 | }; | 24 | }; |
24 | 25 | ||
25 | fn main() -> Result<()> { | 26 | fn main() -> Result<()> { |
@@ -100,7 +101,7 @@ FLAGS: | |||
100 | } | 101 | } |
101 | "pre-cache" => { | 102 | "pre-cache" => { |
102 | args.finish()?; | 103 | args.finish()?; |
103 | run_pre_cache() | 104 | PreCacheCmd.run() |
104 | } | 105 | } |
105 | "release" => { | 106 | "release" => { |
106 | let dry_run = args.contains("--dry-run"); | 107 | let dry_run = args.contains("--dry-run"); |
diff --git a/xtask/src/pre_cache.rs b/xtask/src/pre_cache.rs new file mode 100644 index 000000000..47ba6ba24 --- /dev/null +++ b/xtask/src/pre_cache.rs | |||
@@ -0,0 +1,80 @@ | |||
1 | use std::{ | ||
2 | fs::FileType, | ||
3 | path::{Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use anyhow::Result; | ||
7 | |||
8 | use crate::not_bash::{fs2, rm_rf}; | ||
9 | |||
10 | pub struct PreCacheCmd; | ||
11 | |||
12 | impl PreCacheCmd { | ||
13 | /// Cleans the `./target` dir after the build such that only | ||
14 | /// dependencies are cached on CI. | ||
15 | pub fn run(self) -> Result<()> { | ||
16 | let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); | ||
17 | if !slow_tests_cookie.exists() { | ||
18 | panic!("slow tests were skipped on CI!") | ||
19 | } | ||
20 | rm_rf(slow_tests_cookie)?; | ||
21 | |||
22 | for path in read_dir("./target/debug", FileType::is_file)? { | ||
23 | // Can't delete yourself on windows :-( | ||
24 | if !path.ends_with("xtask.exe") { | ||
25 | rm_rf(&path)? | ||
26 | } | ||
27 | } | ||
28 | |||
29 | fs2::remove_file("./target/.rustc_info.json")?; | ||
30 | |||
31 | let to_delete = read_dir("./crates", FileType::is_dir)? | ||
32 | .into_iter() | ||
33 | .map(|path| path.file_name().unwrap().to_string_lossy().replace('-', "_")) | ||
34 | .collect::<Vec<_>>(); | ||
35 | |||
36 | for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { | ||
37 | for path in read_dir(dir, |_file_type| true)? { | ||
38 | if path.ends_with("xtask.exe") { | ||
39 | continue; | ||
40 | } | ||
41 | let file_name = path.file_name().unwrap().to_string_lossy(); | ||
42 | let (stem, _) = match rsplit_once(&file_name, '-') { | ||
43 | Some(it) => it, | ||
44 | None => { | ||
45 | rm_rf(path)?; | ||
46 | continue; | ||
47 | } | ||
48 | }; | ||
49 | let stem = stem.replace('-', "_"); | ||
50 | if to_delete.contains(&stem) { | ||
51 | rm_rf(path)?; | ||
52 | } | ||
53 | } | ||
54 | } | ||
55 | |||
56 | Ok(()) | ||
57 | } | ||
58 | } | ||
59 | fn read_dir(path: impl AsRef<Path>, cond: impl Fn(&FileType) -> bool) -> Result<Vec<PathBuf>> { | ||
60 | read_dir_impl(path.as_ref(), &cond) | ||
61 | } | ||
62 | |||
63 | fn read_dir_impl(path: &Path, cond: &dyn Fn(&FileType) -> bool) -> Result<Vec<PathBuf>> { | ||
64 | let mut res = Vec::new(); | ||
65 | for entry in path.read_dir()? { | ||
66 | let entry = entry?; | ||
67 | let file_type = entry.file_type()?; | ||
68 | if cond(&file_type) { | ||
69 | res.push(entry.path()) | ||
70 | } | ||
71 | } | ||
72 | Ok(res) | ||
73 | } | ||
74 | |||
75 | fn rsplit_once(haystack: &str, delim: char) -> Option<(&str, &str)> { | ||
76 | let mut split = haystack.rsplitn(2, delim); | ||
77 | let suffix = split.next()?; | ||
78 | let prefix = split.next()?; | ||
79 | Some((prefix, suffix)) | ||
80 | } | ||
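As an aside (an illustrative sketch, not part of the diff), `rsplit_once` splits on the last occurrence of the delimiter, which is what lets the crate stem be recovered from hashed artifact names; the hash suffixes below are made up.

```rust
#[test]
fn rsplit_once_splits_on_the_last_delimiter() {
    // A typical deps/ artifact name: stem plus a metadata hash.
    assert_eq!(rsplit_once("proc_macro_test-1a2b3c", '-'), Some(("proc_macro_test", "1a2b3c")));
    // Stems may themselves contain '-'; only the last one is split off.
    assert_eq!(rsplit_once("rust-analyzer-deadbeef", '-'), Some(("rust-analyzer", "deadbeef")));
    // No delimiter at all yields None, and the caller just removes such files.
    assert_eq!(rsplit_once("no_hash_here", '-'), None);
}
```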
diff --git a/xtask/tests/tidy.rs b/xtask/tests/tidy.rs index 76895aeca..ebd42cef7 100644 --- a/xtask/tests/tidy.rs +++ b/xtask/tests/tidy.rs | |||
@@ -82,7 +82,7 @@ MIT/Apache-2.0 | |||
82 | MIT/Apache-2.0 AND BSD-2-Clause | 82 | MIT/Apache-2.0 AND BSD-2-Clause |
83 | Unlicense OR MIT | 83 | Unlicense OR MIT |
84 | Unlicense/MIT | 84 | Unlicense/MIT |
85 | Zlib | 85 | Zlib OR Apache-2.0 OR MIT |
86 | " | 86 | " |
87 | .lines() | 87 | .lines() |
88 | .filter(|it| !it.is_empty()) | 88 | .filter(|it| !it.is_empty()) |