diff options
60 files changed, 1347 insertions, 534 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index df34dfdb8..c055d113d 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml | |||
@@ -49,12 +49,16 @@ jobs: | |||
49 | node-version: 12.x | 49 | node-version: 12.x |
50 | 50 | ||
51 | - name: Dist | 51 | - name: Dist |
52 | if: github.ref == 'refs/heads/release' | 52 | if: matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/release' |
53 | run: cargo xtask dist --version 0.2.$GITHUB_RUN_NUMBER --tag $(date --iso --utc) | 53 | run: cargo xtask dist --client --version 0.2.$GITHUB_RUN_NUMBER --tag $(date --iso --utc) |
54 | 54 | ||
55 | - name: Dist | 55 | - name: Dist |
56 | if: github.ref != 'refs/heads/release' | 56 | if: matrix.os == 'ubuntu-latest' && github.ref != 'refs/heads/release' |
57 | run: cargo xtask dist --version 0.3.$GITHUB_RUN_NUMBER-nightly --tag nightly | 57 | run: cargo xtask dist --client --version 0.3.$GITHUB_RUN_NUMBER-nightly --tag nightly |
58 | |||
59 | - name: Dist | ||
60 | if: matrix.os != 'ubuntu-latest' | ||
61 | run: cargo xtask dist | ||
58 | 62 | ||
59 | - name: Upload artifacts | 63 | - name: Upload artifacts |
60 | uses: actions/upload-artifact@v1 | 64 | uses: actions/upload-artifact@v1 |
diff --git a/Cargo.lock b/Cargo.lock index 3f4d9204c..908319f87 100644 --- a/Cargo.lock +++ b/Cargo.lock | |||
@@ -170,7 +170,7 @@ dependencies = [ | |||
170 | "chalk-macros", | 170 | "chalk-macros", |
171 | "chalk-rust-ir", | 171 | "chalk-rust-ir", |
172 | "ena", | 172 | "ena", |
173 | "itertools", | 173 | "itertools 0.8.2", |
174 | "petgraph", | 174 | "petgraph", |
175 | "rustc-hash", | 175 | "rustc-hash", |
176 | ] | 176 | ] |
@@ -521,6 +521,15 @@ dependencies = [ | |||
521 | ] | 521 | ] |
522 | 522 | ||
523 | [[package]] | 523 | [[package]] |
524 | name = "itertools" | ||
525 | version = "0.9.0" | ||
526 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
527 | checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" | ||
528 | dependencies = [ | ||
529 | "either", | ||
530 | ] | ||
531 | |||
532 | [[package]] | ||
524 | name = "itoa" | 533 | name = "itoa" |
525 | version = "0.4.5" | 534 | version = "0.4.5" |
526 | source = "registry+https://github.com/rust-lang/crates.io-index" | 535 | source = "registry+https://github.com/rust-lang/crates.io-index" |
@@ -845,14 +854,9 @@ checksum = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" | |||
845 | 854 | ||
846 | [[package]] | 855 | [[package]] |
847 | name = "proc-macro-hack" | 856 | name = "proc-macro-hack" |
848 | version = "0.5.12" | 857 | version = "0.5.14" |
849 | source = "registry+https://github.com/rust-lang/crates.io-index" | 858 | source = "registry+https://github.com/rust-lang/crates.io-index" |
850 | checksum = "f918f2b601f93baa836c1c2945faef682ba5b6d4828ecb45eeb7cc3c71b811b4" | 859 | checksum = "fcfdefadc3d57ca21cf17990a28ef4c0f7c61383a28cb7604cf4a18e6ede1420" |
851 | dependencies = [ | ||
852 | "proc-macro2", | ||
853 | "quote", | ||
854 | "syn", | ||
855 | ] | ||
856 | 860 | ||
857 | [[package]] | 861 | [[package]] |
858 | name = "proc-macro2" | 862 | name = "proc-macro2" |
@@ -880,7 +884,9 @@ version = "0.1.0" | |||
880 | name = "ra_assists" | 884 | name = "ra_assists" |
881 | version = "0.1.0" | 885 | version = "0.1.0" |
882 | dependencies = [ | 886 | dependencies = [ |
887 | "either", | ||
883 | "format-buf", | 888 | "format-buf", |
889 | "itertools 0.9.0", | ||
884 | "join_to_string", | 890 | "join_to_string", |
885 | "ra_db", | 891 | "ra_db", |
886 | "ra_fmt", | 892 | "ra_fmt", |
@@ -932,7 +938,7 @@ dependencies = [ | |||
932 | name = "ra_fmt" | 938 | name = "ra_fmt" |
933 | version = "0.1.0" | 939 | version = "0.1.0" |
934 | dependencies = [ | 940 | dependencies = [ |
935 | "itertools", | 941 | "itertools 0.9.0", |
936 | "ra_syntax", | 942 | "ra_syntax", |
937 | ] | 943 | ] |
938 | 944 | ||
@@ -942,7 +948,7 @@ version = "0.1.0" | |||
942 | dependencies = [ | 948 | dependencies = [ |
943 | "arrayvec", | 949 | "arrayvec", |
944 | "either", | 950 | "either", |
945 | "itertools", | 951 | "itertools 0.9.0", |
946 | "log", | 952 | "log", |
947 | "ra_db", | 953 | "ra_db", |
948 | "ra_hir_def", | 954 | "ra_hir_def", |
@@ -1019,7 +1025,7 @@ dependencies = [ | |||
1019 | "format-buf", | 1025 | "format-buf", |
1020 | "indexmap", | 1026 | "indexmap", |
1021 | "insta", | 1027 | "insta", |
1022 | "itertools", | 1028 | "itertools 0.9.0", |
1023 | "join_to_string", | 1029 | "join_to_string", |
1024 | "log", | 1030 | "log", |
1025 | "ra_assists", | 1031 | "ra_assists", |
@@ -1040,6 +1046,7 @@ dependencies = [ | |||
1040 | name = "ra_ide_db" | 1046 | name = "ra_ide_db" |
1041 | version = "0.1.0" | 1047 | version = "0.1.0" |
1042 | dependencies = [ | 1048 | dependencies = [ |
1049 | "either", | ||
1043 | "fst", | 1050 | "fst", |
1044 | "log", | 1051 | "log", |
1045 | "once_cell", | 1052 | "once_cell", |
@@ -1105,7 +1112,7 @@ name = "ra_syntax" | |||
1105 | version = "0.1.0" | 1112 | version = "0.1.0" |
1106 | dependencies = [ | 1113 | dependencies = [ |
1107 | "arrayvec", | 1114 | "arrayvec", |
1108 | "itertools", | 1115 | "itertools 0.9.0", |
1109 | "once_cell", | 1116 | "once_cell", |
1110 | "ra_parser", | 1117 | "ra_parser", |
1111 | "ra_text_edit", | 1118 | "ra_text_edit", |
@@ -1282,7 +1289,7 @@ dependencies = [ | |||
1282 | "crossbeam-channel", | 1289 | "crossbeam-channel", |
1283 | "env_logger", | 1290 | "env_logger", |
1284 | "globset", | 1291 | "globset", |
1285 | "itertools", | 1292 | "itertools 0.9.0", |
1286 | "jod-thread", | 1293 | "jod-thread", |
1287 | "log", | 1294 | "log", |
1288 | "lsp-server", | 1295 | "lsp-server", |
@@ -1480,9 +1487,9 @@ checksum = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" | |||
1480 | 1487 | ||
1481 | [[package]] | 1488 | [[package]] |
1482 | name = "syn" | 1489 | name = "syn" |
1483 | version = "1.0.16" | 1490 | version = "1.0.17" |
1484 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1491 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1485 | checksum = "123bd9499cfb380418d509322d7a6d52e5315f064fe4b3ad18a53d6b92c07859" | 1492 | checksum = "0df0eb663f387145cab623dea85b09c2c5b4b0aef44e945d928e682fce71bb03" |
1486 | dependencies = [ | 1493 | dependencies = [ |
1487 | "proc-macro2", | 1494 | "proc-macro2", |
1488 | "quote", | 1495 | "quote", |
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml index d314dc8e6..a87f4052a 100644 --- a/crates/ra_assists/Cargo.toml +++ b/crates/ra_assists/Cargo.toml | |||
@@ -11,6 +11,8 @@ doctest = false | |||
11 | format-buf = "1.0.0" | 11 | format-buf = "1.0.0" |
12 | join_to_string = "0.1.3" | 12 | join_to_string = "0.1.3" |
13 | rustc-hash = "1.1.0" | 13 | rustc-hash = "1.1.0" |
14 | itertools = "0.9.0" | ||
15 | either = "1.5.3" | ||
14 | 16 | ||
15 | ra_syntax = { path = "../ra_syntax" } | 17 | ra_syntax = { path = "../ra_syntax" } |
16 | ra_text_edit = { path = "../ra_text_edit" } | 18 | ra_text_edit = { path = "../ra_text_edit" } |
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs index 62182cf03..c3e653299 100644 --- a/crates/ra_assists/src/assist_ctx.rs +++ b/crates/ra_assists/src/assist_ctx.rs | |||
@@ -11,6 +11,7 @@ use ra_syntax::{ | |||
11 | use ra_text_edit::TextEditBuilder; | 11 | use ra_text_edit::TextEditBuilder; |
12 | 12 | ||
13 | use crate::{AssistAction, AssistId, AssistLabel, GroupLabel, ResolvedAssist}; | 13 | use crate::{AssistAction, AssistId, AssistLabel, GroupLabel, ResolvedAssist}; |
14 | use algo::SyntaxRewriter; | ||
14 | 15 | ||
15 | #[derive(Clone, Debug)] | 16 | #[derive(Clone, Debug)] |
16 | pub(crate) struct Assist(pub(crate) Vec<AssistInfo>); | 17 | pub(crate) struct Assist(pub(crate) Vec<AssistInfo>); |
@@ -234,6 +235,11 @@ impl ActionBuilder { | |||
234 | pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) { | 235 | pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) { |
235 | algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) | 236 | algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) |
236 | } | 237 | } |
238 | pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) { | ||
239 | let node = rewriter.rewrite_root().unwrap(); | ||
240 | let new = rewriter.rewrite(&node); | ||
241 | algo::diff(&node, &new).into_text_edit(&mut self.edit) | ||
242 | } | ||
237 | 243 | ||
238 | fn build(self) -> AssistAction { | 244 | fn build(self) -> AssistAction { |
239 | AssistAction { | 245 | AssistAction { |
diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs index 45558c448..52b4c82db 100644 --- a/crates/ra_assists/src/ast_transform.rs +++ b/crates/ra_assists/src/ast_transform.rs | |||
@@ -3,7 +3,10 @@ use rustc_hash::FxHashMap; | |||
3 | 3 | ||
4 | use hir::{PathResolution, SemanticsScope}; | 4 | use hir::{PathResolution, SemanticsScope}; |
5 | use ra_ide_db::RootDatabase; | 5 | use ra_ide_db::RootDatabase; |
6 | use ra_syntax::ast::{self, AstNode}; | 6 | use ra_syntax::{ |
7 | algo::SyntaxRewriter, | ||
8 | ast::{self, AstNode}, | ||
9 | }; | ||
7 | 10 | ||
8 | pub trait AstTransform<'a> { | 11 | pub trait AstTransform<'a> { |
9 | fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>; | 12 | fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>; |
@@ -153,15 +156,14 @@ impl<'a> QualifyPaths<'a> { | |||
153 | } | 156 | } |
154 | 157 | ||
155 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { | 158 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { |
156 | let syntax = node.syntax(); | 159 | SyntaxRewriter::from_fn(|element| match element { |
157 | let result = ra_syntax::algo::replace_descendants(syntax, |element| match element { | ||
158 | ra_syntax::SyntaxElement::Node(n) => { | 160 | ra_syntax::SyntaxElement::Node(n) => { |
159 | let replacement = transformer.get_substitution(&n)?; | 161 | let replacement = transformer.get_substitution(&n)?; |
160 | Some(replacement.into()) | 162 | Some(replacement.into()) |
161 | } | 163 | } |
162 | _ => None, | 164 | _ => None, |
163 | }); | 165 | }) |
164 | N::cast(result).unwrap() | 166 | .rewrite_ast(&node) |
165 | } | 167 | } |
166 | 168 | ||
167 | impl<'a> AstTransform<'a> for QualifyPaths<'a> { | 169 | impl<'a> AstTransform<'a> for QualifyPaths<'a> { |
diff --git a/crates/ra_assists/src/doc_tests/generated.rs b/crates/ra_assists/src/doc_tests/generated.rs index aef6793e8..62dcb3808 100644 --- a/crates/ra_assists/src/doc_tests/generated.rs +++ b/crates/ra_assists/src/doc_tests/generated.rs | |||
@@ -275,8 +275,8 @@ enum Action { Move { distance: u32 }, Stop } | |||
275 | 275 | ||
276 | fn handle(action: Action) { | 276 | fn handle(action: Action) { |
277 | match action { | 277 | match action { |
278 | Action::Move { distance } => (), | 278 | Action::Move { distance } => {} |
279 | Action::Stop => (), | 279 | Action::Stop => {} |
280 | } | 280 | } |
281 | } | 281 | } |
282 | "#####, | 282 | "#####, |
diff --git a/crates/ra_assists/src/handlers/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs index e5920b6f6..722f207e2 100644 --- a/crates/ra_assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ra_assists/src/handlers/add_missing_impl_members.rs | |||
@@ -151,7 +151,7 @@ fn add_missing_impl_members_inner( | |||
151 | ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)), | 151 | ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)), |
152 | _ => it, | 152 | _ => it, |
153 | }) | 153 | }) |
154 | .map(|it| edit::strip_attrs_and_docs(&it)); | 154 | .map(|it| edit::remove_attrs_and_docs(&it)); |
155 | let new_impl_item_list = impl_item_list.append_items(items); | 155 | let new_impl_item_list = impl_item_list.append_items(items); |
156 | let cursor_position = { | 156 | let cursor_position = { |
157 | let first_new_item = new_impl_item_list.impl_items().nth(n_existing_items).unwrap(); | 157 | let first_new_item = new_impl_item_list.impl_items().nth(n_existing_items).unwrap(); |
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs index bb280f633..99682e023 100644 --- a/crates/ra_assists/src/handlers/auto_import.rs +++ b/crates/ra_assists/src/handlers/auto_import.rs | |||
@@ -17,6 +17,7 @@ use crate::{ | |||
17 | utils::insert_use_statement, | 17 | utils::insert_use_statement, |
18 | AssistId, | 18 | AssistId, |
19 | }; | 19 | }; |
20 | use either::Either; | ||
20 | 21 | ||
21 | // Assist: auto_import | 22 | // Assist: auto_import |
22 | // | 23 | // |
@@ -58,6 +59,7 @@ pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> { | |||
58 | group.finish() | 59 | group.finish() |
59 | } | 60 | } |
60 | 61 | ||
62 | #[derive(Debug)] | ||
61 | struct AutoImportAssets { | 63 | struct AutoImportAssets { |
62 | import_candidate: ImportCandidate, | 64 | import_candidate: ImportCandidate, |
63 | module_with_name_to_import: Module, | 65 | module_with_name_to_import: Module, |
@@ -127,14 +129,14 @@ impl AutoImportAssets { | |||
127 | ImportsLocator::new(db) | 129 | ImportsLocator::new(db) |
128 | .find_imports(&self.get_search_query()) | 130 | .find_imports(&self.get_search_query()) |
129 | .into_iter() | 131 | .into_iter() |
130 | .filter_map(|module_def| match &self.import_candidate { | 132 | .filter_map(|candidate| match &self.import_candidate { |
131 | ImportCandidate::TraitAssocItem(assoc_item_type, _) => { | 133 | ImportCandidate::TraitAssocItem(assoc_item_type, _) => { |
132 | let located_assoc_item = match module_def { | 134 | let located_assoc_item = match candidate { |
133 | ModuleDef::Function(located_function) => located_function | 135 | Either::Left(ModuleDef::Function(located_function)) => located_function |
134 | .as_assoc_item(db) | 136 | .as_assoc_item(db) |
135 | .map(|assoc| assoc.container(db)) | 137 | .map(|assoc| assoc.container(db)) |
136 | .and_then(Self::assoc_to_trait), | 138 | .and_then(Self::assoc_to_trait), |
137 | ModuleDef::Const(located_const) => located_const | 139 | Either::Left(ModuleDef::Const(located_const)) => located_const |
138 | .as_assoc_item(db) | 140 | .as_assoc_item(db) |
139 | .map(|assoc| assoc.container(db)) | 141 | .map(|assoc| assoc.container(db)) |
140 | .and_then(Self::assoc_to_trait), | 142 | .and_then(Self::assoc_to_trait), |
@@ -153,10 +155,11 @@ impl AutoImportAssets { | |||
153 | |_, assoc| Self::assoc_to_trait(assoc.container(db)), | 155 | |_, assoc| Self::assoc_to_trait(assoc.container(db)), |
154 | ) | 156 | ) |
155 | .map(ModuleDef::from) | 157 | .map(ModuleDef::from) |
158 | .map(Either::Left) | ||
156 | } | 159 | } |
157 | ImportCandidate::TraitMethod(function_callee, _) => { | 160 | ImportCandidate::TraitMethod(function_callee, _) => { |
158 | let located_assoc_item = | 161 | let located_assoc_item = |
159 | if let ModuleDef::Function(located_function) = module_def { | 162 | if let Either::Left(ModuleDef::Function(located_function)) = candidate { |
160 | located_function | 163 | located_function |
161 | .as_assoc_item(db) | 164 | .as_assoc_item(db) |
162 | .map(|assoc| assoc.container(db)) | 165 | .map(|assoc| assoc.container(db)) |
@@ -179,10 +182,18 @@ impl AutoImportAssets { | |||
179 | }, | 182 | }, |
180 | ) | 183 | ) |
181 | .map(ModuleDef::from) | 184 | .map(ModuleDef::from) |
185 | .map(Either::Left) | ||
186 | } | ||
187 | _ => Some(candidate), | ||
188 | }) | ||
189 | .filter_map(|candidate| match candidate { | ||
190 | Either::Left(module_def) => { | ||
191 | self.module_with_name_to_import.find_use_path(db, module_def) | ||
192 | } | ||
193 | Either::Right(macro_def) => { | ||
194 | self.module_with_name_to_import.find_use_path(db, macro_def) | ||
182 | } | 195 | } |
183 | _ => Some(module_def), | ||
184 | }) | 196 | }) |
185 | .filter_map(|module_def| self.module_with_name_to_import.find_use_path(db, module_def)) | ||
186 | .filter(|use_path| !use_path.segments.is_empty()) | 197 | .filter(|use_path| !use_path.segments.is_empty()) |
187 | .take(20) | 198 | .take(20) |
188 | .collect::<BTreeSet<_>>() | 199 | .collect::<BTreeSet<_>>() |
@@ -440,6 +451,30 @@ mod tests { | |||
440 | } | 451 | } |
441 | 452 | ||
442 | #[test] | 453 | #[test] |
454 | fn macro_import() { | ||
455 | check_assist( | ||
456 | auto_import, | ||
457 | r" | ||
458 | //- /lib.rs crate:crate_with_macro | ||
459 | #[macro_export] | ||
460 | macro_rules! foo { | ||
461 | () => () | ||
462 | } | ||
463 | |||
464 | //- /main.rs crate:main deps:crate_with_macro | ||
465 | fn main() { | ||
466 | foo<|> | ||
467 | }", | ||
468 | r"use crate_with_macro::foo; | ||
469 | |||
470 | fn main() { | ||
471 | foo<|> | ||
472 | } | ||
473 | ", | ||
474 | ); | ||
475 | } | ||
476 | |||
477 | #[test] | ||
443 | fn auto_import_target() { | 478 | fn auto_import_target() { |
444 | check_assist_target( | 479 | check_assist_target( |
445 | auto_import, | 480 | auto_import, |
diff --git a/crates/ra_assists/src/handlers/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs index fbd6a3ec3..add82e5b1 100644 --- a/crates/ra_assists/src/handlers/fill_match_arms.rs +++ b/crates/ra_assists/src/handlers/fill_match_arms.rs | |||
@@ -2,7 +2,8 @@ | |||
2 | 2 | ||
3 | use std::iter; | 3 | use std::iter; |
4 | 4 | ||
5 | use hir::{Adt, HasSource, Semantics}; | 5 | use hir::{Adt, HasSource, ModuleDef, Semantics}; |
6 | use itertools::Itertools; | ||
6 | use ra_ide_db::RootDatabase; | 7 | use ra_ide_db::RootDatabase; |
7 | 8 | ||
8 | use crate::{Assist, AssistCtx, AssistId}; | 9 | use crate::{Assist, AssistCtx, AssistId}; |
@@ -29,8 +30,8 @@ use ast::{MatchArm, Pat}; | |||
29 | // | 30 | // |
30 | // fn handle(action: Action) { | 31 | // fn handle(action: Action) { |
31 | // match action { | 32 | // match action { |
32 | // Action::Move { distance } => (), | 33 | // Action::Move { distance } => {} |
33 | // Action::Stop => (), | 34 | // Action::Stop => {} |
34 | // } | 35 | // } |
35 | // } | 36 | // } |
36 | // ``` | 37 | // ``` |
@@ -39,13 +40,6 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> { | |||
39 | let match_arm_list = match_expr.match_arm_list()?; | 40 | let match_arm_list = match_expr.match_arm_list()?; |
40 | 41 | ||
41 | let expr = match_expr.expr()?; | 42 | let expr = match_expr.expr()?; |
42 | let enum_def = resolve_enum_def(&ctx.sema, &expr)?; | ||
43 | let module = ctx.sema.scope(expr.syntax()).module()?; | ||
44 | |||
45 | let variants = enum_def.variants(ctx.db); | ||
46 | if variants.is_empty() { | ||
47 | return None; | ||
48 | } | ||
49 | 43 | ||
50 | let mut arms: Vec<MatchArm> = match_arm_list.arms().collect(); | 44 | let mut arms: Vec<MatchArm> = match_arm_list.arms().collect(); |
51 | if arms.len() == 1 { | 45 | if arms.len() == 1 { |
@@ -54,13 +48,49 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> { | |||
54 | } | 48 | } |
55 | } | 49 | } |
56 | 50 | ||
57 | let db = ctx.db; | 51 | let module = ctx.sema.scope(expr.syntax()).module()?; |
58 | let missing_arms: Vec<MatchArm> = variants | 52 | |
59 | .into_iter() | 53 | let missing_arms: Vec<MatchArm> = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) { |
60 | .filter_map(|variant| build_pat(db, module, variant)) | 54 | let variants = enum_def.variants(ctx.db); |
61 | .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) | 55 | |
62 | .map(|pat| make::match_arm(iter::once(pat), make::expr_unit())) | 56 | variants |
63 | .collect(); | 57 | .into_iter() |
58 | .filter_map(|variant| build_pat(ctx.db, module, variant)) | ||
59 | .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) | ||
60 | .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) | ||
61 | .collect() | ||
62 | } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) { | ||
63 | // Partial fill not currently supported for tuple of enums. | ||
64 | if !arms.is_empty() { | ||
65 | return None; | ||
66 | } | ||
67 | |||
68 | // We do not currently support filling match arms for a tuple | ||
69 | // containing a single enum. | ||
70 | if enum_defs.len() < 2 { | ||
71 | return None; | ||
72 | } | ||
73 | |||
74 | // When calculating the match arms for a tuple of enums, we want | ||
75 | // to create a match arm for each possible combination of enum | ||
76 | // values. The `multi_cartesian_product` method transforms | ||
77 | // Vec<Vec<EnumVariant>> into Vec<(EnumVariant, .., EnumVariant)> | ||
78 | // where each tuple represents a proposed match arm. | ||
79 | enum_defs | ||
80 | .into_iter() | ||
81 | .map(|enum_def| enum_def.variants(ctx.db)) | ||
82 | .multi_cartesian_product() | ||
83 | .map(|variants| { | ||
84 | let patterns = | ||
85 | variants.into_iter().filter_map(|variant| build_pat(ctx.db, module, variant)); | ||
86 | ast::Pat::from(make::tuple_pat(patterns)) | ||
87 | }) | ||
88 | .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat)) | ||
89 | .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) | ||
90 | .collect() | ||
91 | } else { | ||
92 | return None; | ||
93 | }; | ||
64 | 94 | ||
65 | if missing_arms.is_empty() { | 95 | if missing_arms.is_empty() { |
66 | return None; | 96 | return None; |
@@ -104,8 +134,27 @@ fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option< | |||
104 | }) | 134 | }) |
105 | } | 135 | } |
106 | 136 | ||
137 | fn resolve_tuple_of_enum_def( | ||
138 | sema: &Semantics<RootDatabase>, | ||
139 | expr: &ast::Expr, | ||
140 | ) -> Option<Vec<hir::Enum>> { | ||
141 | sema.type_of_expr(&expr)? | ||
142 | .tuple_fields(sema.db) | ||
143 | .iter() | ||
144 | .map(|ty| { | ||
145 | ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() { | ||
146 | Some(Adt::Enum(e)) => Some(e), | ||
147 | // For now we only handle expansion for a tuple of enums. Here | ||
148 | // we map non-enum items to None and rely on `collect` to | ||
149 | // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>. | ||
150 | _ => None, | ||
151 | }) | ||
152 | }) | ||
153 | .collect() | ||
154 | } | ||
155 | |||
107 | fn build_pat(db: &RootDatabase, module: hir::Module, var: hir::EnumVariant) -> Option<ast::Pat> { | 156 | fn build_pat(db: &RootDatabase, module: hir::Module, var: hir::EnumVariant) -> Option<ast::Pat> { |
108 | let path = crate::ast_transform::path_to_ast(module.find_use_path(db, var.into())?); | 157 | let path = crate::ast_transform::path_to_ast(module.find_use_path(db, ModuleDef::from(var))?); |
109 | 158 | ||
110 | // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though | 159 | // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though |
111 | let pat: ast::Pat = match var.source(db).value.kind() { | 160 | let pat: ast::Pat = match var.source(db).value.kind() { |
@@ -143,8 +192,23 @@ mod tests { | |||
143 | fn main() { | 192 | fn main() { |
144 | match A::As<|> { | 193 | match A::As<|> { |
145 | A::As, | 194 | A::As, |
146 | A::Bs{x,y:Some(_)} => (), | 195 | A::Bs{x,y:Some(_)} => {} |
147 | A::Cs(_, Some(_)) => (), | 196 | A::Cs(_, Some(_)) => {} |
197 | } | ||
198 | } | ||
199 | "#, | ||
200 | ); | ||
201 | } | ||
202 | |||
203 | #[test] | ||
204 | fn tuple_of_non_enum() { | ||
205 | // for now this case is not handled, although it potentially could be | ||
206 | // in the future | ||
207 | check_assist_not_applicable( | ||
208 | fill_match_arms, | ||
209 | r#" | ||
210 | fn main() { | ||
211 | match (0, false)<|> { | ||
148 | } | 212 | } |
149 | } | 213 | } |
150 | "#, | 214 | "#, |
@@ -163,8 +227,8 @@ mod tests { | |||
163 | } | 227 | } |
164 | fn main() { | 228 | fn main() { |
165 | match A::As<|> { | 229 | match A::As<|> { |
166 | A::Bs{x,y:Some(_)} => (), | 230 | A::Bs{x,y:Some(_)} => {} |
167 | A::Cs(_, Some(_)) => (), | 231 | A::Cs(_, Some(_)) => {} |
168 | } | 232 | } |
169 | } | 233 | } |
170 | "#, | 234 | "#, |
@@ -176,9 +240,9 @@ mod tests { | |||
176 | } | 240 | } |
177 | fn main() { | 241 | fn main() { |
178 | match <|>A::As { | 242 | match <|>A::As { |
179 | A::Bs{x,y:Some(_)} => (), | 243 | A::Bs{x,y:Some(_)} => {} |
180 | A::Cs(_, Some(_)) => (), | 244 | A::Cs(_, Some(_)) => {} |
181 | A::As => (), | 245 | A::As => {} |
182 | } | 246 | } |
183 | } | 247 | } |
184 | "#, | 248 | "#, |
@@ -197,7 +261,7 @@ mod tests { | |||
197 | } | 261 | } |
198 | fn main() { | 262 | fn main() { |
199 | match A::As<|> { | 263 | match A::As<|> { |
200 | A::Cs(_) | A::Bs => (), | 264 | A::Cs(_) | A::Bs => {} |
201 | } | 265 | } |
202 | } | 266 | } |
203 | "#, | 267 | "#, |
@@ -209,8 +273,8 @@ mod tests { | |||
209 | } | 273 | } |
210 | fn main() { | 274 | fn main() { |
211 | match <|>A::As { | 275 | match <|>A::As { |
212 | A::Cs(_) | A::Bs => (), | 276 | A::Cs(_) | A::Bs => {} |
213 | A::As => (), | 277 | A::As => {} |
214 | } | 278 | } |
215 | } | 279 | } |
216 | "#, | 280 | "#, |
@@ -235,8 +299,8 @@ mod tests { | |||
235 | } | 299 | } |
236 | fn main() { | 300 | fn main() { |
237 | match A::As<|> { | 301 | match A::As<|> { |
238 | A::Bs if 0 < 1 => (), | 302 | A::Bs if 0 < 1 => {} |
239 | A::Ds(_value) => (), | 303 | A::Ds(_value) => { let x = 1; } |
240 | A::Es(B::Xs) => (), | 304 | A::Es(B::Xs) => (), |
241 | } | 305 | } |
242 | } | 306 | } |
@@ -255,11 +319,11 @@ mod tests { | |||
255 | } | 319 | } |
256 | fn main() { | 320 | fn main() { |
257 | match <|>A::As { | 321 | match <|>A::As { |
258 | A::Bs if 0 < 1 => (), | 322 | A::Bs if 0 < 1 => {} |
259 | A::Ds(_value) => (), | 323 | A::Ds(_value) => { let x = 1; } |
260 | A::Es(B::Xs) => (), | 324 | A::Es(B::Xs) => (), |
261 | A::As => (), | 325 | A::As => {} |
262 | A::Cs => (), | 326 | A::Cs => {} |
263 | } | 327 | } |
264 | } | 328 | } |
265 | "#, | 329 | "#, |
@@ -296,11 +360,174 @@ mod tests { | |||
296 | fn main() { | 360 | fn main() { |
297 | let a = A::As; | 361 | let a = A::As; |
298 | match <|>a { | 362 | match <|>a { |
299 | A::As => (), | 363 | A::As => {} |
300 | A::Bs => (), | 364 | A::Bs => {} |
301 | A::Cs(_) => (), | 365 | A::Cs(_) => {} |
302 | A::Ds(_, _) => (), | 366 | A::Ds(_, _) => {} |
303 | A::Es { x, y } => (), | 367 | A::Es { x, y } => {} |
368 | } | ||
369 | } | ||
370 | "#, | ||
371 | ); | ||
372 | } | ||
373 | |||
374 | #[test] | ||
375 | fn fill_match_arms_tuple_of_enum() { | ||
376 | check_assist( | ||
377 | fill_match_arms, | ||
378 | r#" | ||
379 | enum A { | ||
380 | One, | ||
381 | Two, | ||
382 | } | ||
383 | enum B { | ||
384 | One, | ||
385 | Two, | ||
386 | } | ||
387 | |||
388 | fn main() { | ||
389 | let a = A::One; | ||
390 | let b = B::One; | ||
391 | match (a<|>, b) {} | ||
392 | } | ||
393 | "#, | ||
394 | r#" | ||
395 | enum A { | ||
396 | One, | ||
397 | Two, | ||
398 | } | ||
399 | enum B { | ||
400 | One, | ||
401 | Two, | ||
402 | } | ||
403 | |||
404 | fn main() { | ||
405 | let a = A::One; | ||
406 | let b = B::One; | ||
407 | match <|>(a, b) { | ||
408 | (A::One, B::One) => {} | ||
409 | (A::One, B::Two) => {} | ||
410 | (A::Two, B::One) => {} | ||
411 | (A::Two, B::Two) => {} | ||
412 | } | ||
413 | } | ||
414 | "#, | ||
415 | ); | ||
416 | } | ||
417 | |||
418 | #[test] | ||
419 | fn fill_match_arms_tuple_of_enum_ref() { | ||
420 | check_assist( | ||
421 | fill_match_arms, | ||
422 | r#" | ||
423 | enum A { | ||
424 | One, | ||
425 | Two, | ||
426 | } | ||
427 | enum B { | ||
428 | One, | ||
429 | Two, | ||
430 | } | ||
431 | |||
432 | fn main() { | ||
433 | let a = A::One; | ||
434 | let b = B::One; | ||
435 | match (&a<|>, &b) {} | ||
436 | } | ||
437 | "#, | ||
438 | r#" | ||
439 | enum A { | ||
440 | One, | ||
441 | Two, | ||
442 | } | ||
443 | enum B { | ||
444 | One, | ||
445 | Two, | ||
446 | } | ||
447 | |||
448 | fn main() { | ||
449 | let a = A::One; | ||
450 | let b = B::One; | ||
451 | match <|>(&a, &b) { | ||
452 | (A::One, B::One) => {} | ||
453 | (A::One, B::Two) => {} | ||
454 | (A::Two, B::One) => {} | ||
455 | (A::Two, B::Two) => {} | ||
456 | } | ||
457 | } | ||
458 | "#, | ||
459 | ); | ||
460 | } | ||
461 | |||
462 | #[test] | ||
463 | fn fill_match_arms_tuple_of_enum_partial() { | ||
464 | check_assist_not_applicable( | ||
465 | fill_match_arms, | ||
466 | r#" | ||
467 | enum A { | ||
468 | One, | ||
469 | Two, | ||
470 | } | ||
471 | enum B { | ||
472 | One, | ||
473 | Two, | ||
474 | } | ||
475 | |||
476 | fn main() { | ||
477 | let a = A::One; | ||
478 | let b = B::One; | ||
479 | match (a<|>, b) { | ||
480 | (A::Two, B::One) => {} | ||
481 | } | ||
482 | } | ||
483 | "#, | ||
484 | ); | ||
485 | } | ||
486 | |||
487 | #[test] | ||
488 | fn fill_match_arms_tuple_of_enum_not_applicable() { | ||
489 | check_assist_not_applicable( | ||
490 | fill_match_arms, | ||
491 | r#" | ||
492 | enum A { | ||
493 | One, | ||
494 | Two, | ||
495 | } | ||
496 | enum B { | ||
497 | One, | ||
498 | Two, | ||
499 | } | ||
500 | |||
501 | fn main() { | ||
502 | let a = A::One; | ||
503 | let b = B::One; | ||
504 | match (a<|>, b) { | ||
505 | (A::Two, B::One) => {} | ||
506 | (A::One, B::One) => {} | ||
507 | (A::One, B::Two) => {} | ||
508 | (A::Two, B::Two) => {} | ||
509 | } | ||
510 | } | ||
511 | "#, | ||
512 | ); | ||
513 | } | ||
514 | |||
515 | #[test] | ||
516 | fn fill_match_arms_single_element_tuple_of_enum() { | ||
517 | // For now we don't handle the case of a single element tuple, but | ||
518 | // we could handle this in the future if `make::tuple_pat` allowed | ||
519 | // creating a tuple with a single pattern. | ||
520 | check_assist_not_applicable( | ||
521 | fill_match_arms, | ||
522 | r#" | ||
523 | enum A { | ||
524 | One, | ||
525 | Two, | ||
526 | } | ||
527 | |||
528 | fn main() { | ||
529 | let a = A::One; | ||
530 | match (a<|>, ) { | ||
304 | } | 531 | } |
305 | } | 532 | } |
306 | "#, | 533 | "#, |
@@ -328,7 +555,7 @@ mod tests { | |||
328 | 555 | ||
329 | fn foo(a: &A) { | 556 | fn foo(a: &A) { |
330 | match <|>a { | 557 | match <|>a { |
331 | A::As => (), | 558 | A::As => {} |
332 | } | 559 | } |
333 | } | 560 | } |
334 | "#, | 561 | "#, |
@@ -353,7 +580,7 @@ mod tests { | |||
353 | 580 | ||
354 | fn foo(a: &mut A) { | 581 | fn foo(a: &mut A) { |
355 | match <|>a { | 582 | match <|>a { |
356 | A::Es { x, y } => (), | 583 | A::Es { x, y } => {} |
357 | } | 584 | } |
358 | } | 585 | } |
359 | "#, | 586 | "#, |
@@ -384,7 +611,7 @@ mod tests { | |||
384 | 611 | ||
385 | fn main() { | 612 | fn main() { |
386 | match E::X { | 613 | match E::X { |
387 | <|>_ => {}, | 614 | <|>_ => {} |
388 | } | 615 | } |
389 | } | 616 | } |
390 | "#, | 617 | "#, |
@@ -393,8 +620,8 @@ mod tests { | |||
393 | 620 | ||
394 | fn main() { | 621 | fn main() { |
395 | match <|>E::X { | 622 | match <|>E::X { |
396 | E::X => (), | 623 | E::X => {} |
397 | E::Y => (), | 624 | E::Y => {} |
398 | } | 625 | } |
399 | } | 626 | } |
400 | "#, | 627 | "#, |
@@ -421,8 +648,8 @@ mod tests { | |||
421 | 648 | ||
422 | fn main() { | 649 | fn main() { |
423 | match <|>X { | 650 | match <|>X { |
424 | X => (), | 651 | X => {} |
425 | foo::E::Y => (), | 652 | foo::E::Y => {} |
426 | } | 653 | } |
427 | } | 654 | } |
428 | "#, | 655 | "#, |
diff --git a/crates/ra_assists/src/handlers/invert_if.rs b/crates/ra_assists/src/handlers/invert_if.rs index 3a2665d17..4c5716868 100644 --- a/crates/ra_assists/src/handlers/invert_if.rs +++ b/crates/ra_assists/src/handlers/invert_if.rs | |||
@@ -33,6 +33,11 @@ pub(crate) fn invert_if(ctx: AssistCtx) -> Option<Assist> { | |||
33 | return None; | 33 | return None; |
34 | } | 34 | } |
35 | 35 | ||
36 | // This assist should not apply for if-let. | ||
37 | if expr.condition()?.pat().is_some() { | ||
38 | return None; | ||
39 | } | ||
40 | |||
36 | let cond = expr.condition()?.expr()?; | 41 | let cond = expr.condition()?.expr()?; |
37 | let then_node = expr.then_branch()?.syntax().clone(); | 42 | let then_node = expr.then_branch()?.syntax().clone(); |
38 | 43 | ||
@@ -90,4 +95,12 @@ mod tests { | |||
90 | fn invert_if_doesnt_apply_with_cursor_not_on_if() { | 95 | fn invert_if_doesnt_apply_with_cursor_not_on_if() { |
91 | check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") | 96 | check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") |
92 | } | 97 | } |
98 | |||
99 | #[test] | ||
100 | fn invert_if_doesnt_apply_with_if_let() { | ||
101 | check_assist_not_applicable( | ||
102 | invert_if, | ||
103 | "fn f() { i<|>f let Some(_) = Some(1) { 1 } else { 0 } }", | ||
104 | ) | ||
105 | } | ||
93 | } | 106 | } |
diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs index 89bc975bd..9c57d1e30 100644 --- a/crates/ra_assists/src/handlers/merge_imports.rs +++ b/crates/ra_assists/src/handlers/merge_imports.rs | |||
@@ -1,9 +1,9 @@ | |||
1 | use std::iter::successors; | 1 | use std::iter::successors; |
2 | 2 | ||
3 | use ra_syntax::{ | 3 | use ra_syntax::{ |
4 | algo::neighbor, | 4 | algo::{neighbor, SyntaxRewriter}, |
5 | ast::{self, edit::AstNodeEdit, make}, | 5 | ast::{self, edit::AstNodeEdit, make}, |
6 | AstNode, AstToken, Direction, InsertPosition, SyntaxElement, TextRange, T, | 6 | AstNode, Direction, InsertPosition, SyntaxElement, T, |
7 | }; | 7 | }; |
8 | 8 | ||
9 | use crate::{Assist, AssistCtx, AssistId}; | 9 | use crate::{Assist, AssistCtx, AssistId}; |
@@ -22,9 +22,10 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
22 | // ``` | 22 | // ``` |
23 | pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> { | 23 | pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> { |
24 | let tree: ast::UseTree = ctx.find_node_at_offset()?; | 24 | let tree: ast::UseTree = ctx.find_node_at_offset()?; |
25 | let (new_tree, to_delete) = if let Some(use_item) = | 25 | let mut rewriter = SyntaxRewriter::default(); |
26 | tree.syntax().parent().and_then(ast::UseItem::cast) | 26 | let mut offset = ctx.frange.range.start(); |
27 | { | 27 | |
28 | if let Some(use_item) = tree.syntax().parent().and_then(ast::UseItem::cast) { | ||
28 | let (merged, to_delete) = next_prev() | 29 | let (merged, to_delete) = next_prev() |
29 | .filter_map(|dir| neighbor(&use_item, dir)) | 30 | .filter_map(|dir| neighbor(&use_item, dir)) |
30 | .filter_map(|it| Some((it.clone(), it.use_tree()?))) | 31 | .filter_map(|it| Some((it.clone(), it.use_tree()?))) |
@@ -32,42 +33,28 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> { | |||
32 | Some((try_merge_trees(&tree, &use_tree)?, use_item.clone())) | 33 | Some((try_merge_trees(&tree, &use_tree)?, use_item.clone())) |
33 | })?; | 34 | })?; |
34 | 35 | ||
35 | let mut range = to_delete.syntax().text_range(); | 36 | rewriter.replace_ast(&tree, &merged); |
36 | let next_ws = to_delete | 37 | rewriter += to_delete.remove(); |
37 | .syntax() | 38 | |
38 | .next_sibling_or_token() | 39 | if to_delete.syntax().text_range().end() < offset { |
39 | .and_then(|it| it.into_token()) | 40 | offset -= to_delete.syntax().text_range().len(); |
40 | .and_then(ast::Whitespace::cast); | ||
41 | if let Some(ws) = next_ws { | ||
42 | range = range.extend_to(&ws.syntax().text_range()) | ||
43 | } | 41 | } |
44 | (merged, range) | ||
45 | } else { | 42 | } else { |
46 | let (merged, to_delete) = next_prev() | 43 | let (merged, to_delete) = next_prev() |
47 | .filter_map(|dir| neighbor(&tree, dir)) | 44 | .filter_map(|dir| neighbor(&tree, dir)) |
48 | .find_map(|use_tree| Some((try_merge_trees(&tree, &use_tree)?, use_tree.clone())))?; | 45 | .find_map(|use_tree| Some((try_merge_trees(&tree, &use_tree)?, use_tree.clone())))?; |
49 | 46 | ||
50 | let mut range = to_delete.syntax().text_range(); | 47 | rewriter.replace_ast(&tree, &merged); |
51 | if let Some((dir, nb)) = next_prev().find_map(|dir| Some((dir, neighbor(&to_delete, dir)?))) | 48 | rewriter += to_delete.remove(); |
52 | { | 49 | |
53 | let nb_range = nb.syntax().text_range(); | 50 | if to_delete.syntax().text_range().end() < offset { |
54 | if dir == Direction::Prev { | 51 | offset -= to_delete.syntax().text_range().len(); |
55 | range = TextRange::from_to(nb_range.end(), range.end()); | ||
56 | } else { | ||
57 | range = TextRange::from_to(range.start(), nb_range.start()); | ||
58 | } | ||
59 | } | 52 | } |
60 | (merged, range) | ||
61 | }; | 53 | }; |
62 | 54 | ||
63 | let mut offset = ctx.frange.range.start(); | ||
64 | ctx.add_assist(AssistId("merge_imports"), "Merge imports", |edit| { | 55 | ctx.add_assist(AssistId("merge_imports"), "Merge imports", |edit| { |
65 | edit.replace_ast(tree, new_tree); | 56 | edit.rewrite(rewriter); |
66 | edit.delete(to_delete); | 57 | // FIXME: we only need because our diff is imprecise |
67 | |||
68 | if to_delete.end() <= offset { | ||
69 | offset -= to_delete.len(); | ||
70 | } | ||
71 | edit.set_cursor(offset); | 58 | edit.set_cursor(offset); |
72 | }) | 59 | }) |
73 | } | 60 | } |
@@ -156,7 +143,7 @@ use std::fmt::Debug; | |||
156 | use std::fmt<|>::Display; | 143 | use std::fmt<|>::Display; |
157 | ", | 144 | ", |
158 | r" | 145 | r" |
159 | use std::fmt<|>::{Display, Debug}; | 146 | use std::fmt:<|>:{Display, Debug}; |
160 | ", | 147 | ", |
161 | ); | 148 | ); |
162 | } | 149 | } |
@@ -178,7 +165,57 @@ use std::{fmt<|>::{Debug, Display}}; | |||
178 | use std::{fmt::Debug, fmt<|>::Display}; | 165 | use std::{fmt::Debug, fmt<|>::Display}; |
179 | ", | 166 | ", |
180 | r" | 167 | r" |
181 | use std::{fmt<|>::{Display, Debug}}; | 168 | use std::{fmt::<|>{Display, Debug}}; |
169 | ", | ||
170 | ); | ||
171 | } | ||
172 | |||
173 | #[test] | ||
174 | fn removes_just_enough_whitespace() { | ||
175 | check_assist( | ||
176 | merge_imports, | ||
177 | r" | ||
178 | use foo<|>::bar; | ||
179 | use foo::baz; | ||
180 | |||
181 | /// Doc comment | ||
182 | ", | ||
183 | r" | ||
184 | use foo<|>::{bar, baz}; | ||
185 | |||
186 | /// Doc comment | ||
187 | ", | ||
188 | ); | ||
189 | } | ||
190 | |||
191 | #[test] | ||
192 | fn works_with_trailing_comma() { | ||
193 | check_assist( | ||
194 | merge_imports, | ||
195 | r" | ||
196 | use { | ||
197 | foo<|>::bar, | ||
198 | foo::baz, | ||
199 | }; | ||
200 | ", | ||
201 | r" | ||
202 | use { | ||
203 | foo<|>::{bar, baz}, | ||
204 | }; | ||
205 | ", | ||
206 | ); | ||
207 | check_assist( | ||
208 | merge_imports, | ||
209 | r" | ||
210 | use { | ||
211 | foo::baz, | ||
212 | foo<|>::bar, | ||
213 | }; | ||
214 | ", | ||
215 | r" | ||
216 | use { | ||
217 | foo::{bar<|>, baz}, | ||
218 | }; | ||
182 | ", | 219 | ", |
183 | ); | 220 | ); |
184 | } | 221 | } |
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs index b8704ea7d..bcc9b3f10 100644 --- a/crates/ra_assists/src/lib.rs +++ b/crates/ra_assists/src/lib.rs | |||
@@ -165,7 +165,6 @@ mod helpers { | |||
165 | 165 | ||
166 | use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; | 166 | use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; |
167 | use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; | 167 | use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; |
168 | use ra_syntax::TextRange; | ||
169 | use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset}; | 168 | use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset}; |
170 | 169 | ||
171 | use crate::{AssistCtx, AssistHandler}; | 170 | use crate::{AssistCtx, AssistHandler}; |
@@ -175,8 +174,7 @@ mod helpers { | |||
175 | let (mut db, file_id) = RootDatabase::with_single_file(text); | 174 | let (mut db, file_id) = RootDatabase::with_single_file(text); |
176 | // FIXME: ideally, this should be done by the above `RootDatabase::with_single_file`, | 175 | // FIXME: ideally, this should be done by the above `RootDatabase::with_single_file`, |
177 | // but it looks like this might need specialization? :( | 176 | // but it looks like this might need specialization? :( |
178 | let local_roots = vec![db.file_source_root(file_id)]; | 177 | db.set_local_roots(Arc::new(vec![db.file_source_root(file_id)])); |
179 | db.set_local_roots(Arc::new(local_roots)); | ||
180 | (db, file_id) | 178 | (db, file_id) |
181 | } | 179 | } |
182 | 180 | ||
@@ -206,11 +204,24 @@ mod helpers { | |||
206 | } | 204 | } |
207 | 205 | ||
208 | fn check(assist: AssistHandler, before: &str, expected: ExpectedResult) { | 206 | fn check(assist: AssistHandler, before: &str, expected: ExpectedResult) { |
209 | let (range_or_offset, before) = extract_range_or_offset(before); | 207 | let (text_without_caret, file_with_caret_id, range_or_offset, db) = |
210 | let range: TextRange = range_or_offset.into(); | 208 | if before.contains("//-") { |
209 | let (mut db, position) = RootDatabase::with_position(before); | ||
210 | db.set_local_roots(Arc::new(vec![db.file_source_root(position.file_id)])); | ||
211 | ( | ||
212 | db.file_text(position.file_id).as_ref().to_owned(), | ||
213 | position.file_id, | ||
214 | RangeOrOffset::Offset(position.offset), | ||
215 | db, | ||
216 | ) | ||
217 | } else { | ||
218 | let (range_or_offset, text_without_caret) = extract_range_or_offset(before); | ||
219 | let (db, file_id) = with_single_file(&text_without_caret); | ||
220 | (text_without_caret, file_id, range_or_offset, db) | ||
221 | }; | ||
222 | |||
223 | let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; | ||
211 | 224 | ||
212 | let (db, file_id) = with_single_file(&before); | ||
213 | let frange = FileRange { file_id, range }; | ||
214 | let sema = Semantics::new(&db); | 225 | let sema = Semantics::new(&db); |
215 | let assist_ctx = AssistCtx::new(&sema, frange, true); | 226 | let assist_ctx = AssistCtx::new(&sema, frange, true); |
216 | 227 | ||
@@ -218,7 +229,7 @@ mod helpers { | |||
218 | (Some(assist), ExpectedResult::After(after)) => { | 229 | (Some(assist), ExpectedResult::After(after)) => { |
219 | let action = assist.0[0].action.clone().unwrap(); | 230 | let action = assist.0[0].action.clone().unwrap(); |
220 | 231 | ||
221 | let mut actual = action.edit.apply(&before); | 232 | let mut actual = action.edit.apply(&text_without_caret); |
222 | match action.cursor_position { | 233 | match action.cursor_position { |
223 | None => { | 234 | None => { |
224 | if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset { | 235 | if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset { |
@@ -237,7 +248,7 @@ mod helpers { | |||
237 | (Some(assist), ExpectedResult::Target(target)) => { | 248 | (Some(assist), ExpectedResult::Target(target)) => { |
238 | let action = assist.0[0].action.clone().unwrap(); | 249 | let action = assist.0[0].action.clone().unwrap(); |
239 | let range = action.target.expect("expected target on action"); | 250 | let range = action.target.expect("expected target on action"); |
240 | assert_eq_text!(&before[range], target); | 251 | assert_eq_text!(&text_without_caret[range], target); |
241 | } | 252 | } |
242 | (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"), | 253 | (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"), |
243 | (None, ExpectedResult::After(_)) | (None, ExpectedResult::Target(_)) => { | 254 | (None, ExpectedResult::After(_)) | (None, ExpectedResult::Target(_)) => { |
diff --git a/crates/ra_cargo_watch/src/conv.rs b/crates/ra_cargo_watch/src/conv.rs index c6f8ca329..817543deb 100644 --- a/crates/ra_cargo_watch/src/conv.rs +++ b/crates/ra_cargo_watch/src/conv.rs | |||
@@ -1,7 +1,8 @@ | |||
1 | //! This module provides the functionality needed to convert diagnostics from | 1 | //! This module provides the functionality needed to convert diagnostics from |
2 | //! `cargo check` json format to the LSP diagnostic format. | 2 | //! `cargo check` json format to the LSP diagnostic format. |
3 | use cargo_metadata::diagnostic::{ | 3 | use cargo_metadata::diagnostic::{ |
4 | Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, | 4 | Applicability, Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, |
5 | DiagnosticSpanMacroExpansion, | ||
5 | }; | 6 | }; |
6 | use lsp_types::{ | 7 | use lsp_types::{ |
7 | CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, | 8 | CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, |
@@ -136,10 +137,13 @@ fn map_rust_child_diagnostic( | |||
136 | 137 | ||
137 | let mut edit_map: HashMap<Url, Vec<TextEdit>> = HashMap::new(); | 138 | let mut edit_map: HashMap<Url, Vec<TextEdit>> = HashMap::new(); |
138 | for &span in &spans { | 139 | for &span in &spans { |
139 | if let Some(suggested_replacement) = &span.suggested_replacement { | 140 | match (&span.suggestion_applicability, &span.suggested_replacement) { |
140 | let location = map_span_to_location(span, workspace_root); | 141 | (Some(Applicability::MachineApplicable), Some(suggested_replacement)) => { |
141 | let edit = TextEdit::new(location.range, suggested_replacement.clone()); | 142 | let location = map_span_to_location(span, workspace_root); |
142 | edit_map.entry(location.uri).or_default().push(edit); | 143 | let edit = TextEdit::new(location.range, suggested_replacement.clone()); |
144 | edit_map.entry(location.uri).or_default().push(edit); | ||
145 | } | ||
146 | _ => {} | ||
143 | } | 147 | } |
144 | } | 148 | } |
145 | 149 | ||
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap index 9e8f4eff4..a59fa84fa 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap | |||
@@ -58,44 +58,26 @@ expression: diag | |||
58 | }, | 58 | }, |
59 | message: "lint level defined here", | 59 | message: "lint level defined here", |
60 | }, | 60 | }, |
61 | DiagnosticRelatedInformation { | ||
62 | location: Location { | ||
63 | uri: "file:///test/compiler/mir/tagset.rs", | ||
64 | range: Range { | ||
65 | start: Position { | ||
66 | line: 41, | ||
67 | character: 23, | ||
68 | }, | ||
69 | end: Position { | ||
70 | line: 41, | ||
71 | character: 28, | ||
72 | }, | ||
73 | }, | ||
74 | }, | ||
75 | message: "consider passing by value instead", | ||
76 | }, | ||
61 | ], | 77 | ], |
62 | ), | 78 | ), |
63 | tags: None, | 79 | tags: None, |
64 | }, | 80 | }, |
65 | fixes: [ | 81 | fixes: [], |
66 | CodeAction { | ||
67 | title: "consider passing by value instead", | ||
68 | kind: Some( | ||
69 | "quickfix", | ||
70 | ), | ||
71 | diagnostics: None, | ||
72 | edit: Some( | ||
73 | WorkspaceEdit { | ||
74 | changes: Some( | ||
75 | { | ||
76 | "file:///test/compiler/mir/tagset.rs": [ | ||
77 | TextEdit { | ||
78 | range: Range { | ||
79 | start: Position { | ||
80 | line: 41, | ||
81 | character: 23, | ||
82 | }, | ||
83 | end: Position { | ||
84 | line: 41, | ||
85 | character: 28, | ||
86 | }, | ||
87 | }, | ||
88 | new_text: "self", | ||
89 | }, | ||
90 | ], | ||
91 | }, | ||
92 | ), | ||
93 | document_changes: None, | ||
94 | }, | ||
95 | ), | ||
96 | command: None, | ||
97 | is_preferred: None, | ||
98 | }, | ||
99 | ], | ||
100 | }, | 82 | }, |
101 | ] | 83 | ] |
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs index bffe5eb00..7c525c430 100644 --- a/crates/ra_cargo_watch/src/lib.rs +++ b/crates/ra_cargo_watch/src/lib.rs | |||
@@ -8,9 +8,10 @@ use lsp_types::{ | |||
8 | WorkDoneProgressEnd, WorkDoneProgressReport, | 8 | WorkDoneProgressEnd, WorkDoneProgressReport, |
9 | }; | 9 | }; |
10 | use std::{ | 10 | use std::{ |
11 | error, fmt, | ||
11 | io::{BufRead, BufReader}, | 12 | io::{BufRead, BufReader}, |
12 | path::{Path, PathBuf}, | 13 | path::{Path, PathBuf}, |
13 | process::{Child, Command, Stdio}, | 14 | process::{Command, Stdio}, |
14 | thread::JoinHandle, | 15 | thread::JoinHandle, |
15 | time::Instant, | 16 | time::Instant, |
16 | }; | 17 | }; |
@@ -70,10 +71,10 @@ impl std::ops::Drop for CheckWatcher { | |||
70 | fn drop(&mut self) { | 71 | fn drop(&mut self) { |
71 | if let Some(handle) = self.handle.take() { | 72 | if let Some(handle) = self.handle.take() { |
72 | // Take the sender out of the option | 73 | // Take the sender out of the option |
73 | let recv = self.cmd_send.take(); | 74 | let cmd_send = self.cmd_send.take(); |
74 | 75 | ||
75 | // Dropping the sender finishes the thread loop | 76 | // Dropping the sender finishes the thread loop |
76 | drop(recv); | 77 | drop(cmd_send); |
77 | 78 | ||
78 | // Join the thread, it should finish shortly. We don't really care | 79 | // Join the thread, it should finish shortly. We don't really care |
79 | // whether it panicked, so it is safe to ignore the result | 80 | // whether it panicked, so it is safe to ignore the result |
@@ -246,11 +247,21 @@ enum CheckEvent { | |||
246 | End, | 247 | End, |
247 | } | 248 | } |
248 | 249 | ||
250 | #[derive(Debug)] | ||
251 | pub struct CargoError(String); | ||
252 | |||
253 | impl fmt::Display for CargoError { | ||
254 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
255 | write!(f, "Cargo failed: {}", self.0) | ||
256 | } | ||
257 | } | ||
258 | impl error::Error for CargoError {} | ||
259 | |||
249 | pub fn run_cargo( | 260 | pub fn run_cargo( |
250 | args: &[String], | 261 | args: &[String], |
251 | current_dir: Option<&Path>, | 262 | current_dir: Option<&Path>, |
252 | on_message: &mut dyn FnMut(cargo_metadata::Message) -> bool, | 263 | on_message: &mut dyn FnMut(cargo_metadata::Message) -> bool, |
253 | ) -> Child { | 264 | ) -> Result<(), CargoError> { |
254 | let mut command = Command::new("cargo"); | 265 | let mut command = Command::new("cargo"); |
255 | if let Some(current_dir) = current_dir { | 266 | if let Some(current_dir) = current_dir { |
256 | command.current_dir(current_dir); | 267 | command.current_dir(current_dir); |
@@ -273,6 +284,8 @@ pub fn run_cargo( | |||
273 | // simply skip a line if it doesn't parse, which just ignores any | 284 | // simply skip a line if it doesn't parse, which just ignores any |
274 | // erroneus output. | 285 | // erroneus output. |
275 | let stdout = BufReader::new(child.stdout.take().unwrap()); | 286 | let stdout = BufReader::new(child.stdout.take().unwrap()); |
287 | let mut read_at_least_one_message = false; | ||
288 | |||
276 | for line in stdout.lines() { | 289 | for line in stdout.lines() { |
277 | let line = match line { | 290 | let line = match line { |
278 | Ok(line) => line, | 291 | Ok(line) => line, |
@@ -291,12 +304,31 @@ pub fn run_cargo( | |||
291 | } | 304 | } |
292 | }; | 305 | }; |
293 | 306 | ||
307 | read_at_least_one_message = true; | ||
308 | |||
294 | if !on_message(message) { | 309 | if !on_message(message) { |
295 | break; | 310 | break; |
296 | } | 311 | } |
297 | } | 312 | } |
298 | 313 | ||
299 | child | 314 | // It is okay to ignore the result, as it only errors if the process is already dead |
315 | let _ = child.kill(); | ||
316 | |||
317 | let err_msg = match child.wait() { | ||
318 | Ok(exit_code) if !exit_code.success() && !read_at_least_one_message => { | ||
319 | // FIXME: Read the stderr to display the reason, see `read2()` reference in PR comment: | ||
320 | // https://github.com/rust-analyzer/rust-analyzer/pull/3632#discussion_r395605298 | ||
321 | format!( | ||
322 | "the command produced no valid metadata (exit code: {:?}): cargo {}", | ||
323 | exit_code, | ||
324 | args.join(" ") | ||
325 | ) | ||
326 | } | ||
327 | Err(err) => format!("io error: {:?}", err), | ||
328 | Ok(_) => return Ok(()), | ||
329 | }; | ||
330 | |||
331 | Err(CargoError(err_msg)) | ||
300 | } | 332 | } |
301 | 333 | ||
302 | impl WatchThread { | 334 | impl WatchThread { |
@@ -325,7 +357,7 @@ impl WatchThread { | |||
325 | // which will break out of the loop, and continue the shutdown | 357 | // which will break out of the loop, and continue the shutdown |
326 | let _ = message_send.send(CheckEvent::Begin); | 358 | let _ = message_send.send(CheckEvent::Begin); |
327 | 359 | ||
328 | let mut child = run_cargo(&args, Some(&workspace_root), &mut |message| { | 360 | let res = run_cargo(&args, Some(&workspace_root), &mut |message| { |
329 | // Skip certain kinds of messages to only spend time on what's useful | 361 | // Skip certain kinds of messages to only spend time on what's useful |
330 | match &message { | 362 | match &message { |
331 | Message::CompilerArtifact(artifact) if artifact.fresh => return true, | 363 | Message::CompilerArtifact(artifact) if artifact.fresh => return true, |
@@ -334,26 +366,19 @@ impl WatchThread { | |||
334 | _ => {} | 366 | _ => {} |
335 | } | 367 | } |
336 | 368 | ||
337 | match message_send.send(CheckEvent::Msg(message)) { | 369 | // if the send channel was closed, we want to shutdown |
338 | Ok(()) => {} | 370 | message_send.send(CheckEvent::Msg(message)).is_ok() |
339 | Err(_err) => { | ||
340 | // The send channel was closed, so we want to shutdown | ||
341 | return false; | ||
342 | } | ||
343 | }; | ||
344 | |||
345 | true | ||
346 | }); | 371 | }); |
347 | 372 | ||
373 | if let Err(err) = res { | ||
374 | // FIXME: make the `message_send` to be `Sender<Result<CheckEvent, CargoError>>` | ||
375 | // to display user-caused misconfiguration errors instead of just logging them here | ||
376 | log::error!("Cargo watcher failed {:?}", err); | ||
377 | } | ||
378 | |||
348 | // We can ignore any error here, as we are already in the progress | 379 | // We can ignore any error here, as we are already in the progress |
349 | // of shutting down. | 380 | // of shutting down. |
350 | let _ = message_send.send(CheckEvent::End); | 381 | let _ = message_send.send(CheckEvent::End); |
351 | |||
352 | // It is okay to ignore the result, as it only errors if the process is already dead | ||
353 | let _ = child.kill(); | ||
354 | |||
355 | // Again, we don't care about the exit status so just ignore the result | ||
356 | let _ = child.wait(); | ||
357 | })) | 382 | })) |
358 | } else { | 383 | } else { |
359 | None | 384 | None |
diff --git a/crates/ra_fmt/Cargo.toml b/crates/ra_fmt/Cargo.toml index ea9befeaf..e9d057afc 100644 --- a/crates/ra_fmt/Cargo.toml +++ b/crates/ra_fmt/Cargo.toml | |||
@@ -9,6 +9,6 @@ publish = false | |||
9 | doctest = false | 9 | doctest = false |
10 | 10 | ||
11 | [dependencies] | 11 | [dependencies] |
12 | itertools = "0.8.2" | 12 | itertools = "0.9.0" |
13 | 13 | ||
14 | ra_syntax = { path = "../ra_syntax" } | 14 | ra_syntax = { path = "../ra_syntax" } |
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 42193b492..ba7b39a19 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml | |||
@@ -13,7 +13,7 @@ rustc-hash = "1.1.0" | |||
13 | either = "1.5.3" | 13 | either = "1.5.3" |
14 | arrayvec = "0.5.1" | 14 | arrayvec = "0.5.1" |
15 | 15 | ||
16 | itertools = "0.8.2" | 16 | itertools = "0.9.0" |
17 | 17 | ||
18 | ra_syntax = { path = "../ra_syntax" } | 18 | ra_syntax = { path = "../ra_syntax" } |
19 | ra_db = { path = "../ra_db" } | 19 | ra_db = { path = "../ra_db" } |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 45e31095c..c5cfd875f 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -10,7 +10,7 @@ use hir_def::{ | |||
10 | docs::Documentation, | 10 | docs::Documentation, |
11 | expr::{BindingAnnotation, Pat, PatId}, | 11 | expr::{BindingAnnotation, Pat, PatId}, |
12 | per_ns::PerNs, | 12 | per_ns::PerNs, |
13 | resolver::HasResolver, | 13 | resolver::{HasResolver, Resolver}, |
14 | type_ref::{Mutability, TypeRef}, | 14 | type_ref::{Mutability, TypeRef}, |
15 | AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, | 15 | AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, |
16 | ImplId, LocalEnumVariantId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, | 16 | ImplId, LocalEnumVariantId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, |
@@ -33,7 +33,11 @@ use ra_syntax::{ | |||
33 | }; | 33 | }; |
34 | use rustc_hash::FxHashSet; | 34 | use rustc_hash::FxHashSet; |
35 | 35 | ||
36 | use crate::{db::HirDatabase, has_source::HasSource, CallableDef, HirDisplay, InFile, Name}; | 36 | use crate::{ |
37 | db::{DefDatabase, HirDatabase}, | ||
38 | has_source::HasSource, | ||
39 | CallableDef, HirDisplay, InFile, Name, | ||
40 | }; | ||
37 | 41 | ||
38 | /// hir::Crate describes a single crate. It's the main interface with which | 42 | /// hir::Crate describes a single crate. It's the main interface with which |
39 | /// a crate's dependencies interact. Mostly, it should be just a proxy for the | 43 | /// a crate's dependencies interact. Mostly, it should be just a proxy for the |
@@ -274,20 +278,10 @@ impl Module { | |||
274 | /// this module, if possible. | 278 | /// this module, if possible. |
275 | pub fn find_use_path( | 279 | pub fn find_use_path( |
276 | self, | 280 | self, |
277 | db: &dyn HirDatabase, | 281 | db: &dyn DefDatabase, |
278 | item: ModuleDef, | 282 | item: impl Into<ItemInNs>, |
279 | ) -> Option<hir_def::path::ModPath> { | 283 | ) -> Option<hir_def::path::ModPath> { |
280 | // FIXME expose namespace choice | 284 | hir_def::find_path::find_path(db, item.into(), self.into()) |
281 | hir_def::find_path::find_path(db.upcast(), determine_item_namespace(item), self.into()) | ||
282 | } | ||
283 | } | ||
284 | |||
285 | fn determine_item_namespace(module_def: ModuleDef) -> ItemInNs { | ||
286 | match module_def { | ||
287 | ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { | ||
288 | ItemInNs::Values(module_def.into()) | ||
289 | } | ||
290 | _ => ItemInNs::Types(module_def.into()), | ||
291 | } | 285 | } |
292 | } | 286 | } |
293 | 287 | ||
@@ -912,10 +906,8 @@ impl Local { | |||
912 | let def = DefWithBodyId::from(self.parent); | 906 | let def = DefWithBodyId::from(self.parent); |
913 | let infer = db.infer(def); | 907 | let infer = db.infer(def); |
914 | let ty = infer[self.pat_id].clone(); | 908 | let ty = infer[self.pat_id].clone(); |
915 | let resolver = def.resolver(db.upcast()); | ||
916 | let krate = def.module(db.upcast()).krate; | 909 | let krate = def.module(db.upcast()).krate; |
917 | let environment = TraitEnvironment::lower(db, &resolver); | 910 | Type::new(db, krate, def, ty) |
918 | Type { krate, ty: InEnvironment { value: ty, environment } } | ||
919 | } | 911 | } |
920 | 912 | ||
921 | pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::BindPat, ast::SelfParam>> { | 913 | pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::BindPat, ast::SelfParam>> { |
@@ -1020,11 +1012,21 @@ impl ImplDef { | |||
1020 | 1012 | ||
1021 | #[derive(Clone, PartialEq, Eq, Debug)] | 1013 | #[derive(Clone, PartialEq, Eq, Debug)] |
1022 | pub struct Type { | 1014 | pub struct Type { |
1023 | pub(crate) krate: CrateId, | 1015 | krate: CrateId, |
1024 | pub(crate) ty: InEnvironment<Ty>, | 1016 | ty: InEnvironment<Ty>, |
1025 | } | 1017 | } |
1026 | 1018 | ||
1027 | impl Type { | 1019 | impl Type { |
1020 | pub(crate) fn new_with_resolver( | ||
1021 | db: &dyn HirDatabase, | ||
1022 | resolver: &Resolver, | ||
1023 | ty: Ty, | ||
1024 | ) -> Option<Type> { | ||
1025 | let krate = resolver.krate()?; | ||
1026 | let environment = TraitEnvironment::lower(db, &resolver); | ||
1027 | Some(Type { krate, ty: InEnvironment { value: ty, environment } }) | ||
1028 | } | ||
1029 | |||
1028 | fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { | 1030 | fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { |
1029 | let resolver = lexical_env.resolver(db.upcast()); | 1031 | let resolver = lexical_env.resolver(db.upcast()); |
1030 | let environment = TraitEnvironment::lower(db, &resolver); | 1032 | let environment = TraitEnvironment::lower(db, &resolver); |
@@ -1042,30 +1044,18 @@ impl Type { | |||
1042 | } | 1044 | } |
1043 | 1045 | ||
1044 | pub fn is_bool(&self) -> bool { | 1046 | pub fn is_bool(&self) -> bool { |
1045 | match &self.ty.value { | 1047 | matches!(self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })) |
1046 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
1047 | TypeCtor::Bool => true, | ||
1048 | _ => false, | ||
1049 | }, | ||
1050 | _ => false, | ||
1051 | } | ||
1052 | } | 1048 | } |
1053 | 1049 | ||
1054 | pub fn is_mutable_reference(&self) -> bool { | 1050 | pub fn is_mutable_reference(&self) -> bool { |
1055 | match &self.ty.value { | 1051 | matches!( |
1056 | Ty::Apply(a_ty) => match a_ty.ctor { | 1052 | self.ty.value, |
1057 | TypeCtor::Ref(Mutability::Mut) => true, | 1053 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(Mutability::Mut), .. }) |
1058 | _ => false, | 1054 | ) |
1059 | }, | ||
1060 | _ => false, | ||
1061 | } | ||
1062 | } | 1055 | } |
1063 | 1056 | ||
1064 | pub fn is_unknown(&self) -> bool { | 1057 | pub fn is_unknown(&self) -> bool { |
1065 | match &self.ty.value { | 1058 | matches!(self.ty.value, Ty::Unknown) |
1066 | Ty::Unknown => true, | ||
1067 | _ => false, | ||
1068 | } | ||
1069 | } | 1059 | } |
1070 | 1060 | ||
1071 | /// Checks that particular type `ty` implements `std::future::Future`. | 1061 | /// Checks that particular type `ty` implements `std::future::Future`. |
diff --git a/crates/ra_hir/src/from_id.rs b/crates/ra_hir/src/from_id.rs index c179b13c6..62fb52e72 100644 --- a/crates/ra_hir/src/from_id.rs +++ b/crates/ra_hir/src/from_id.rs | |||
@@ -9,8 +9,8 @@ use hir_def::{ | |||
9 | }; | 9 | }; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | Adt, AssocItem, AttrDef, DefWithBody, EnumVariant, GenericDef, Local, ModuleDef, StructField, | 12 | code_model::ItemInNs, Adt, AssocItem, AttrDef, DefWithBody, EnumVariant, GenericDef, Local, |
13 | VariantDef, | 13 | MacroDef, ModuleDef, StructField, VariantDef, |
14 | }; | 14 | }; |
15 | 15 | ||
16 | macro_rules! from_id { | 16 | macro_rules! from_id { |
@@ -228,3 +228,20 @@ impl From<(DefWithBodyId, PatId)> for Local { | |||
228 | Local { parent, pat_id } | 228 | Local { parent, pat_id } |
229 | } | 229 | } |
230 | } | 230 | } |
231 | |||
232 | impl From<MacroDef> for ItemInNs { | ||
233 | fn from(macro_def: MacroDef) -> Self { | ||
234 | ItemInNs::Macros(macro_def.into()) | ||
235 | } | ||
236 | } | ||
237 | |||
238 | impl From<ModuleDef> for ItemInNs { | ||
239 | fn from(module_def: ModuleDef) -> Self { | ||
240 | match module_def { | ||
241 | ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { | ||
242 | ItemInNs::Values(module_def.into()) | ||
243 | } | ||
244 | _ => ItemInNs::Types(module_def.into()), | ||
245 | } | ||
246 | } | ||
247 | } | ||
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs index 9f59d590c..713d45f48 100644 --- a/crates/ra_hir/src/lib.rs +++ b/crates/ra_hir/src/lib.rs | |||
@@ -4,6 +4,18 @@ | |||
4 | //! The principal difference between HIR and syntax trees is that HIR is bound | 4 | //! The principal difference between HIR and syntax trees is that HIR is bound |
5 | //! to a particular crate instance. That is, it has cfg flags and features | 5 | //! to a particular crate instance. That is, it has cfg flags and features |
6 | //! applied. So, the relation between syntax and HIR is many-to-one. | 6 | //! applied. So, the relation between syntax and HIR is many-to-one. |
7 | //! | ||
8 | //! HIR is the public API of the all of the compiler logic above syntax trees. | ||
9 | //! It is written in "OO" style. Each type is self contained (as in, it knows it's | ||
10 | //! parents and full context). It should be "clean code". | ||
11 | //! | ||
12 | //! `ra_hir_*` crates are the implementation of the compiler logic. | ||
13 | //! They are written in "ECS" style, with relatively little abstractions. | ||
14 | //! Many types are not self-contained, and explicitly use local indexes, arenas, etc. | ||
15 | //! | ||
16 | //! `ra_hir` is what insulates the "we don't know how to actually write an incremental compiler" | ||
17 | //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: | ||
18 | //! https://www.tedinski.com/2018/02/06/system-boundaries.html. | ||
7 | 19 | ||
8 | #![recursion_limit = "512"] | 20 | #![recursion_limit = "512"] |
9 | 21 | ||
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs index 55e634528..d982f6ffa 100644 --- a/crates/ra_hir/src/semantics.rs +++ b/crates/ra_hir/src/semantics.rs | |||
@@ -12,7 +12,8 @@ use hir_expand::ExpansionInfo; | |||
12 | use ra_db::{FileId, FileRange}; | 12 | use ra_db::{FileId, FileRange}; |
13 | use ra_prof::profile; | 13 | use ra_prof::profile; |
14 | use ra_syntax::{ | 14 | use ra_syntax::{ |
15 | algo::skip_trivia_token, ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, | 15 | algo::{find_node_at_offset, skip_trivia_token}, |
16 | ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, | ||
16 | }; | 17 | }; |
17 | use rustc_hash::{FxHashMap, FxHashSet}; | 18 | use rustc_hash::{FxHashMap, FxHashSet}; |
18 | 19 | ||
@@ -108,6 +109,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
108 | token.value | 109 | token.value |
109 | } | 110 | } |
110 | 111 | ||
112 | pub fn descend_node_at_offset<N: ast::AstNode>( | ||
113 | &self, | ||
114 | node: &SyntaxNode, | ||
115 | offset: TextUnit, | ||
116 | ) -> Option<N> { | ||
117 | // Handle macro token cases | ||
118 | node.token_at_offset(offset) | ||
119 | .map(|token| self.descend_into_macros(token)) | ||
120 | .find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast)) | ||
121 | } | ||
122 | |||
111 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { | 123 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { |
112 | let node = self.find_file(node.clone()); | 124 | let node = self.find_file(node.clone()); |
113 | original_range(self.db, node.as_ref()) | 125 | original_range(self.db, node.as_ref()) |
@@ -129,6 +141,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
129 | .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) | 141 | .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) |
130 | } | 142 | } |
131 | 143 | ||
144 | /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, | ||
145 | /// search up until it is of the target AstNode type | ||
132 | pub fn find_node_at_offset_with_macros<N: AstNode>( | 146 | pub fn find_node_at_offset_with_macros<N: AstNode>( |
133 | &self, | 147 | &self, |
134 | node: &SyntaxNode, | 148 | node: &SyntaxNode, |
@@ -137,6 +151,19 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
137 | self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) | 151 | self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) |
138 | } | 152 | } |
139 | 153 | ||
154 | /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, | ||
155 | /// descend it and find again | ||
156 | pub fn find_node_at_offset_with_descend<N: AstNode>( | ||
157 | &self, | ||
158 | node: &SyntaxNode, | ||
159 | offset: TextUnit, | ||
160 | ) -> Option<N> { | ||
161 | if let Some(it) = find_node_at_offset(&node, offset) { | ||
162 | return Some(it); | ||
163 | } | ||
164 | self.descend_node_at_offset(&node, offset) | ||
165 | } | ||
166 | |||
140 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | 167 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { |
141 | self.analyze(expr.syntax()).type_of(self.db, &expr) | 168 | self.analyze(expr.syntax()).type_of(self.db, &expr) |
142 | } | 169 | } |
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs index e8afef328..10c12c910 100644 --- a/crates/ra_hir/src/source_analyzer.rs +++ b/crates/ra_hir/src/source_analyzer.rs | |||
@@ -17,7 +17,7 @@ use hir_def::{ | |||
17 | AsMacroCall, DefWithBodyId, | 17 | AsMacroCall, DefWithBodyId, |
18 | }; | 18 | }; |
19 | use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; | 19 | use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; |
20 | use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; | 20 | use hir_ty::InferenceResult; |
21 | use ra_syntax::{ | 21 | use ra_syntax::{ |
22 | ast::{self, AstNode}, | 22 | ast::{self, AstNode}, |
23 | SyntaxNode, SyntaxNodePtr, TextUnit, | 23 | SyntaxNode, SyntaxNodePtr, TextUnit, |
@@ -103,10 +103,6 @@ impl SourceAnalyzer { | |||
103 | Some(res) | 103 | Some(res) |
104 | } | 104 | } |
105 | 105 | ||
106 | fn trait_env(&self, db: &dyn HirDatabase) -> Arc<TraitEnvironment> { | ||
107 | TraitEnvironment::lower(db, &self.resolver) | ||
108 | } | ||
109 | |||
110 | pub(crate) fn type_of(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<Type> { | 106 | pub(crate) fn type_of(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<Type> { |
111 | let expr_id = match expr { | 107 | let expr_id = match expr { |
112 | ast::Expr::MacroCall(call) => { | 108 | ast::Expr::MacroCall(call) => { |
@@ -117,15 +113,13 @@ impl SourceAnalyzer { | |||
117 | }?; | 113 | }?; |
118 | 114 | ||
119 | let ty = self.infer.as_ref()?[expr_id].clone(); | 115 | let ty = self.infer.as_ref()?[expr_id].clone(); |
120 | let environment = self.trait_env(db); | 116 | Type::new_with_resolver(db, &self.resolver, ty) |
121 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) | ||
122 | } | 117 | } |
123 | 118 | ||
124 | pub(crate) fn type_of_pat(&self, db: &dyn HirDatabase, pat: &ast::Pat) -> Option<Type> { | 119 | pub(crate) fn type_of_pat(&self, db: &dyn HirDatabase, pat: &ast::Pat) -> Option<Type> { |
125 | let pat_id = self.pat_id(pat)?; | 120 | let pat_id = self.pat_id(pat)?; |
126 | let ty = self.infer.as_ref()?[pat_id].clone(); | 121 | let ty = self.infer.as_ref()?[pat_id].clone(); |
127 | let environment = self.trait_env(db); | 122 | Type::new_with_resolver(db, &self.resolver, ty) |
128 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) | ||
129 | } | 123 | } |
130 | 124 | ||
131 | pub(crate) fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | 125 | pub(crate) fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { |
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs index 27a297e8b..5f9d53ecb 100644 --- a/crates/ra_hir_def/src/body.rs +++ b/crates/ra_hir_def/src/body.rs | |||
@@ -30,6 +30,7 @@ pub(crate) struct Expander { | |||
30 | hygiene: Hygiene, | 30 | hygiene: Hygiene, |
31 | ast_id_map: Arc<AstIdMap>, | 31 | ast_id_map: Arc<AstIdMap>, |
32 | module: ModuleId, | 32 | module: ModuleId, |
33 | recursive_limit: usize, | ||
33 | } | 34 | } |
34 | 35 | ||
35 | impl Expander { | 36 | impl Expander { |
@@ -41,7 +42,7 @@ impl Expander { | |||
41 | let crate_def_map = db.crate_def_map(module.krate); | 42 | let crate_def_map = db.crate_def_map(module.krate); |
42 | let hygiene = Hygiene::new(db.upcast(), current_file_id); | 43 | let hygiene = Hygiene::new(db.upcast(), current_file_id); |
43 | let ast_id_map = db.ast_id_map(current_file_id); | 44 | let ast_id_map = db.ast_id_map(current_file_id); |
44 | Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module } | 45 | Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module, recursive_limit: 0 } |
45 | } | 46 | } |
46 | 47 | ||
47 | pub(crate) fn enter_expand<T: ast::AstNode>( | 48 | pub(crate) fn enter_expand<T: ast::AstNode>( |
@@ -50,6 +51,10 @@ impl Expander { | |||
50 | local_scope: Option<&ItemScope>, | 51 | local_scope: Option<&ItemScope>, |
51 | macro_call: ast::MacroCall, | 52 | macro_call: ast::MacroCall, |
52 | ) -> Option<(Mark, T)> { | 53 | ) -> Option<(Mark, T)> { |
54 | if self.recursive_limit > 1024 { | ||
55 | return None; | ||
56 | } | ||
57 | |||
53 | let macro_call = InFile::new(self.current_file_id, ¯o_call); | 58 | let macro_call = InFile::new(self.current_file_id, ¯o_call); |
54 | 59 | ||
55 | if let Some(call_id) = macro_call.as_call_id(db, |path| { | 60 | if let Some(call_id) = macro_call.as_call_id(db, |path| { |
@@ -73,6 +78,7 @@ impl Expander { | |||
73 | self.hygiene = Hygiene::new(db.upcast(), file_id); | 78 | self.hygiene = Hygiene::new(db.upcast(), file_id); |
74 | self.current_file_id = file_id; | 79 | self.current_file_id = file_id; |
75 | self.ast_id_map = db.ast_id_map(file_id); | 80 | self.ast_id_map = db.ast_id_map(file_id); |
81 | self.recursive_limit += 1; | ||
76 | 82 | ||
77 | return Some((mark, expr)); | 83 | return Some((mark, expr)); |
78 | } | 84 | } |
@@ -88,6 +94,7 @@ impl Expander { | |||
88 | self.hygiene = Hygiene::new(db.upcast(), mark.file_id); | 94 | self.hygiene = Hygiene::new(db.upcast(), mark.file_id); |
89 | self.current_file_id = mark.file_id; | 95 | self.current_file_id = mark.file_id; |
90 | self.ast_id_map = mem::take(&mut mark.ast_id_map); | 96 | self.ast_id_map = mem::take(&mut mark.ast_id_map); |
97 | self.recursive_limit -= 1; | ||
91 | mark.bomb.defuse(); | 98 | mark.bomb.defuse(); |
92 | } | 99 | } |
93 | 100 | ||
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs index e8c58ed32..3cf0c66ea 100644 --- a/crates/ra_hir_def/src/body/lower.rs +++ b/crates/ra_hir_def/src/body/lower.rs | |||
@@ -453,7 +453,7 @@ impl ExprCollector<'_> { | |||
453 | } | 453 | } |
454 | } | 454 | } |
455 | ast::Expr::MacroCall(e) => { | 455 | ast::Expr::MacroCall(e) => { |
456 | if let Some(name) = is_macro_rules(&e) { | 456 | if let Some(name) = e.is_macro_rules().map(|it| it.as_name()) { |
457 | let mac = MacroDefId { | 457 | let mac = MacroDefId { |
458 | krate: Some(self.expander.module.krate), | 458 | krate: Some(self.expander.module.krate), |
459 | ast_id: Some(self.expander.ast_id(&e)), | 459 | ast_id: Some(self.expander.ast_id(&e)), |
@@ -697,16 +697,6 @@ impl ExprCollector<'_> { | |||
697 | } | 697 | } |
698 | } | 698 | } |
699 | 699 | ||
700 | fn is_macro_rules(m: &ast::MacroCall) -> Option<Name> { | ||
701 | let name = m.path()?.segment()?.name_ref()?.as_name(); | ||
702 | |||
703 | if name == name![macro_rules] { | ||
704 | Some(m.name()?.as_name()) | ||
705 | } else { | ||
706 | None | ||
707 | } | ||
708 | } | ||
709 | |||
710 | impl From<ast::BinOp> for BinaryOp { | 700 | impl From<ast::BinOp> for BinaryOp { |
711 | fn from(ast_op: ast::BinOp) -> Self { | 701 | fn from(ast_op: ast::BinOp) -> Self { |
712 | match ast_op { | 702 | match ast_op { |
diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs index 246032c13..904080341 100644 --- a/crates/ra_hir_def/src/path.rs +++ b/crates/ra_hir_def/src/path.rs | |||
@@ -95,7 +95,7 @@ pub struct Path { | |||
95 | /// Note that paths like `<Type as Trait>::foo` are desugard to `Trait::<Self=Type>::foo`. | 95 | /// Note that paths like `<Type as Trait>::foo` are desugard to `Trait::<Self=Type>::foo`. |
96 | type_anchor: Option<Box<TypeRef>>, | 96 | type_anchor: Option<Box<TypeRef>>, |
97 | mod_path: ModPath, | 97 | mod_path: ModPath, |
98 | /// Invariant: the same len as self.path.segments | 98 | /// Invariant: the same len as `self.mod_path.segments` |
99 | generic_args: Vec<Option<Arc<GenericArgs>>>, | 99 | generic_args: Vec<Option<Arc<GenericArgs>>>, |
100 | } | 100 | } |
101 | 101 | ||
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs index d171d2dfd..5a696542f 100644 --- a/crates/ra_hir_expand/src/db.rs +++ b/crates/ra_hir_expand/src/db.rs | |||
@@ -6,7 +6,7 @@ use mbe::{ExpandResult, MacroRules}; | |||
6 | use ra_db::{salsa, SourceDatabase}; | 6 | use ra_db::{salsa, SourceDatabase}; |
7 | use ra_parser::FragmentKind; | 7 | use ra_parser::FragmentKind; |
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | use ra_syntax::{AstNode, Parse, SyntaxKind::*, SyntaxNode}; | 9 | use ra_syntax::{algo::diff, AstNode, Parse, SyntaxKind::*, SyntaxNode}; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, | 12 | ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, |
@@ -238,7 +238,7 @@ pub fn parse_macro_with_arg( | |||
238 | } else { | 238 | } else { |
239 | db.macro_expand(macro_call_id) | 239 | db.macro_expand(macro_call_id) |
240 | }; | 240 | }; |
241 | if let Some(err) = err { | 241 | if let Some(err) = &err { |
242 | // Note: | 242 | // Note: |
243 | // The final goal we would like to make all parse_macro success, | 243 | // The final goal we would like to make all parse_macro success, |
244 | // such that the following log will not call anyway. | 244 | // such that the following log will not call anyway. |
@@ -272,7 +272,25 @@ pub fn parse_macro_with_arg( | |||
272 | let fragment_kind = to_fragment_kind(db, macro_call_id); | 272 | let fragment_kind = to_fragment_kind(db, macro_call_id); |
273 | 273 | ||
274 | let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?; | 274 | let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?; |
275 | Some((parse, Arc::new(rev_token_map))) | 275 | |
276 | if err.is_none() { | ||
277 | Some((parse, Arc::new(rev_token_map))) | ||
278 | } else { | ||
279 | // FIXME: | ||
280 | // In future, we should propagate the actual error with recovery information | ||
281 | // instead of ignore the error here. | ||
282 | |||
283 | // Safe check for recurisve identity macro | ||
284 | let node = parse.syntax_node(); | ||
285 | let file: HirFileId = macro_file.into(); | ||
286 | let call_node = file.call_node(db)?; | ||
287 | |||
288 | if !diff(&node, &call_node.value).is_empty() { | ||
289 | Some((parse, Arc::new(rev_token_map))) | ||
290 | } else { | ||
291 | None | ||
292 | } | ||
293 | } | ||
276 | } | 294 | } |
277 | 295 | ||
278 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. | 296 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. |
diff --git a/crates/ra_hir_expand/src/eager.rs b/crates/ra_hir_expand/src/eager.rs index 4cbce4df5..687d40294 100644 --- a/crates/ra_hir_expand/src/eager.rs +++ b/crates/ra_hir_expand/src/eager.rs | |||
@@ -26,8 +26,8 @@ use crate::{ | |||
26 | }; | 26 | }; |
27 | 27 | ||
28 | use ra_parser::FragmentKind; | 28 | use ra_parser::FragmentKind; |
29 | use ra_syntax::{algo::replace_descendants, SyntaxElement, SyntaxNode}; | 29 | use ra_syntax::{algo::SyntaxRewriter, SyntaxNode}; |
30 | use std::{collections::HashMap, sync::Arc}; | 30 | use std::sync::Arc; |
31 | 31 | ||
32 | pub fn expand_eager_macro( | 32 | pub fn expand_eager_macro( |
33 | db: &dyn AstDatabase, | 33 | db: &dyn AstDatabase, |
@@ -95,10 +95,10 @@ fn eager_macro_recur( | |||
95 | curr: InFile<SyntaxNode>, | 95 | curr: InFile<SyntaxNode>, |
96 | macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>, | 96 | macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>, |
97 | ) -> Option<SyntaxNode> { | 97 | ) -> Option<SyntaxNode> { |
98 | let mut original = curr.value.clone(); | 98 | let original = curr.value.clone(); |
99 | 99 | ||
100 | let children = curr.value.descendants().filter_map(ast::MacroCall::cast); | 100 | let children = curr.value.descendants().filter_map(ast::MacroCall::cast); |
101 | let mut replaces: HashMap<SyntaxElement, SyntaxElement> = HashMap::default(); | 101 | let mut rewriter = SyntaxRewriter::default(); |
102 | 102 | ||
103 | // Collect replacement | 103 | // Collect replacement |
104 | for child in children { | 104 | for child in children { |
@@ -119,12 +119,9 @@ fn eager_macro_recur( | |||
119 | } | 119 | } |
120 | }; | 120 | }; |
121 | 121 | ||
122 | replaces.insert(child.syntax().clone().into(), insert.into()); | 122 | rewriter.replace(child.syntax(), &insert); |
123 | } | 123 | } |
124 | 124 | ||
125 | if !replaces.is_empty() { | 125 | let res = rewriter.rewrite(&original); |
126 | original = replace_descendants(&original, |n| replaces.get(n).cloned()); | 126 | Some(res) |
127 | } | ||
128 | |||
129 | Some(original) | ||
130 | } | 127 | } |
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs index 69c059ac8..533c6ccfb 100644 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ b/crates/ra_hir_ty/src/method_resolution.rs | |||
@@ -95,14 +95,14 @@ impl Ty { | |||
95 | // Types like slice can have inherent impls in several crates, (core and alloc). | 95 | // Types like slice can have inherent impls in several crates, (core and alloc). |
96 | // The corresponding impls are marked with lang items, so we can use them to find the required crates. | 96 | // The corresponding impls are marked with lang items, so we can use them to find the required crates. |
97 | macro_rules! lang_item_crate { | 97 | macro_rules! lang_item_crate { |
98 | ($($name:expr),+ $(,)?) => {{ | 98 | ($($name:expr),+ $(,)?) => {{ |
99 | let mut v = ArrayVec::<[LangItemTarget; 2]>::new(); | 99 | let mut v = ArrayVec::<[LangItemTarget; 2]>::new(); |
100 | $( | 100 | $( |
101 | v.extend(db.lang_item(cur_crate, $name.into())); | 101 | v.extend(db.lang_item(cur_crate, $name.into())); |
102 | )+ | 102 | )+ |
103 | v | 103 | v |
104 | }}; | 104 | }}; |
105 | } | 105 | } |
106 | 106 | ||
107 | let lang_item_targets = match self { | 107 | let lang_item_targets = match self { |
108 | Ty::Apply(a_ty) => match a_ty.ctor { | 108 | Ty::Apply(a_ty) => match a_ty.ctor { |
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs index 14c8ed3a9..a02e3ee05 100644 --- a/crates/ra_hir_ty/src/tests/regression.rs +++ b/crates/ra_hir_ty/src/tests/regression.rs | |||
@@ -453,3 +453,34 @@ pub mod str { | |||
453 | // should be Option<char>, but currently not because of Chalk ambiguity problem | 453 | // should be Option<char>, but currently not because of Chalk ambiguity problem |
454 | assert_eq!("(Option<{unknown}>, Option<{unknown}>)", super::type_at_pos(&db, pos)); | 454 | assert_eq!("(Option<{unknown}>, Option<{unknown}>)", super::type_at_pos(&db, pos)); |
455 | } | 455 | } |
456 | |||
457 | #[test] | ||
458 | fn issue_3642_bad_macro_stackover() { | ||
459 | let (db, pos) = TestDB::with_position( | ||
460 | r#" | ||
461 | //- /main.rs | ||
462 | #[macro_export] | ||
463 | macro_rules! match_ast { | ||
464 | (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; | ||
465 | |||
466 | (match ($node:expr) { | ||
467 | $( ast::$ast:ident($it:ident) => $res:expr, )* | ||
468 | _ => $catch_all:expr $(,)? | ||
469 | }) => {{ | ||
470 | $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )* | ||
471 | { $catch_all } | ||
472 | }}; | ||
473 | } | ||
474 | |||
475 | fn main() { | ||
476 | let anchor<|> = match_ast! { | ||
477 | match parent { | ||
478 | as => {}, | ||
479 | _ => return None | ||
480 | } | ||
481 | }; | ||
482 | }"#, | ||
483 | ); | ||
484 | |||
485 | assert_eq!("()", super::type_at_pos(&db, pos)); | ||
486 | } | ||
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index 7235c944c..36eec0e60 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -14,7 +14,7 @@ wasm = [] | |||
14 | either = "1.5.3" | 14 | either = "1.5.3" |
15 | format-buf = "1.0.0" | 15 | format-buf = "1.0.0" |
16 | indexmap = "1.3.2" | 16 | indexmap = "1.3.2" |
17 | itertools = "0.8.2" | 17 | itertools = "0.9.0" |
18 | join_to_string = "0.1.3" | 18 | join_to_string = "0.1.3" |
19 | log = "0.4.8" | 19 | log = "0.4.8" |
20 | rustc-hash = "1.1.0" | 20 | rustc-hash = "1.1.0" |
diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs index 0ba382165..0a00054b2 100644 --- a/crates/ra_ide/src/completion/complete_postfix.rs +++ b/crates/ra_ide/src/completion/complete_postfix.rs | |||
@@ -1,6 +1,9 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_syntax::{ast::AstNode, TextRange, TextUnit}; | 3 | use ra_syntax::{ |
4 | ast::{self, AstNode}, | ||
5 | TextRange, TextUnit, | ||
6 | }; | ||
4 | use ra_text_edit::TextEdit; | 7 | use ra_text_edit::TextEdit; |
5 | 8 | ||
6 | use crate::{ | 9 | use crate::{ |
@@ -21,13 +24,8 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
21 | None => return, | 24 | None => return, |
22 | }; | 25 | }; |
23 | 26 | ||
24 | let receiver_text = if ctx.dot_receiver_is_ambiguous_float_literal { | 27 | let receiver_text = |
25 | let text = dot_receiver.syntax().text(); | 28 | get_receiver_text(dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal); |
26 | let without_dot = ..text.len() - TextUnit::of_char('.'); | ||
27 | text.slice(without_dot).to_string() | ||
28 | } else { | ||
29 | dot_receiver.syntax().text().to_string() | ||
30 | }; | ||
31 | 29 | ||
32 | let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { | 30 | let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { |
33 | Some(it) => it, | 31 | Some(it) => it, |
@@ -35,10 +33,17 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
35 | }; | 33 | }; |
36 | 34 | ||
37 | if receiver_ty.is_bool() || receiver_ty.is_unknown() { | 35 | if receiver_ty.is_bool() || receiver_ty.is_unknown() { |
38 | postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text)) | ||
39 | .add_to(acc); | ||
40 | postfix_snippet( | 36 | postfix_snippet( |
41 | ctx, | 37 | ctx, |
38 | &dot_receiver, | ||
39 | "if", | ||
40 | "if expr {}", | ||
41 | &format!("if {} {{$0}}", receiver_text), | ||
42 | ) | ||
43 | .add_to(acc); | ||
44 | postfix_snippet( | ||
45 | ctx, | ||
46 | &dot_receiver, | ||
42 | "while", | 47 | "while", |
43 | "while expr {}", | 48 | "while expr {}", |
44 | &format!("while {} {{\n$0\n}}", receiver_text), | 49 | &format!("while {} {{\n$0\n}}", receiver_text), |
@@ -46,28 +51,70 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
46 | .add_to(acc); | 51 | .add_to(acc); |
47 | } | 52 | } |
48 | 53 | ||
49 | postfix_snippet(ctx, "not", "!expr", &format!("!{}", receiver_text)).add_to(acc); | 54 | // !&&&42 is a compiler error, ergo process it before considering the references |
55 | postfix_snippet(ctx, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text)).add_to(acc); | ||
50 | 56 | ||
51 | postfix_snippet(ctx, "ref", "&expr", &format!("&{}", receiver_text)).add_to(acc); | 57 | postfix_snippet(ctx, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text)).add_to(acc); |
52 | postfix_snippet(ctx, "refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc); | 58 | postfix_snippet(ctx, &dot_receiver, "refm", "&mut expr", &format!("&mut {}", receiver_text)) |
59 | .add_to(acc); | ||
60 | |||
61 | // The rest of the postfix completions create an expression that moves an argument, | ||
62 | // so it's better to consider references now to avoid breaking the compilation | ||
63 | let dot_receiver = include_references(dot_receiver); | ||
64 | let receiver_text = | ||
65 | get_receiver_text(&dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal); | ||
53 | 66 | ||
54 | postfix_snippet( | 67 | postfix_snippet( |
55 | ctx, | 68 | ctx, |
69 | &dot_receiver, | ||
56 | "match", | 70 | "match", |
57 | "match expr {}", | 71 | "match expr {}", |
58 | &format!("match {} {{\n ${{1:_}} => {{$0\\}},\n}}", receiver_text), | 72 | &format!("match {} {{\n ${{1:_}} => {{$0\\}},\n}}", receiver_text), |
59 | ) | 73 | ) |
60 | .add_to(acc); | 74 | .add_to(acc); |
61 | 75 | ||
62 | postfix_snippet(ctx, "dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); | 76 | postfix_snippet( |
77 | ctx, | ||
78 | &dot_receiver, | ||
79 | "box", | ||
80 | "Box::new(expr)", | ||
81 | &format!("Box::new({})", receiver_text), | ||
82 | ) | ||
83 | .add_to(acc); | ||
63 | 84 | ||
64 | postfix_snippet(ctx, "box", "Box::new(expr)", &format!("Box::new({})", receiver_text)) | 85 | postfix_snippet(ctx, &dot_receiver, "dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)) |
65 | .add_to(acc); | 86 | .add_to(acc); |
66 | } | 87 | } |
67 | 88 | ||
68 | fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { | 89 | fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String { |
90 | if receiver_is_ambiguous_float_literal { | ||
91 | let text = receiver.syntax().text(); | ||
92 | let without_dot = ..text.len() - TextUnit::of_char('.'); | ||
93 | text.slice(without_dot).to_string() | ||
94 | } else { | ||
95 | receiver.to_string() | ||
96 | } | ||
97 | } | ||
98 | |||
99 | fn include_references(initial_element: &ast::Expr) -> ast::Expr { | ||
100 | let mut resulting_element = initial_element.clone(); | ||
101 | while let Some(parent_ref_element) = | ||
102 | resulting_element.syntax().parent().and_then(ast::RefExpr::cast) | ||
103 | { | ||
104 | resulting_element = ast::Expr::from(parent_ref_element); | ||
105 | } | ||
106 | resulting_element | ||
107 | } | ||
108 | |||
109 | fn postfix_snippet( | ||
110 | ctx: &CompletionContext, | ||
111 | receiver: &ast::Expr, | ||
112 | label: &str, | ||
113 | detail: &str, | ||
114 | snippet: &str, | ||
115 | ) -> Builder { | ||
69 | let edit = { | 116 | let edit = { |
70 | let receiver_syntax = ctx.dot_receiver.as_ref().expect("no receiver available").syntax(); | 117 | let receiver_syntax = receiver.syntax(); |
71 | let receiver_range = ctx.sema.original_range(receiver_syntax).range; | 118 | let receiver_range = ctx.sema.original_range(receiver_syntax).range; |
72 | let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); | 119 | let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); |
73 | TextEdit::replace(delete_range, snippet.to_string()) | 120 | TextEdit::replace(delete_range, snippet.to_string()) |
@@ -340,4 +387,63 @@ mod tests { | |||
340 | "### | 387 | "### |
341 | ); | 388 | ); |
342 | } | 389 | } |
390 | |||
391 | #[test] | ||
392 | fn postfix_completion_for_references() { | ||
393 | assert_debug_snapshot!( | ||
394 | do_postfix_completion( | ||
395 | r#" | ||
396 | fn main() { | ||
397 | &&&&42.<|> | ||
398 | } | ||
399 | "#, | ||
400 | ), | ||
401 | @r###" | ||
402 | [ | ||
403 | CompletionItem { | ||
404 | label: "box", | ||
405 | source_range: [56; 56), | ||
406 | delete: [49; 56), | ||
407 | insert: "Box::new(&&&&42)", | ||
408 | detail: "Box::new(expr)", | ||
409 | }, | ||
410 | CompletionItem { | ||
411 | label: "dbg", | ||
412 | source_range: [56; 56), | ||
413 | delete: [49; 56), | ||
414 | insert: "dbg!(&&&&42)", | ||
415 | detail: "dbg!(expr)", | ||
416 | }, | ||
417 | CompletionItem { | ||
418 | label: "match", | ||
419 | source_range: [56; 56), | ||
420 | delete: [49; 56), | ||
421 | insert: "match &&&&42 {\n ${1:_} => {$0\\},\n}", | ||
422 | detail: "match expr {}", | ||
423 | }, | ||
424 | CompletionItem { | ||
425 | label: "not", | ||
426 | source_range: [56; 56), | ||
427 | delete: [53; 56), | ||
428 | insert: "!42", | ||
429 | detail: "!expr", | ||
430 | }, | ||
431 | CompletionItem { | ||
432 | label: "ref", | ||
433 | source_range: [56; 56), | ||
434 | delete: [53; 56), | ||
435 | insert: "&42", | ||
436 | detail: "&expr", | ||
437 | }, | ||
438 | CompletionItem { | ||
439 | label: "refm", | ||
440 | source_range: [56; 56), | ||
441 | delete: [53; 56), | ||
442 | insert: "&mut 42", | ||
443 | detail: "&mut expr", | ||
444 | }, | ||
445 | ] | ||
446 | "### | ||
447 | ); | ||
448 | } | ||
343 | } | 449 | } |
diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs index 83ed1d52c..e4e764f58 100644 --- a/crates/ra_ide/src/completion/complete_record_literal.rs +++ b/crates/ra_ide/src/completion/complete_record_literal.rs | |||
@@ -1,6 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use crate::completion::{CompletionContext, Completions}; | 3 | use crate::completion::{CompletionContext, Completions}; |
4 | use ra_syntax::SmolStr; | ||
4 | 5 | ||
5 | /// Complete fields in fields literals. | 6 | /// Complete fields in fields literals. |
6 | pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { | 7 | pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { |
@@ -11,8 +12,24 @@ pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionCon | |||
11 | _ => return, | 12 | _ => return, |
12 | }; | 13 | }; |
13 | 14 | ||
15 | let already_present_names: Vec<SmolStr> = ctx | ||
16 | .record_lit_syntax | ||
17 | .as_ref() | ||
18 | .and_then(|record_literal| record_literal.record_field_list()) | ||
19 | .map(|field_list| field_list.fields()) | ||
20 | .map(|fields| { | ||
21 | fields | ||
22 | .into_iter() | ||
23 | .filter_map(|field| field.name_ref()) | ||
24 | .map(|name_ref| name_ref.text().clone()) | ||
25 | .collect() | ||
26 | }) | ||
27 | .unwrap_or_default(); | ||
28 | |||
14 | for (field, field_ty) in ty.variant_fields(ctx.db, variant) { | 29 | for (field, field_ty) in ty.variant_fields(ctx.db, variant) { |
15 | acc.add_field(ctx, field, &field_ty); | 30 | if !already_present_names.contains(&SmolStr::from(field.name(ctx.db).to_string())) { |
31 | acc.add_field(ctx, field, &field_ty); | ||
32 | } | ||
16 | } | 33 | } |
17 | } | 34 | } |
18 | 35 | ||
@@ -178,4 +195,47 @@ mod tests { | |||
178 | ] | 195 | ] |
179 | "###); | 196 | "###); |
180 | } | 197 | } |
198 | |||
199 | #[test] | ||
200 | fn only_missing_fields_are_completed() { | ||
201 | let completions = complete( | ||
202 | r" | ||
203 | struct S { | ||
204 | foo1: u32, | ||
205 | foo2: u32, | ||
206 | bar: u32, | ||
207 | baz: u32, | ||
208 | } | ||
209 | |||
210 | fn main() { | ||
211 | let foo1 = 1; | ||
212 | let s = S { | ||
213 | foo1, | ||
214 | foo2: 5, | ||
215 | <|> | ||
216 | } | ||
217 | } | ||
218 | ", | ||
219 | ); | ||
220 | assert_debug_snapshot!(completions, @r###" | ||
221 | [ | ||
222 | CompletionItem { | ||
223 | label: "bar", | ||
224 | source_range: [302; 302), | ||
225 | delete: [302; 302), | ||
226 | insert: "bar", | ||
227 | kind: Field, | ||
228 | detail: "u32", | ||
229 | }, | ||
230 | CompletionItem { | ||
231 | label: "baz", | ||
232 | source_range: [302; 302), | ||
233 | delete: [302; 302), | ||
234 | insert: "baz", | ||
235 | kind: Field, | ||
236 | detail: "u32", | ||
237 | }, | ||
238 | ] | ||
239 | "###); | ||
240 | } | ||
181 | } | 241 | } |
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs index 7fefa2c7a..ded1ff3bc 100644 --- a/crates/ra_ide/src/completion/complete_trait_impl.rs +++ b/crates/ra_ide/src/completion/complete_trait_impl.rs | |||
@@ -193,7 +193,7 @@ fn add_const_impl( | |||
193 | } | 193 | } |
194 | 194 | ||
195 | fn make_const_compl_syntax(const_: &ast::ConstDef) -> String { | 195 | fn make_const_compl_syntax(const_: &ast::ConstDef) -> String { |
196 | let const_ = edit::strip_attrs_and_docs(const_); | 196 | let const_ = edit::remove_attrs_and_docs(const_); |
197 | 197 | ||
198 | let const_start = const_.syntax().text_range().start(); | 198 | let const_start = const_.syntax().text_range().start(); |
199 | let const_end = const_.syntax().text_range().end(); | 199 | let const_end = const_.syntax().text_range().end(); |
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs index e58526f31..f536ba3e7 100644 --- a/crates/ra_ide/src/expand_macro.rs +++ b/crates/ra_ide/src/expand_macro.rs | |||
@@ -3,10 +3,9 @@ | |||
3 | use hir::Semantics; | 3 | use hir::Semantics; |
4 | use ra_ide_db::RootDatabase; | 4 | use ra_ide_db::RootDatabase; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | algo::{find_node_at_offset, replace_descendants}, | 6 | algo::{find_node_at_offset, SyntaxRewriter}, |
7 | ast, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, WalkEvent, T, | 7 | ast, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, T, |
8 | }; | 8 | }; |
9 | use rustc_hash::FxHashMap; | ||
10 | 9 | ||
11 | use crate::FilePosition; | 10 | use crate::FilePosition; |
12 | 11 | ||
@@ -37,7 +36,7 @@ fn expand_macro_recur( | |||
37 | let mut expanded = sema.expand(macro_call)?; | 36 | let mut expanded = sema.expand(macro_call)?; |
38 | 37 | ||
39 | let children = expanded.descendants().filter_map(ast::MacroCall::cast); | 38 | let children = expanded.descendants().filter_map(ast::MacroCall::cast); |
40 | let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default(); | 39 | let mut rewriter = SyntaxRewriter::default(); |
41 | 40 | ||
42 | for child in children.into_iter() { | 41 | for child in children.into_iter() { |
43 | if let Some(new_node) = expand_macro_recur(sema, &child) { | 42 | if let Some(new_node) = expand_macro_recur(sema, &child) { |
@@ -47,12 +46,13 @@ fn expand_macro_recur( | |||
47 | if expanded == *child.syntax() { | 46 | if expanded == *child.syntax() { |
48 | expanded = new_node; | 47 | expanded = new_node; |
49 | } else { | 48 | } else { |
50 | replaces.insert(child.syntax().clone().into(), new_node.into()); | 49 | rewriter.replace(child.syntax(), &new_node) |
51 | } | 50 | } |
52 | } | 51 | } |
53 | } | 52 | } |
54 | 53 | ||
55 | Some(replace_descendants(&expanded, |n| replaces.get(n).cloned())) | 54 | let res = rewriter.rewrite(&expanded); |
55 | Some(res) | ||
56 | } | 56 | } |
57 | 57 | ||
58 | // FIXME: It would also be cool to share logic here and in the mbe tests, | 58 | // FIXME: It would also be cool to share logic here and in the mbe tests, |
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs index 3ea0ac230..746cc86ba 100644 --- a/crates/ra_ide/src/references.rs +++ b/crates/ra_ide/src/references.rs | |||
@@ -94,12 +94,16 @@ pub(crate) fn find_all_refs( | |||
94 | let sema = Semantics::new(db); | 94 | let sema = Semantics::new(db); |
95 | let syntax = sema.parse(position.file_id).syntax().clone(); | 95 | let syntax = sema.parse(position.file_id).syntax().clone(); |
96 | 96 | ||
97 | let (opt_name, search_kind) = | 97 | let (opt_name, search_kind) = if let Some(name) = |
98 | if let Some(name) = get_struct_def_name_for_struct_literal_search(&syntax, position) { | 98 | get_struct_def_name_for_struct_literal_search(&sema, &syntax, position) |
99 | (Some(name), ReferenceKind::StructLiteral) | 99 | { |
100 | } else { | 100 | (Some(name), ReferenceKind::StructLiteral) |
101 | (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) | 101 | } else { |
102 | }; | 102 | ( |
103 | sema.find_node_at_offset_with_descend::<ast::Name>(&syntax, position.offset), | ||
104 | ReferenceKind::Other, | ||
105 | ) | ||
106 | }; | ||
103 | 107 | ||
104 | let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?; | 108 | let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?; |
105 | 109 | ||
@@ -131,7 +135,8 @@ fn find_name( | |||
131 | let range = name.syntax().text_range(); | 135 | let range = name.syntax().text_range(); |
132 | return Some(RangeInfo::new(range, def)); | 136 | return Some(RangeInfo::new(range, def)); |
133 | } | 137 | } |
134 | let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?; | 138 | let name_ref = |
139 | sema.find_node_at_offset_with_descend::<ast::NameRef>(&syntax, position.offset)?; | ||
135 | let def = classify_name_ref(sema, &name_ref)?.definition(); | 140 | let def = classify_name_ref(sema, &name_ref)?.definition(); |
136 | let range = name_ref.syntax().text_range(); | 141 | let range = name_ref.syntax().text_range(); |
137 | Some(RangeInfo::new(range, def)) | 142 | Some(RangeInfo::new(range, def)) |
@@ -157,6 +162,7 @@ fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Optio | |||
157 | } | 162 | } |
158 | 163 | ||
159 | fn get_struct_def_name_for_struct_literal_search( | 164 | fn get_struct_def_name_for_struct_literal_search( |
165 | sema: &Semantics<RootDatabase>, | ||
160 | syntax: &SyntaxNode, | 166 | syntax: &SyntaxNode, |
161 | position: FilePosition, | 167 | position: FilePosition, |
162 | ) -> Option<ast::Name> { | 168 | ) -> Option<ast::Name> { |
@@ -164,10 +170,18 @@ fn get_struct_def_name_for_struct_literal_search( | |||
164 | if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { | 170 | if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { |
165 | return None; | 171 | return None; |
166 | } | 172 | } |
167 | if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, left.text_range().start()) { | 173 | if let Some(name) = |
174 | sema.find_node_at_offset_with_descend::<ast::Name>(&syntax, left.text_range().start()) | ||
175 | { | ||
168 | return name.syntax().ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); | 176 | return name.syntax().ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); |
169 | } | 177 | } |
170 | if find_node_at_offset::<ast::TypeParamList>(&syntax, left.text_range().start()).is_some() { | 178 | if sema |
179 | .find_node_at_offset_with_descend::<ast::TypeParamList>( | ||
180 | &syntax, | ||
181 | left.text_range().start(), | ||
182 | ) | ||
183 | .is_some() | ||
184 | { | ||
171 | return left.ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); | 185 | return left.ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); |
172 | } | 186 | } |
173 | } | 187 | } |
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs index 7d1190af9..9acc6158a 100644 --- a/crates/ra_ide/src/references/rename.rs +++ b/crates/ra_ide/src/references/rename.rs | |||
@@ -250,6 +250,63 @@ mod tests { | |||
250 | } | 250 | } |
251 | 251 | ||
252 | #[test] | 252 | #[test] |
253 | fn test_rename_for_macro_args_rev() { | ||
254 | test_rename( | ||
255 | r#" | ||
256 | macro_rules! foo {($i:ident) => {$i} } | ||
257 | fn main() { | ||
258 | let a = "test"; | ||
259 | foo!(a<|>); | ||
260 | }"#, | ||
261 | "b", | ||
262 | r#" | ||
263 | macro_rules! foo {($i:ident) => {$i} } | ||
264 | fn main() { | ||
265 | let b = "test"; | ||
266 | foo!(b); | ||
267 | }"#, | ||
268 | ); | ||
269 | } | ||
270 | |||
271 | #[test] | ||
272 | fn test_rename_for_macro_define_fn() { | ||
273 | test_rename( | ||
274 | r#" | ||
275 | macro_rules! define_fn {($id:ident) => { fn $id{} }} | ||
276 | define_fn!(foo); | ||
277 | fn main() { | ||
278 | fo<|>o(); | ||
279 | }"#, | ||
280 | "bar", | ||
281 | r#" | ||
282 | macro_rules! define_fn {($id:ident) => { fn $id{} }} | ||
283 | define_fn!(bar); | ||
284 | fn main() { | ||
285 | bar(); | ||
286 | }"#, | ||
287 | ); | ||
288 | } | ||
289 | |||
290 | #[test] | ||
291 | fn test_rename_for_macro_define_fn_rev() { | ||
292 | test_rename( | ||
293 | r#" | ||
294 | macro_rules! define_fn {($id:ident) => { fn $id{} }} | ||
295 | define_fn!(fo<|>o); | ||
296 | fn main() { | ||
297 | foo(); | ||
298 | }"#, | ||
299 | "bar", | ||
300 | r#" | ||
301 | macro_rules! define_fn {($id:ident) => { fn $id{} }} | ||
302 | define_fn!(bar); | ||
303 | fn main() { | ||
304 | bar(); | ||
305 | }"#, | ||
306 | ); | ||
307 | } | ||
308 | |||
309 | #[test] | ||
253 | fn test_rename_for_param_inside() { | 310 | fn test_rename_for_param_inside() { |
254 | test_rename( | 311 | test_rename( |
255 | r#" | 312 | r#" |
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml index de4f5bce0..c3921bd40 100644 --- a/crates/ra_ide_db/Cargo.toml +++ b/crates/ra_ide_db/Cargo.toml | |||
@@ -17,6 +17,7 @@ fst = { version = "0.4", default-features = false } | |||
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | superslice = "1.0.0" | 18 | superslice = "1.0.0" |
19 | once_cell = "1.3.1" | 19 | once_cell = "1.3.1" |
20 | either = "1.5.3" | ||
20 | 21 | ||
21 | ra_syntax = { path = "../ra_syntax" } | 22 | ra_syntax = { path = "../ra_syntax" } |
22 | ra_text_edit = { path = "../ra_text_edit" } | 23 | ra_text_edit = { path = "../ra_text_edit" } |
diff --git a/crates/ra_ide_db/src/imports_locator.rs b/crates/ra_ide_db/src/imports_locator.rs index c96351982..bf0d8db60 100644 --- a/crates/ra_ide_db/src/imports_locator.rs +++ b/crates/ra_ide_db/src/imports_locator.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! This module contains an import search funcionality that is provided to the ra_assists module. | 1 | //! This module contains an import search funcionality that is provided to the ra_assists module. |
2 | //! Later, this should be moved away to a separate crate that is accessible from the ra_assists module. | 2 | //! Later, this should be moved away to a separate crate that is accessible from the ra_assists module. |
3 | 3 | ||
4 | use hir::{ModuleDef, Semantics}; | 4 | use hir::{MacroDef, ModuleDef, Semantics}; |
5 | use ra_prof::profile; | 5 | use ra_prof::profile; |
6 | use ra_syntax::{ast, AstNode, SyntaxKind::NAME}; | 6 | use ra_syntax::{ast, AstNode, SyntaxKind::NAME}; |
7 | 7 | ||
@@ -10,6 +10,7 @@ use crate::{ | |||
10 | symbol_index::{self, FileSymbol, Query}, | 10 | symbol_index::{self, FileSymbol, Query}, |
11 | RootDatabase, | 11 | RootDatabase, |
12 | }; | 12 | }; |
13 | use either::Either; | ||
13 | 14 | ||
14 | pub struct ImportsLocator<'a> { | 15 | pub struct ImportsLocator<'a> { |
15 | sema: Semantics<'a, RootDatabase>, | 16 | sema: Semantics<'a, RootDatabase>, |
@@ -20,7 +21,7 @@ impl<'a> ImportsLocator<'a> { | |||
20 | Self { sema: Semantics::new(db) } | 21 | Self { sema: Semantics::new(db) } |
21 | } | 22 | } |
22 | 23 | ||
23 | pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> { | 24 | pub fn find_imports(&mut self, name_to_import: &str) -> Vec<Either<ModuleDef, MacroDef>> { |
24 | let _p = profile("search_for_imports"); | 25 | let _p = profile("search_for_imports"); |
25 | let db = self.sema.db; | 26 | let db = self.sema.db; |
26 | 27 | ||
@@ -43,7 +44,8 @@ impl<'a> ImportsLocator<'a> { | |||
43 | .chain(lib_results.into_iter()) | 44 | .chain(lib_results.into_iter()) |
44 | .filter_map(|import_candidate| self.get_name_definition(&import_candidate)) | 45 | .filter_map(|import_candidate| self.get_name_definition(&import_candidate)) |
45 | .filter_map(|name_definition_to_import| match name_definition_to_import { | 46 | .filter_map(|name_definition_to_import| match name_definition_to_import { |
46 | Definition::ModuleDef(module_def) => Some(module_def), | 47 | Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), |
48 | Definition::Macro(macro_def) => Some(Either::Right(macro_def)), | ||
47 | _ => None, | 49 | _ => None, |
48 | }) | 50 | }) |
49 | .collect() | 51 | .collect() |
diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs index cf78d3e41..117454695 100644 --- a/crates/ra_ide_db/src/search.rs +++ b/crates/ra_ide_db/src/search.rs | |||
@@ -10,9 +10,7 @@ use hir::{DefWithBody, HasSource, ModuleSource, Semantics}; | |||
10 | use once_cell::unsync::Lazy; | 10 | use once_cell::unsync::Lazy; |
11 | use ra_db::{FileId, FileRange, SourceDatabaseExt}; | 11 | use ra_db::{FileId, FileRange, SourceDatabaseExt}; |
12 | use ra_prof::profile; | 12 | use ra_prof::profile; |
13 | use ra_syntax::{ | 13 | use ra_syntax::{ast, match_ast, AstNode, TextRange, TextUnit}; |
14 | algo::find_node_at_offset, ast, match_ast, AstNode, TextRange, TextUnit, TokenAtOffset, | ||
15 | }; | ||
16 | use rustc_hash::FxHashMap; | 14 | use rustc_hash::FxHashMap; |
17 | use test_utils::tested_by; | 15 | use test_utils::tested_by; |
18 | 16 | ||
@@ -219,21 +217,11 @@ impl Definition { | |||
219 | continue; | 217 | continue; |
220 | } | 218 | } |
221 | 219 | ||
222 | let name_ref = | 220 | let name_ref: ast::NameRef = |
223 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) { | 221 | if let Some(name_ref) = sema.find_node_at_offset_with_descend(&tree, offset) { |
224 | name_ref | 222 | name_ref |
225 | } else { | 223 | } else { |
226 | // Handle macro token cases | 224 | continue; |
227 | let token = match tree.token_at_offset(offset) { | ||
228 | TokenAtOffset::None => continue, | ||
229 | TokenAtOffset::Single(t) => t, | ||
230 | TokenAtOffset::Between(_, t) => t, | ||
231 | }; | ||
232 | let expanded = sema.descend_into_macros(token); | ||
233 | match ast::NameRef::cast(expanded.parent()) { | ||
234 | Some(name_ref) => name_ref, | ||
235 | _ => continue, | ||
236 | } | ||
237 | }; | 225 | }; |
238 | 226 | ||
239 | // FIXME: reuse sb | 227 | // FIXME: reuse sb |
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs index 884359ee3..0f46f93c1 100644 --- a/crates/ra_ide_db/src/symbol_index.rs +++ b/crates/ra_ide_db/src/symbol_index.rs | |||
@@ -362,6 +362,13 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { | |||
362 | ast::TypeAliasDef(it) => { decl(it) }, | 362 | ast::TypeAliasDef(it) => { decl(it) }, |
363 | ast::ConstDef(it) => { decl(it) }, | 363 | ast::ConstDef(it) => { decl(it) }, |
364 | ast::StaticDef(it) => { decl(it) }, | 364 | ast::StaticDef(it) => { decl(it) }, |
365 | ast::MacroCall(it) => { | ||
366 | if it.is_macro_rules().is_some() { | ||
367 | decl(it) | ||
368 | } else { | ||
369 | None | ||
370 | } | ||
371 | }, | ||
365 | _ => None, | 372 | _ => None, |
366 | } | 373 | } |
367 | } | 374 | } |
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs index c2857dbfc..c7f9bd873 100644 --- a/crates/ra_project_model/src/cargo_workspace.rs +++ b/crates/ra_project_model/src/cargo_workspace.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | }; | 6 | }; |
7 | 7 | ||
8 | use anyhow::{Context, Result}; | 8 | use anyhow::{Context, Result}; |
9 | use cargo_metadata::{CargoOpt, Message, MetadataCommand, PackageId}; | 9 | use cargo_metadata::{BuildScript, CargoOpt, Message, MetadataCommand, PackageId}; |
10 | use ra_arena::{Arena, Idx}; | 10 | use ra_arena::{Arena, Idx}; |
11 | use ra_cargo_watch::run_cargo; | 11 | use ra_cargo_watch::run_cargo; |
12 | use ra_db::Edition; | 12 | use ra_db::Edition; |
@@ -254,7 +254,7 @@ pub fn load_out_dirs( | |||
254 | "check".to_string(), | 254 | "check".to_string(), |
255 | "--message-format=json".to_string(), | 255 | "--message-format=json".to_string(), |
256 | "--manifest-path".to_string(), | 256 | "--manifest-path".to_string(), |
257 | format!("{}", cargo_toml.display()), | 257 | cargo_toml.display().to_string(), |
258 | ]; | 258 | ]; |
259 | 259 | ||
260 | if cargo_features.all_features { | 260 | if cargo_features.all_features { |
@@ -263,19 +263,15 @@ pub fn load_out_dirs( | |||
263 | // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` | 263 | // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` |
264 | // https://github.com/oli-obk/cargo_metadata/issues/79 | 264 | // https://github.com/oli-obk/cargo_metadata/issues/79 |
265 | args.push("--no-default-features".to_string()); | 265 | args.push("--no-default-features".to_string()); |
266 | } else if !cargo_features.features.is_empty() { | 266 | } else { |
267 | for feature in &cargo_features.features { | 267 | args.extend(cargo_features.features.iter().cloned()); |
268 | args.push(feature.clone()); | ||
269 | } | ||
270 | } | 268 | } |
271 | 269 | ||
272 | let mut res = FxHashMap::default(); | 270 | let mut acc = FxHashMap::default(); |
273 | let mut child = run_cargo(&args, cargo_toml.parent(), &mut |message| { | 271 | let res = run_cargo(&args, cargo_toml.parent(), &mut |message| { |
274 | match message { | 272 | match message { |
275 | Message::BuildScriptExecuted(message) => { | 273 | Message::BuildScriptExecuted(BuildScript { package_id, out_dir, .. }) => { |
276 | let package_id = message.package_id; | 274 | acc.insert(package_id, out_dir); |
277 | let out_dir = message.out_dir; | ||
278 | res.insert(package_id, out_dir); | ||
279 | } | 275 | } |
280 | 276 | ||
281 | Message::CompilerArtifact(_) => (), | 277 | Message::CompilerArtifact(_) => (), |
@@ -285,6 +281,9 @@ pub fn load_out_dirs( | |||
285 | true | 281 | true |
286 | }); | 282 | }); |
287 | 283 | ||
288 | let _ = child.wait(); | 284 | if let Err(err) = res { |
289 | res | 285 | log::error!("Failed to load outdirs: {:?}", err); |
286 | } | ||
287 | |||
288 | acc | ||
290 | } | 289 | } |
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index b500a74fb..a3ef9acdc 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs | |||
@@ -336,7 +336,7 @@ impl ProjectWorkspace { | |||
336 | extern_source, | 336 | extern_source, |
337 | ); | 337 | ); |
338 | if cargo[tgt].kind == TargetKind::Lib { | 338 | if cargo[tgt].kind == TargetKind::Lib { |
339 | lib_tgt = Some(crate_id); | 339 | lib_tgt = Some((crate_id, cargo[tgt].name.clone())); |
340 | pkg_to_lib_crate.insert(pkg, crate_id); | 340 | pkg_to_lib_crate.insert(pkg, crate_id); |
341 | } | 341 | } |
342 | if cargo[tgt].is_proc_macro { | 342 | if cargo[tgt].is_proc_macro { |
@@ -363,7 +363,7 @@ impl ProjectWorkspace { | |||
363 | 363 | ||
364 | // Set deps to the core, std and to the lib target of the current package | 364 | // Set deps to the core, std and to the lib target of the current package |
365 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { | 365 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { |
366 | if let Some(to) = lib_tgt { | 366 | if let Some((to, name)) = lib_tgt.clone() { |
367 | if to != from | 367 | if to != from |
368 | && crate_graph | 368 | && crate_graph |
369 | .add_dep( | 369 | .add_dep( |
@@ -371,7 +371,7 @@ impl ProjectWorkspace { | |||
371 | // For root projects with dashes in their name, | 371 | // For root projects with dashes in their name, |
372 | // cargo metadata does not do any normalization, | 372 | // cargo metadata does not do any normalization, |
373 | // so we do it ourselves currently | 373 | // so we do it ourselves currently |
374 | CrateName::normalize_dashes(&cargo[pkg].name), | 374 | CrateName::normalize_dashes(&name), |
375 | to, | 375 | to, |
376 | ) | 376 | ) |
377 | .is_err() | 377 | .is_err() |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index 8efc6b368..6fccc2303 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -11,7 +11,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer" | |||
11 | doctest = false | 11 | doctest = false |
12 | 12 | ||
13 | [dependencies] | 13 | [dependencies] |
14 | itertools = "0.8.2" | 14 | itertools = "0.9.0" |
15 | rowan = "0.9.1" | 15 | rowan = "0.9.1" |
16 | rustc_lexer = "0.1.0" | 16 | rustc_lexer = "0.1.0" |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index 344cf0fbe..4d463a3ef 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -1,6 +1,9 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use std::ops::RangeInclusive; | 3 | use std::{ |
4 | fmt, | ||
5 | ops::{self, RangeInclusive}, | ||
6 | }; | ||
4 | 7 | ||
5 | use itertools::Itertools; | 8 | use itertools::Itertools; |
6 | use ra_text_edit::TextEditBuilder; | 9 | use ra_text_edit::TextEditBuilder; |
@@ -95,6 +98,10 @@ impl TreeDiff { | |||
95 | builder.replace(from.text_range(), to.to_string()) | 98 | builder.replace(from.text_range(), to.to_string()) |
96 | } | 99 | } |
97 | } | 100 | } |
101 | |||
102 | pub fn is_empty(&self) -> bool { | ||
103 | self.replacements.is_empty() | ||
104 | } | ||
98 | } | 105 | } |
99 | 106 | ||
100 | /// Finds minimal the diff, which, applied to `from`, will result in `to`. | 107 | /// Finds minimal the diff, which, applied to `from`, will result in `to`. |
@@ -218,44 +225,121 @@ fn _replace_children( | |||
218 | with_children(parent, new_children) | 225 | with_children(parent, new_children) |
219 | } | 226 | } |
220 | 227 | ||
221 | /// Replaces descendants in the node, according to the mapping. | 228 | #[derive(Default)] |
222 | /// | 229 | pub struct SyntaxRewriter<'a> { |
223 | /// This is a type-unsafe low-level editing API, if you need to use it, prefer | 230 | f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>, |
224 | /// to create a type-safe abstraction on top of it instead. | 231 | //FIXME: add debug_assertions that all elements are in fact from the same file. |
225 | pub fn replace_descendants( | 232 | replacements: FxHashMap<SyntaxElement, Replacement>, |
226 | parent: &SyntaxNode, | ||
227 | map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>, | ||
228 | ) -> SyntaxNode { | ||
229 | _replace_descendants(parent, &map) | ||
230 | } | 233 | } |
231 | 234 | ||
232 | fn _replace_descendants( | 235 | impl fmt::Debug for SyntaxRewriter<'_> { |
233 | parent: &SyntaxNode, | 236 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
234 | map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>, | 237 | f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish() |
235 | ) -> SyntaxNode { | 238 | } |
236 | // FIXME: this could be made much faster. | 239 | } |
237 | let new_children = parent.children_with_tokens().map(|it| go(map, it)).collect::<Vec<_>>(); | ||
238 | return with_children(parent, new_children); | ||
239 | 240 | ||
240 | fn go( | 241 | impl<'a> SyntaxRewriter<'a> { |
241 | map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>, | 242 | pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> { |
242 | element: SyntaxElement, | 243 | SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() } |
243 | ) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { | 244 | } |
244 | if let Some(replacement) = map(&element) { | 245 | pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) { |
246 | let what = what.clone().into(); | ||
247 | let replacement = Replacement::Delete; | ||
248 | self.replacements.insert(what, replacement); | ||
249 | } | ||
250 | pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) { | ||
251 | let what = what.clone().into(); | ||
252 | let replacement = Replacement::Single(with.clone().into()); | ||
253 | self.replacements.insert(what, replacement); | ||
254 | } | ||
255 | pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) { | ||
256 | self.replace(what.syntax(), with.syntax()) | ||
257 | } | ||
258 | |||
259 | pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { | ||
260 | if self.f.is_none() && self.replacements.is_empty() { | ||
261 | return node.clone(); | ||
262 | } | ||
263 | self.rewrite_children(node) | ||
264 | } | ||
265 | |||
266 | pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N { | ||
267 | N::cast(self.rewrite(node.syntax())).unwrap() | ||
268 | } | ||
269 | |||
270 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { | ||
271 | assert!(self.f.is_none()); | ||
272 | self.replacements | ||
273 | .keys() | ||
274 | .map(|element| match element { | ||
275 | SyntaxElement::Node(it) => it.clone(), | ||
276 | SyntaxElement::Token(it) => it.parent(), | ||
277 | }) | ||
278 | .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) | ||
279 | } | ||
280 | |||
281 | fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> { | ||
282 | if let Some(f) = &self.f { | ||
283 | assert!(self.replacements.is_empty()); | ||
284 | return f(element).map(Replacement::Single); | ||
285 | } | ||
286 | self.replacements.get(element).cloned() | ||
287 | } | ||
288 | |||
289 | fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode { | ||
290 | // FIXME: this could be made much faster. | ||
291 | let new_children = | ||
292 | node.children_with_tokens().flat_map(|it| self.rewrite_self(&it)).collect::<Vec<_>>(); | ||
293 | with_children(node, new_children) | ||
294 | } | ||
295 | |||
296 | fn rewrite_self( | ||
297 | &self, | ||
298 | element: &SyntaxElement, | ||
299 | ) -> Option<NodeOrToken<rowan::GreenNode, rowan::GreenToken>> { | ||
300 | if let Some(replacement) = self.replacement(&element) { | ||
245 | return match replacement { | 301 | return match replacement { |
246 | NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()), | 302 | Replacement::Single(NodeOrToken::Node(it)) => { |
247 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), | 303 | Some(NodeOrToken::Node(it.green().clone())) |
304 | } | ||
305 | Replacement::Single(NodeOrToken::Token(it)) => { | ||
306 | Some(NodeOrToken::Token(it.green().clone())) | ||
307 | } | ||
308 | Replacement::Delete => None, | ||
248 | }; | 309 | }; |
249 | } | 310 | } |
250 | match element { | 311 | let res = match element { |
251 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), | 312 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), |
252 | NodeOrToken::Node(it) => { | 313 | NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()), |
253 | NodeOrToken::Node(_replace_descendants(&it, map).green().clone()) | 314 | }; |
254 | } | 315 | Some(res) |
255 | } | ||
256 | } | 316 | } |
257 | } | 317 | } |
258 | 318 | ||
319 | impl<'a> ops::AddAssign for SyntaxRewriter<'_> { | ||
320 | fn add_assign(&mut self, rhs: SyntaxRewriter) { | ||
321 | assert!(rhs.f.is_none()); | ||
322 | self.replacements.extend(rhs.replacements) | ||
323 | } | ||
324 | } | ||
325 | |||
326 | #[derive(Clone, Debug)] | ||
327 | enum Replacement { | ||
328 | Delete, | ||
329 | Single(SyntaxElement), | ||
330 | } | ||
331 | |||
332 | /// Replaces descendants in the node, according to the mapping. | ||
333 | /// | ||
334 | /// This is a type-unsafe low-level editing API, if you need to use it, prefer | ||
335 | /// to create a type-safe abstraction on top of it instead. | ||
336 | pub fn _replace_descendants( | ||
337 | parent: &SyntaxNode, | ||
338 | map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>, | ||
339 | ) -> SyntaxNode { | ||
340 | SyntaxRewriter::from_fn(map).rewrite(parent) | ||
341 | } | ||
342 | |||
259 | fn with_children( | 343 | fn with_children( |
260 | parent: &SyntaxNode, | 344 | parent: &SyntaxNode, |
261 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | 345 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, |
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs index 68dae008f..f74c9f9c6 100644 --- a/crates/ra_syntax/src/ast/edit.rs +++ b/crates/ra_syntax/src/ast/edit.rs | |||
@@ -4,7 +4,6 @@ | |||
4 | use std::{iter, ops::RangeInclusive}; | 4 | use std::{iter, ops::RangeInclusive}; |
5 | 5 | ||
6 | use arrayvec::ArrayVec; | 6 | use arrayvec::ArrayVec; |
7 | use rustc_hash::FxHashMap; | ||
8 | 7 | ||
9 | use crate::{ | 8 | use crate::{ |
10 | algo, | 9 | algo, |
@@ -17,6 +16,7 @@ use crate::{ | |||
17 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, | 16 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, |
18 | SyntaxNode, SyntaxToken, T, | 17 | SyntaxNode, SyntaxToken, T, |
19 | }; | 18 | }; |
19 | use algo::{neighbor, SyntaxRewriter}; | ||
20 | 20 | ||
21 | impl ast::BinExpr { | 21 | impl ast::BinExpr { |
22 | #[must_use] | 22 | #[must_use] |
@@ -255,6 +255,28 @@ impl ast::UseItem { | |||
255 | } | 255 | } |
256 | self.clone() | 256 | self.clone() |
257 | } | 257 | } |
258 | |||
259 | pub fn remove(&self) -> SyntaxRewriter<'static> { | ||
260 | let mut res = SyntaxRewriter::default(); | ||
261 | res.delete(self.syntax()); | ||
262 | let next_ws = self | ||
263 | .syntax() | ||
264 | .next_sibling_or_token() | ||
265 | .and_then(|it| it.into_token()) | ||
266 | .and_then(ast::Whitespace::cast); | ||
267 | if let Some(next_ws) = next_ws { | ||
268 | let ws_text = next_ws.syntax().text(); | ||
269 | if ws_text.starts_with('\n') { | ||
270 | let rest = &ws_text[1..]; | ||
271 | if rest.is_empty() { | ||
272 | res.delete(next_ws.syntax()) | ||
273 | } else { | ||
274 | res.replace(next_ws.syntax(), &make::tokens::whitespace(rest)); | ||
275 | } | ||
276 | } | ||
277 | } | ||
278 | res | ||
279 | } | ||
258 | } | 280 | } |
259 | 281 | ||
260 | impl ast::UseTree { | 282 | impl ast::UseTree { |
@@ -293,14 +315,30 @@ impl ast::UseTree { | |||
293 | Some(res) | 315 | Some(res) |
294 | } | 316 | } |
295 | } | 317 | } |
318 | |||
319 | pub fn remove(&self) -> SyntaxRewriter<'static> { | ||
320 | let mut res = SyntaxRewriter::default(); | ||
321 | res.delete(self.syntax()); | ||
322 | for &dir in [Direction::Next, Direction::Prev].iter() { | ||
323 | if let Some(nb) = neighbor(self, dir) { | ||
324 | self.syntax() | ||
325 | .siblings_with_tokens(dir) | ||
326 | .skip(1) | ||
327 | .take_while(|it| it.as_node() != Some(nb.syntax())) | ||
328 | .for_each(|el| res.delete(&el)); | ||
329 | return res; | ||
330 | } | ||
331 | } | ||
332 | res | ||
333 | } | ||
296 | } | 334 | } |
297 | 335 | ||
298 | #[must_use] | 336 | #[must_use] |
299 | pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { | 337 | pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { |
300 | N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap() | 338 | N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() |
301 | } | 339 | } |
302 | 340 | ||
303 | fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode { | 341 | fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode { |
304 | while let Some(start) = | 342 | while let Some(start) = |
305 | node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) | 343 | node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) |
306 | { | 344 | { |
@@ -343,28 +381,24 @@ impl IndentLevel { | |||
343 | } | 381 | } |
344 | 382 | ||
345 | fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode { | 383 | fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode { |
346 | let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node | 384 | let mut rewriter = SyntaxRewriter::default(); |
347 | .descendants_with_tokens() | 385 | node.descendants_with_tokens() |
348 | .filter_map(|el| el.into_token()) | 386 | .filter_map(|el| el.into_token()) |
349 | .filter_map(ast::Whitespace::cast) | 387 | .filter_map(ast::Whitespace::cast) |
350 | .filter(|ws| { | 388 | .filter(|ws| { |
351 | let text = ws.syntax().text(); | 389 | let text = ws.syntax().text(); |
352 | text.contains('\n') | 390 | text.contains('\n') |
353 | }) | 391 | }) |
354 | .map(|ws| { | 392 | .for_each(|ws| { |
355 | ( | 393 | let new_ws = make::tokens::whitespace(&format!( |
356 | ws.syntax().clone().into(), | 394 | "{}{:width$}", |
357 | make::tokens::whitespace(&format!( | 395 | ws.syntax().text(), |
358 | "{}{:width$}", | 396 | "", |
359 | ws.syntax().text(), | 397 | width = self.0 as usize * 4 |
360 | "", | 398 | )); |
361 | width = self.0 as usize * 4 | 399 | rewriter.replace(ws.syntax(), &new_ws) |
362 | )) | 400 | }); |
363 | .into(), | 401 | rewriter.rewrite(&node) |
364 | ) | ||
365 | }) | ||
366 | .collect(); | ||
367 | algo::replace_descendants(&node, |n| replacements.get(n).cloned()) | ||
368 | } | 402 | } |
369 | 403 | ||
370 | pub fn decrease_indent<N: AstNode>(self, node: N) -> N { | 404 | pub fn decrease_indent<N: AstNode>(self, node: N) -> N { |
@@ -372,27 +406,21 @@ impl IndentLevel { | |||
372 | } | 406 | } |
373 | 407 | ||
374 | fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode { | 408 | fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode { |
375 | let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node | 409 | let mut rewriter = SyntaxRewriter::default(); |
376 | .descendants_with_tokens() | 410 | node.descendants_with_tokens() |
377 | .filter_map(|el| el.into_token()) | 411 | .filter_map(|el| el.into_token()) |
378 | .filter_map(ast::Whitespace::cast) | 412 | .filter_map(ast::Whitespace::cast) |
379 | .filter(|ws| { | 413 | .filter(|ws| { |
380 | let text = ws.syntax().text(); | 414 | let text = ws.syntax().text(); |
381 | text.contains('\n') | 415 | text.contains('\n') |
382 | }) | 416 | }) |
383 | .map(|ws| { | 417 | .for_each(|ws| { |
384 | ( | 418 | let new_ws = make::tokens::whitespace( |
385 | ws.syntax().clone().into(), | 419 | &ws.syntax().text().replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"), |
386 | make::tokens::whitespace( | 420 | ); |
387 | &ws.syntax() | 421 | rewriter.replace(ws.syntax(), &new_ws) |
388 | .text() | 422 | }); |
389 | .replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"), | 423 | rewriter.rewrite(&node) |
390 | ) | ||
391 | .into(), | ||
392 | ) | ||
393 | }) | ||
394 | .collect(); | ||
395 | algo::replace_descendants(&node, |n| replacements.get(n).cloned()) | ||
396 | } | 424 | } |
397 | } | 425 | } |
398 | 426 | ||
@@ -442,12 +470,11 @@ pub trait AstNodeEdit: AstNode + Sized { | |||
442 | &self, | 470 | &self, |
443 | replacement_map: impl IntoIterator<Item = (D, D)>, | 471 | replacement_map: impl IntoIterator<Item = (D, D)>, |
444 | ) -> Self { | 472 | ) -> Self { |
445 | let map = replacement_map | 473 | let mut rewriter = SyntaxRewriter::default(); |
446 | .into_iter() | 474 | for (from, to) in replacement_map { |
447 | .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into())) | 475 | rewriter.replace(from.syntax(), to.syntax()) |
448 | .collect::<FxHashMap<SyntaxElement, _>>(); | 476 | } |
449 | let new_syntax = algo::replace_descendants(self.syntax(), |n| map.get(n).cloned()); | 477 | rewriter.rewrite_ast(self) |
450 | Self::cast(new_syntax).unwrap() | ||
451 | } | 478 | } |
452 | } | 479 | } |
453 | 480 | ||
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs index c3ae8f90e..392731dac 100644 --- a/crates/ra_syntax/src/ast/extensions.rs +++ b/crates/ra_syntax/src/ast/extensions.rs | |||
@@ -4,7 +4,7 @@ | |||
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | ast::{self, child_opt, children, AstNode, AttrInput, SyntaxNode}, | 7 | ast::{self, child_opt, children, AstNode, AttrInput, NameOwner, SyntaxNode}, |
8 | SmolStr, SyntaxElement, | 8 | SmolStr, SyntaxElement, |
9 | SyntaxKind::*, | 9 | SyntaxKind::*, |
10 | SyntaxToken, T, | 10 | SyntaxToken, T, |
@@ -514,3 +514,14 @@ impl ast::Visibility { | |||
514 | self.syntax().children_with_tokens().any(|it| it.kind() == T![super]) | 514 | self.syntax().children_with_tokens().any(|it| it.kind() == T![super]) |
515 | } | 515 | } |
516 | } | 516 | } |
517 | |||
518 | impl ast::MacroCall { | ||
519 | pub fn is_macro_rules(&self) -> Option<ast::Name> { | ||
520 | let name_ref = self.path()?.segment()?.name_ref()?; | ||
521 | if name_ref.text() == "macro_rules" { | ||
522 | self.name() | ||
523 | } else { | ||
524 | None | ||
525 | } | ||
526 | } | ||
527 | } | ||
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index 9f6f1cc53..1145b69e8 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -87,6 +87,9 @@ pub fn block_from_expr(e: ast::Expr) -> ast::Block { | |||
87 | pub fn expr_unit() -> ast::Expr { | 87 | pub fn expr_unit() -> ast::Expr { |
88 | expr_from_text("()") | 88 | expr_from_text("()") |
89 | } | 89 | } |
90 | pub fn expr_empty_block() -> ast::Expr { | ||
91 | expr_from_text("{}") | ||
92 | } | ||
90 | pub fn expr_unimplemented() -> ast::Expr { | 93 | pub fn expr_unimplemented() -> ast::Expr { |
91 | expr_from_text("unimplemented!()") | 94 | expr_from_text("unimplemented!()") |
92 | } | 95 | } |
@@ -136,6 +139,20 @@ pub fn placeholder_pat() -> ast::PlaceholderPat { | |||
136 | } | 139 | } |
137 | } | 140 | } |
138 | 141 | ||
142 | /// Creates a tuple of patterns from an iterator of patterns. | ||
143 | /// | ||
144 | /// Invariant: `pats` must be length > 1 | ||
145 | /// | ||
146 | /// FIXME handle `pats` length == 1 | ||
147 | pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat { | ||
148 | let pats_str = pats.into_iter().map(|p| p.to_string()).join(", "); | ||
149 | return from_text(&format!("({})", pats_str)); | ||
150 | |||
151 | fn from_text(text: &str) -> ast::TuplePat { | ||
152 | ast_from_text(&format!("fn f({}: ())", text)) | ||
153 | } | ||
154 | } | ||
155 | |||
139 | pub fn tuple_struct_pat( | 156 | pub fn tuple_struct_pat( |
140 | path: ast::Path, | 157 | path: ast::Path, |
141 | pats: impl IntoIterator<Item = ast::Pat>, | 158 | pats: impl IntoIterator<Item = ast::Pat>, |
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index d44f0ef1d..e071e9b8d 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -17,7 +17,7 @@ anyhow = "1.0.26" | |||
17 | crossbeam-channel = "0.4.0" | 17 | crossbeam-channel = "0.4.0" |
18 | env_logger = { version = "0.7.1", default-features = false } | 18 | env_logger = { version = "0.7.1", default-features = false } |
19 | globset = "0.4.4" | 19 | globset = "0.4.4" |
20 | itertools = "0.8.2" | 20 | itertools = "0.9.0" |
21 | jod-thread = "0.1.0" | 21 | jod-thread = "0.1.0" |
22 | log = "0.4.8" | 22 | log = "0.4.8" |
23 | lsp-types = { version = "0.73.0", features = ["proposed"] } | 23 | lsp-types = { version = "0.73.0", features = ["proposed"] } |
diff --git a/docs/user/assists.md b/docs/user/assists.md index e2850b4dd..f3ce6b0e0 100644 --- a/docs/user/assists.md +++ b/docs/user/assists.md | |||
@@ -267,8 +267,8 @@ enum Action { Move { distance: u32 }, Stop } | |||
267 | 267 | ||
268 | fn handle(action: Action) { | 268 | fn handle(action: Action) { |
269 | match action { | 269 | match action { |
270 | Action::Move { distance } => (), | 270 | Action::Move { distance } => {} |
271 | Action::Stop => (), | 271 | Action::Stop => {} |
272 | } | 272 | } |
273 | } | 273 | } |
274 | ``` | 274 | ``` |
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc index 0dfc12b52..e00d14dfb 100644 --- a/docs/user/readme.adoc +++ b/docs/user/readme.adoc | |||
@@ -98,6 +98,16 @@ You'll need Cargo, nodejs and npm for this. | |||
98 | 98 | ||
99 | Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually. | 99 | Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually. |
100 | 100 | ||
101 | ==== Troubleshooting | ||
102 | |||
103 | Here are some useful self-diagnostic commands: | ||
104 | |||
105 | * **Rust Analyzer: Show RA Version** shows the version of the `rust-analyzer` binary | ||
106 | * **Rust Analyzer: Status** prints some statistics about the server, such as the most recent LSP requests | ||
107 | * To enable server-side logging, run with `env RUST_LOG=info` and see `Output > Rust Analyzer Language Server` in VS Code's panel. | ||
108 | * To log all LSP requests, add `"rust-analyzer.trace.server": "verbose"` to the settings and look for `Server Trace` in the panel. | ||
109 | * To enable client-side logging, add `"rust-analyzer.trace.extension": true` to the settings and open the `Console` tab of VS Code developer tools. | ||
110 | |||
101 | === Language Server Binary | 111 | === Language Server Binary |
102 | 112 | ||
103 | Other editors generally require `rust-analyzer` binary to be in `$PATH`. | 113 | Other editors generally require `rust-analyzer` binary to be in `$PATH`. |
diff --git a/editors/code/package.json b/editors/code/package.json index eb5748515..1d113ebb6 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -73,10 +73,18 @@ | |||
73 | "type": "string" | 73 | "type": "string" |
74 | }, | 74 | }, |
75 | "args": { | 75 | "args": { |
76 | "type": "array" | 76 | "type": "array", |
77 | "items": { | ||
78 | "type": "string" | ||
79 | } | ||
77 | }, | 80 | }, |
78 | "env": { | 81 | "env": { |
79 | "type": "object" | 82 | "type": "object", |
83 | "patternProperties": { | ||
84 | ".+": { | ||
85 | "type": "string" | ||
86 | } | ||
87 | } | ||
80 | } | 88 | } |
81 | } | 89 | } |
82 | } | 90 | } |
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 08d821dd0..82ca749f3 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts | |||
@@ -99,8 +99,10 @@ export async function createClient(config: Config, serverPath: string): Promise< | |||
99 | // Note that while the CallHierarchyFeature is stable the LSP protocol is not. | 99 | // Note that while the CallHierarchyFeature is stable the LSP protocol is not. |
100 | res.registerFeature(new CallHierarchyFeature(res)); | 100 | res.registerFeature(new CallHierarchyFeature(res)); |
101 | 101 | ||
102 | if (config.highlightingSemanticTokens) { | 102 | if (config.package.enableProposedApi) { |
103 | res.registerFeature(new SemanticTokensFeature(res)); | 103 | if (config.highlightingSemanticTokens) { |
104 | res.registerFeature(new SemanticTokensFeature(res)); | ||
105 | } | ||
104 | } | 106 | } |
105 | 107 | ||
106 | return res; | 108 | return res; |
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index be5296fcf..7668c20b7 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts | |||
@@ -1,29 +1,10 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | import { log } from "./util"; | 2 | import { log } from "./util"; |
3 | 3 | ||
4 | export interface InlayHintOptions { | ||
5 | typeHints: boolean; | ||
6 | parameterHints: boolean; | ||
7 | maxLength: number | null; | ||
8 | } | ||
9 | |||
10 | export interface CargoWatchOptions { | ||
11 | enable: boolean; | ||
12 | arguments: string[]; | ||
13 | command: string; | ||
14 | allTargets: boolean; | ||
15 | } | ||
16 | |||
17 | export interface CargoFeatures { | ||
18 | noDefaultFeatures: boolean; | ||
19 | allFeatures: boolean; | ||
20 | features: string[]; | ||
21 | loadOutDirsFromCheck: boolean; | ||
22 | } | ||
23 | |||
24 | export type UpdatesChannel = "stable" | "nightly"; | 4 | export type UpdatesChannel = "stable" | "nightly"; |
25 | 5 | ||
26 | export const NIGHTLY_TAG = "nightly"; | 6 | export const NIGHTLY_TAG = "nightly"; |
7 | |||
27 | export class Config { | 8 | export class Config { |
28 | readonly extensionId = "matklad.rust-analyzer"; | 9 | readonly extensionId = "matklad.rust-analyzer"; |
29 | 10 | ||
@@ -38,37 +19,30 @@ export class Config { | |||
38 | ] | 19 | ] |
39 | .map(opt => `${this.rootSection}.${opt}`); | 20 | .map(opt => `${this.rootSection}.${opt}`); |
40 | 21 | ||
41 | readonly packageJsonVersion: string = vscode | 22 | readonly package: { |
42 | .extensions | 23 | version: string; |
43 | .getExtension(this.extensionId)! | 24 | releaseTag: string | undefined; |
44 | .packageJSON | 25 | enableProposedApi: boolean | undefined; |
45 | .version; | 26 | } = vscode.extensions.getExtension(this.extensionId)!.packageJSON; |
46 | |||
47 | readonly releaseTag: string | undefined = vscode | ||
48 | .extensions | ||
49 | .getExtension(this.extensionId)! | ||
50 | .packageJSON | ||
51 | .releaseTag ?? undefined; | ||
52 | 27 | ||
53 | private cfg!: vscode.WorkspaceConfiguration; | 28 | readonly globalStoragePath: string; |
54 | 29 | ||
55 | constructor(private readonly ctx: vscode.ExtensionContext) { | 30 | constructor(ctx: vscode.ExtensionContext) { |
56 | vscode.workspace.onDidChangeConfiguration(this.onConfigChange, this, ctx.subscriptions); | 31 | this.globalStoragePath = ctx.globalStoragePath; |
57 | this.refreshConfig(); | 32 | vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, ctx.subscriptions); |
33 | this.refreshLogging(); | ||
58 | } | 34 | } |
59 | 35 | ||
60 | private refreshConfig() { | 36 | private refreshLogging() { |
61 | this.cfg = vscode.workspace.getConfiguration(this.rootSection); | 37 | log.setEnabled(this.traceExtension); |
62 | const enableLogging = this.cfg.get("trace.extension") as boolean; | ||
63 | log.setEnabled(enableLogging); | ||
64 | log.debug( | 38 | log.debug( |
65 | "Extension version:", this.packageJsonVersion, | 39 | "Extension version:", this.package.version, |
66 | "using configuration:", this.cfg | 40 | "using configuration:", this.cfg |
67 | ); | 41 | ); |
68 | } | 42 | } |
69 | 43 | ||
70 | private async onConfigChange(event: vscode.ConfigurationChangeEvent) { | 44 | private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) { |
71 | this.refreshConfig(); | 45 | this.refreshLogging(); |
72 | 46 | ||
73 | const requiresReloadOpt = this.requiresReloadOpts.find( | 47 | const requiresReloadOpt = this.requiresReloadOpts.find( |
74 | opt => event.affectsConfiguration(opt) | 48 | opt => event.affectsConfiguration(opt) |
@@ -86,49 +60,53 @@ export class Config { | |||
86 | } | 60 | } |
87 | } | 61 | } |
88 | 62 | ||
89 | get globalStoragePath(): string { return this.ctx.globalStoragePath; } | ||
90 | |||
91 | // We don't do runtime config validation here for simplicity. More on stackoverflow: | 63 | // We don't do runtime config validation here for simplicity. More on stackoverflow: |
92 | // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension | 64 | // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension |
93 | 65 | ||
94 | get serverPath() { return this.cfg.get("serverPath") as null | string; } | 66 | private get cfg(): vscode.WorkspaceConfiguration { |
95 | get channel() { return this.cfg.get<"stable" | "nightly">("updates.channel")!; } | 67 | return vscode.workspace.getConfiguration(this.rootSection); |
96 | get askBeforeDownload() { return this.cfg.get("updates.askBeforeDownload") as boolean; } | 68 | } |
97 | get highlightingSemanticTokens() { return this.cfg.get("highlighting.semanticTokens") as boolean; } | 69 | |
98 | get highlightingOn() { return this.cfg.get("highlightingOn") as boolean; } | 70 | get serverPath() { return this.cfg.get<null | string>("serverPath")!; } |
99 | get rainbowHighlightingOn() { return this.cfg.get("rainbowHighlightingOn") as boolean; } | 71 | get channel() { return this.cfg.get<UpdatesChannel>("updates.channel")!; } |
100 | get lruCapacity() { return this.cfg.get("lruCapacity") as null | number; } | 72 | get askBeforeDownload() { return this.cfg.get<boolean>("updates.askBeforeDownload")!; } |
101 | get inlayHints(): InlayHintOptions { | 73 | get highlightingSemanticTokens() { return this.cfg.get<boolean>("highlighting.semanticTokens")!; } |
74 | get highlightingOn() { return this.cfg.get<boolean>("highlightingOn")!; } | ||
75 | get rainbowHighlightingOn() { return this.cfg.get<boolean>("rainbowHighlightingOn")!; } | ||
76 | get lruCapacity() { return this.cfg.get<null | number>("lruCapacity")!; } | ||
77 | get excludeGlobs() { return this.cfg.get<string[]>("excludeGlobs")!; } | ||
78 | get useClientWatching() { return this.cfg.get<boolean>("useClientWatching")!; } | ||
79 | get featureFlags() { return this.cfg.get<Record<string, boolean>>("featureFlags")!; } | ||
80 | get rustfmtArgs() { return this.cfg.get<string[]>("rustfmtArgs")!; } | ||
81 | get loadOutDirsFromCheck() { return this.cfg.get<boolean>("loadOutDirsFromCheck")!; } | ||
82 | get traceExtension() { return this.cfg.get<boolean>("trace.extension")!; } | ||
83 | |||
84 | // for internal use | ||
85 | get withSysroot() { return this.cfg.get<boolean>("withSysroot", true)!; } | ||
86 | |||
87 | get inlayHints() { | ||
102 | return { | 88 | return { |
103 | typeHints: this.cfg.get("inlayHints.typeHints") as boolean, | 89 | typeHints: this.cfg.get<boolean>("inlayHints.typeHints")!, |
104 | parameterHints: this.cfg.get("inlayHints.parameterHints") as boolean, | 90 | parameterHints: this.cfg.get<boolean>("inlayHints.parameterHints")!, |
105 | maxLength: this.cfg.get("inlayHints.maxLength") as null | number, | 91 | maxLength: this.cfg.get<null | number>("inlayHints.maxLength")!, |
106 | }; | 92 | }; |
107 | } | 93 | } |
108 | get excludeGlobs() { return this.cfg.get("excludeGlobs") as string[]; } | ||
109 | get useClientWatching() { return this.cfg.get("useClientWatching") as boolean; } | ||
110 | get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; } | ||
111 | get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; } | ||
112 | get loadOutDirsFromCheck() { return this.cfg.get("loadOutDirsFromCheck") as boolean; } | ||
113 | 94 | ||
114 | get cargoWatchOptions(): CargoWatchOptions { | 95 | get cargoWatchOptions() { |
115 | return { | 96 | return { |
116 | enable: this.cfg.get("cargo-watch.enable") as boolean, | 97 | enable: this.cfg.get<boolean>("cargo-watch.enable")!, |
117 | arguments: this.cfg.get("cargo-watch.arguments") as string[], | 98 | arguments: this.cfg.get<string[]>("cargo-watch.arguments")!, |
118 | allTargets: this.cfg.get("cargo-watch.allTargets") as boolean, | 99 | allTargets: this.cfg.get<boolean>("cargo-watch.allTargets")!, |
119 | command: this.cfg.get("cargo-watch.command") as string, | 100 | command: this.cfg.get<string>("cargo-watch.command")!, |
120 | }; | 101 | }; |
121 | } | 102 | } |
122 | 103 | ||
123 | get cargoFeatures(): CargoFeatures { | 104 | get cargoFeatures() { |
124 | return { | 105 | return { |
125 | noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean, | 106 | noDefaultFeatures: this.cfg.get<boolean>("cargoFeatures.noDefaultFeatures")!, |
126 | allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean, | 107 | allFeatures: this.cfg.get<boolean>("cargoFeatures.allFeatures")!, |
127 | features: this.cfg.get("cargoFeatures.features") as string[], | 108 | features: this.cfg.get<string[]>("cargoFeatures.features")!, |
128 | loadOutDirsFromCheck: this.cfg.get("cargoFeatures.loadOutDirsFromCheck") as boolean, | 109 | loadOutDirsFromCheck: this.cfg.get<boolean>("cargoFeatures.loadOutDirsFromCheck")!, |
129 | }; | 110 | }; |
130 | } | 111 | } |
131 | |||
132 | // for internal use | ||
133 | get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; } | ||
134 | } | 112 | } |
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts index b19b09ad5..17d0dfa33 100644 --- a/editors/code/src/inlay_hints.ts +++ b/editors/code/src/inlay_hints.ts | |||
@@ -134,8 +134,6 @@ class HintsUpdater implements Disposable { | |||
134 | 134 | ||
135 | // No text documents changed, so we may try to use the cache | 135 | // No text documents changed, so we may try to use the cache |
136 | if (!file.cachedDecorations) { | 136 | if (!file.cachedDecorations) { |
137 | file.inlaysRequest?.cancel(); | ||
138 | |||
139 | const hints = await this.fetchHints(file); | 137 | const hints = await this.fetchHints(file); |
140 | if (!hints) return; | 138 | if (!hints) return; |
141 | 139 | ||
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 5d2da9a76..7b7c19dfc 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts | |||
@@ -110,9 +110,9 @@ async function bootstrap(config: Config, state: PersistentState): Promise<string | |||
110 | } | 110 | } |
111 | 111 | ||
112 | async function bootstrapExtension(config: Config, state: PersistentState): Promise<void> { | 112 | async function bootstrapExtension(config: Config, state: PersistentState): Promise<void> { |
113 | if (config.releaseTag === undefined) return; | 113 | if (config.package.releaseTag === undefined) return; |
114 | if (config.channel === "stable") { | 114 | if (config.channel === "stable") { |
115 | if (config.releaseTag === NIGHTLY_TAG) { | 115 | if (config.package.releaseTag === NIGHTLY_TAG) { |
116 | vscode.window.showWarningMessage(`You are running a nightly version of rust-analyzer extension. | 116 | vscode.window.showWarningMessage(`You are running a nightly version of rust-analyzer extension. |
117 | To switch to stable, uninstall the extension and re-install it from the marketplace`); | 117 | To switch to stable, uninstall the extension and re-install it from the marketplace`); |
118 | } | 118 | } |
@@ -185,7 +185,7 @@ async function getServer(config: Config, state: PersistentState): Promise<string | |||
185 | } | 185 | } |
186 | return explicitPath; | 186 | return explicitPath; |
187 | }; | 187 | }; |
188 | if (config.releaseTag === undefined) return "rust-analyzer"; | 188 | if (config.package.releaseTag === undefined) return "rust-analyzer"; |
189 | 189 | ||
190 | let binaryName: string | undefined = undefined; | 190 | let binaryName: string | undefined = undefined; |
191 | if (process.arch === "x64" || process.arch === "x32") { | 191 | if (process.arch === "x64" || process.arch === "x32") { |
@@ -211,21 +211,21 @@ async function getServer(config: Config, state: PersistentState): Promise<string | |||
211 | await state.updateServerVersion(undefined); | 211 | await state.updateServerVersion(undefined); |
212 | } | 212 | } |
213 | 213 | ||
214 | if (state.serverVersion === config.packageJsonVersion) return dest; | 214 | if (state.serverVersion === config.package.version) return dest; |
215 | 215 | ||
216 | if (config.askBeforeDownload) { | 216 | if (config.askBeforeDownload) { |
217 | const userResponse = await vscode.window.showInformationMessage( | 217 | const userResponse = await vscode.window.showInformationMessage( |
218 | `Language server version ${config.packageJsonVersion} for rust-analyzer is not installed.`, | 218 | `Language server version ${config.package.version} for rust-analyzer is not installed.`, |
219 | "Download now" | 219 | "Download now" |
220 | ); | 220 | ); |
221 | if (userResponse !== "Download now") return dest; | 221 | if (userResponse !== "Download now") return dest; |
222 | } | 222 | } |
223 | 223 | ||
224 | const release = await fetchRelease(config.releaseTag); | 224 | const release = await fetchRelease(config.package.releaseTag); |
225 | const artifact = release.assets.find(artifact => artifact.name === binaryName); | 225 | const artifact = release.assets.find(artifact => artifact.name === binaryName); |
226 | assert(!!artifact, `Bad release: ${JSON.stringify(release)}`); | 226 | assert(!!artifact, `Bad release: ${JSON.stringify(release)}`); |
227 | 227 | ||
228 | await download(artifact.browser_download_url, dest, "Downloading rust-analyzer server", { mode: 0o755 }); | 228 | await download(artifact.browser_download_url, dest, "Downloading rust-analyzer server", { mode: 0o755 }); |
229 | await state.updateServerVersion(config.packageJsonVersion); | 229 | await state.updateServerVersion(config.package.version); |
230 | return dest; | 230 | return dest; |
231 | } | 231 | } |
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs index 6da5ca89e..31d606535 100644 --- a/xtask/src/codegen/gen_assists_docs.rs +++ b/xtask/src/codegen/gen_assists_docs.rs | |||
@@ -4,7 +4,7 @@ use std::{fs, path::Path}; | |||
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Mode}, | 6 | codegen::{self, extract_comment_blocks_with_empty_lines, Mode}, |
7 | project_root, Result, | 7 | project_root, rust_files, Result, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { | 10 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { |
@@ -46,12 +46,8 @@ fn reveal_hash_comments(text: &str) -> String { | |||
46 | 46 | ||
47 | fn collect_assists() -> Result<Vec<Assist>> { | 47 | fn collect_assists() -> Result<Vec<Assist>> { |
48 | let mut res = Vec::new(); | 48 | let mut res = Vec::new(); |
49 | for entry in fs::read_dir(project_root().join(codegen::ASSISTS_DIR))? { | 49 | for path in rust_files(&project_root().join(codegen::ASSISTS_DIR)) { |
50 | let entry = entry?; | 50 | collect_file(&mut res, path.as_path())?; |
51 | let path = entry.path(); | ||
52 | if path.is_file() { | ||
53 | collect_file(&mut res, path.as_path())?; | ||
54 | } | ||
55 | } | 51 | } |
56 | res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id)); | 52 | res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id)); |
57 | return Ok(res); | 53 | return Ok(res); |
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index 2002d3e2a..3255eefb9 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs | |||
@@ -7,13 +7,18 @@ use crate::{ | |||
7 | project_root, | 7 | project_root, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | pub fn run_dist(version: &str, release_tag: &str) -> Result<()> { | 10 | pub struct ClientOpts { |
11 | pub version: String, | ||
12 | pub release_tag: String, | ||
13 | } | ||
14 | |||
15 | pub fn run_dist(client_opts: Option<ClientOpts>) -> Result<()> { | ||
11 | let dist = project_root().join("dist"); | 16 | let dist = project_root().join("dist"); |
12 | rm_rf(&dist)?; | 17 | rm_rf(&dist)?; |
13 | fs2::create_dir_all(&dist)?; | 18 | fs2::create_dir_all(&dist)?; |
14 | 19 | ||
15 | if cfg!(target_os = "linux") { | 20 | if let Some(ClientOpts { version, release_tag }) = client_opts { |
16 | dist_client(version, release_tag)?; | 21 | dist_client(&version, &release_tag)?; |
17 | } | 22 | } |
18 | dist_server()?; | 23 | dist_server()?; |
19 | Ok(()) | 24 | Ok(()) |
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index e1472e85d..4f01f84fb 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -17,6 +17,7 @@ use std::{ | |||
17 | path::{Path, PathBuf}, | 17 | path::{Path, PathBuf}, |
18 | process::{Command, Stdio}, | 18 | process::{Command, Stdio}, |
19 | }; | 19 | }; |
20 | use walkdir::{DirEntry, WalkDir}; | ||
20 | 21 | ||
21 | use crate::{ | 22 | use crate::{ |
22 | codegen::Mode, | 23 | codegen::Mode, |
@@ -37,6 +38,21 @@ pub fn project_root() -> PathBuf { | |||
37 | .to_path_buf() | 38 | .to_path_buf() |
38 | } | 39 | } |
39 | 40 | ||
41 | pub fn rust_files(path: &Path) -> impl Iterator<Item = PathBuf> { | ||
42 | let iter = WalkDir::new(path); | ||
43 | return iter | ||
44 | .into_iter() | ||
45 | .filter_entry(|e| !is_hidden(e)) | ||
46 | .map(|e| e.unwrap()) | ||
47 | .filter(|e| !e.file_type().is_dir()) | ||
48 | .map(|e| e.into_path()) | ||
49 | .filter(|path| path.extension().map(|it| it == "rs").unwrap_or(false)); | ||
50 | |||
51 | fn is_hidden(entry: &DirEntry) -> bool { | ||
52 | entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) | ||
53 | } | ||
54 | } | ||
55 | |||
40 | pub fn run_rustfmt(mode: Mode) -> Result<()> { | 56 | pub fn run_rustfmt(mode: Mode) -> Result<()> { |
41 | let _dir = pushd(project_root()); | 57 | let _dir = pushd(project_root()); |
42 | ensure_rustfmt()?; | 58 | ensure_rustfmt()?; |
diff --git a/xtask/src/main.rs b/xtask/src/main.rs index aafa73610..a9adcfba4 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs | |||
@@ -13,7 +13,7 @@ use std::env; | |||
13 | use pico_args::Arguments; | 13 | use pico_args::Arguments; |
14 | use xtask::{ | 14 | use xtask::{ |
15 | codegen::{self, Mode}, | 15 | codegen::{self, Mode}, |
16 | dist::run_dist, | 16 | dist::{run_dist, ClientOpts}, |
17 | install::{ClientOpt, InstallCmd, ServerOpt}, | 17 | install::{ClientOpt, InstallCmd, ServerOpt}, |
18 | not_bash::pushd, | 18 | not_bash::pushd, |
19 | pre_commit, project_root, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt, | 19 | pre_commit, project_root, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt, |
@@ -103,10 +103,16 @@ FLAGS: | |||
103 | run_release(dry_run) | 103 | run_release(dry_run) |
104 | } | 104 | } |
105 | "dist" => { | 105 | "dist" => { |
106 | let version: String = args.value_from_str("--version")?; | 106 | let client_opts = if args.contains("--client") { |
107 | let release_tag: String = args.value_from_str("--tag")?; | 107 | Some(ClientOpts { |
108 | version: args.value_from_str("--version")?, | ||
109 | release_tag: args.value_from_str("--tag")?, | ||
110 | }) | ||
111 | } else { | ||
112 | None | ||
113 | }; | ||
108 | args.finish()?; | 114 | args.finish()?; |
109 | run_dist(&version, &release_tag) | 115 | run_dist(client_opts) |
110 | } | 116 | } |
111 | _ => { | 117 | _ => { |
112 | eprintln!( | 118 | eprintln!( |
diff --git a/xtask/tests/tidy-tests/main.rs b/xtask/tests/tidy-tests/main.rs index 5ae86c87c..80911a68e 100644 --- a/xtask/tests/tidy-tests/main.rs +++ b/xtask/tests/tidy-tests/main.rs | |||
@@ -5,13 +5,12 @@ use std::{ | |||
5 | path::{Path, PathBuf}, | 5 | path::{Path, PathBuf}, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use walkdir::{DirEntry, WalkDir}; | 8 | use xtask::{not_bash::fs2, project_root, rust_files}; |
9 | use xtask::{not_bash::fs2, project_root}; | ||
10 | 9 | ||
11 | #[test] | 10 | #[test] |
12 | fn rust_files_are_tidy() { | 11 | fn rust_files_are_tidy() { |
13 | let mut tidy_docs = TidyDocs::default(); | 12 | let mut tidy_docs = TidyDocs::default(); |
14 | for path in rust_files() { | 13 | for path in rust_files(&project_root().join("crates")) { |
15 | let text = fs2::read_to_string(&path).unwrap(); | 14 | let text = fs2::read_to_string(&path).unwrap(); |
16 | check_todo(&path, &text); | 15 | check_todo(&path, &text); |
17 | check_trailing_ws(&path, &text); | 16 | check_trailing_ws(&path, &text); |
@@ -142,19 +141,3 @@ fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { | |||
142 | 141 | ||
143 | false | 142 | false |
144 | } | 143 | } |
145 | |||
146 | fn rust_files() -> impl Iterator<Item = PathBuf> { | ||
147 | let crates = project_root().join("crates"); | ||
148 | let iter = WalkDir::new(crates); | ||
149 | return iter | ||
150 | .into_iter() | ||
151 | .filter_entry(|e| !is_hidden(e)) | ||
152 | .map(|e| e.unwrap()) | ||
153 | .filter(|e| !e.file_type().is_dir()) | ||
154 | .map(|e| e.into_path()) | ||
155 | .filter(|path| path.extension().map(|it| it == "rs").unwrap_or(false)); | ||
156 | |||
157 | fn is_hidden(entry: &DirEntry) -> bool { | ||
158 | entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) | ||
159 | } | ||
160 | } | ||