64 files changed, 1337 insertions, 479 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 09752b817..7b9fbe6b8 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -18,6 +18,8 @@ jobs:
   dist-windows:
     name: dist (Windows)
     runs-on: windows-latest
+    env:
+      RA_TARGET: x86_64-pc-windows-msvc
 
     steps:
     - name: Checkout repository
@@ -39,8 +41,6 @@ jobs:
 
     - name: Dist
       run: cargo xtask dist
-      env:
-        RA_TARGET: x86_64-pc-windows-msvc
 
     - name: Upload artifacts
       uses: actions/upload-artifact@v1
@@ -51,6 +51,8 @@ jobs:
   dist-ubuntu:
     name: dist (Ubuntu 16.04)
     runs-on: ubuntu-16.04
+    env:
+      RA_TARGET: x86_64-unknown-linux-gnu
 
     steps:
     - name: Checkout repository
@@ -71,18 +73,14 @@ jobs:
     - name: Dist
       if: github.ref == 'refs/heads/release'
       run: cargo xtask dist --client 0.2.$GITHUB_RUN_NUMBER
-      env:
-        RA_TARGET: x86_64-unknown-linux-gnu
 
     - name: Dist
       if: github.ref != 'refs/heads/release'
       run: cargo xtask dist --nightly --client 0.3.$GITHUB_RUN_NUMBER-nightly
-      env:
-        RA_TARGET: x86_64-unknown-linux-gnu
 
     - name: Nightly analysis-stats check
       if: github.ref != 'refs/heads/release'
-      run: ./dist/rust-analyzer-x86_64-unknown-linux-gnu analysis-stats .
+      run: target/${{ env.RA_TARGET }}/release/rust-analyzer analysis-stats .
 
     - name: Upload artifacts
       uses: actions/upload-artifact@v1
@@ -93,6 +91,8 @@ jobs:
   dist-macos-latest:
     name: dist (MacOS latest)
     runs-on: macos-latest
+    env:
+      RA_TARGET: x86_64-apple-darwin
 
     steps:
     - name: Checkout repository
@@ -107,8 +107,6 @@ jobs:
 
     - name: Dist
       run: cargo xtask dist
-      env:
-        RA_TARGET: x86_64-apple-darwin
 
     - name: Upload artifacts
       uses: actions/upload-artifact@v1
@@ -119,6 +117,8 @@ jobs:
   dist-macos-11:
     name: dist (MacOS 11.0)
     runs-on: macos-11.0
+    env:
+      RA_TARGET: aarch64-apple-darwin
 
     steps:
     - name: Checkout repository
@@ -134,8 +134,6 @@ jobs:
 
     - name: Dist
       run: cargo xtask dist
-      env:
-        RA_TARGET: aarch64-apple-darwin
 
     - name: Upload artifacts
       uses: actions/upload-artifact@v1
diff --git a/Cargo.lock b/Cargo.lock
index fbb79e01f..fd04ec3c5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -168,9 +168,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chalk-derive"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d9e0c8adcced1ab0fea5cb8a38647922893d5b495e363e1814299fd380469b"
+checksum = "ec7dacf94958d1a930b95d049d9443860859af59eadc77849392093eb577bcee"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -180,19 +180,20 @@ dependencies = [
 
 [[package]]
 name = "chalk-ir"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5218266a5709bc4943de997e64d3fab41c9e9f68efd54a898de53135e987bd3"
+checksum = "a1a5b38ede247def17da87f4badb62396a5753db6048e2011d3089d8b3796c67"
 dependencies = [
+ "bitflags",
  "chalk-derive",
  "lazy_static",
 ]
 
 [[package]]
 name = "chalk-recursive"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed8f34f13fd4f30251f9f6f1dc56f80363201390ecbcac2fdfc8e33036cd9c4a"
+checksum = "7a18db146d7a023edc20ad094e8c2284451f7888719645004979617d1f17c041"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -203,9 +204,9 @@ dependencies = [
 
 [[package]]
 name = "chalk-solve"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "379c9f584488346044709d4c638c38d61a06fe593d4de2ac5f15fd2b0ba4cd9d"
+checksum = "7f73e0de04a0f394e47ed8118e00541bcf681d7c3c2ef500fa743eb4cf3a4850"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -507,9 +508,9 @@ checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
 
 [[package]]
 name = "heck"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
+checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac"
 dependencies = [
  "unicode-segmentation",
 ]
@@ -1074,9 +1075,9 @@ dependencies = [
 
 [[package]]
 name = "parking_lot_core"
-version = "0.8.1"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7c6d9b8427445284a09c55be860a15855ab580a417ccad9da88f5a06787ced0"
+checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272"
 dependencies = [
  "cfg-if 1.0.0",
  "instant",
@@ -1358,6 +1359,7 @@ dependencies = [
  "rustc-hash",
  "serde",
  "serde_json",
+ "serde_path_to_error",
  "ssr",
  "stdx",
  "syntax",
@@ -1376,9 +1378,9 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "691.0.0"
+version = "695.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44bc89d9ca7a78fb82e103b389362c55f03800745f8ba14e068b805cfaf783ec"
+checksum = "390bad134705b0bff02cd9541ac66df751a91c3cc734c3369cd6151ca269caed"
 dependencies = [
  "unicode-xid",
 ]
@@ -1527,6 +1529,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "serde_path_to_error"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f6109f0506e20f7e0f910e51a0079acf41da8e0694e6442527c4ddf5a2b158"
+dependencies = [
+ "serde",
+]
+
+[[package]]
 name = "serde_repr"
 version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1827,9 +1838,9 @@ checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
 
 [[package]]
 name = "ungrammar"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68951379f3ced25754472ca5addbf74d7dab58c9818f49290a3d8caa3ab44fb7"
+checksum = "c11bffada52edc8f2a56160b286ea4640acf90ffcb21bded361ccb8ed43a1457"
 
 [[package]]
 name = "unicase"
diff --git a/crates/assists/src/handlers/extract_module_to_file.rs b/crates/assists/src/handlers/extract_module_to_file.rs
index 5fc190fa6..50bf67ef7 100644
--- a/crates/assists/src/handlers/extract_module_to_file.rs
+++ b/crates/assists/src/handlers/extract_module_to_file.rs
@@ -1,5 +1,5 @@
 use ast::edit::IndentLevel;
-use ide_db::base_db::{AnchoredPathBuf, SourceDatabaseExt};
+use ide_db::base_db::AnchoredPathBuf;
 use syntax::{
     ast::{self, edit::AstNodeEdit, NameOwner},
     AstNode,
@@ -21,43 +21,44 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // mod foo;
 // ```
 pub(crate) fn extract_module_to_file(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
-    let assist_id = AssistId("extract_module_to_file", AssistKind::RefactorExtract);
-    let assist_label = "Extract module to file";
-    let db = ctx.db();
     let module_ast = ctx.find_node_at_offset::<ast::Module>()?;
-    let module_items = module_ast.item_list()?;
-    let dedent_module_items_text = module_items.dedent(IndentLevel(1)).to_string();
     let module_name = module_ast.name()?;
+
+    let module_def = ctx.sema.to_def(&module_ast)?;
+    let parent_module = module_def.parent(ctx.db())?;
+
+    let module_items = module_ast.item_list()?;
     let target = module_ast.syntax().text_range();
     let anchor_file_id = ctx.frange.file_id;
-    let sr = db.file_source_root(anchor_file_id);
-    let sr = db.source_root(sr);
-    let file_path = sr.path_for_file(&anchor_file_id)?;
-    let (file_name, file_ext) = file_path.name_and_extension()?;
-    acc.add(assist_id, assist_label, target, |builder| {
-        builder.replace(target, format!("mod {};", module_name));
-        let path = if is_main_or_lib(file_name) {
-            format!("./{}.{}", module_name, file_ext.unwrap())
-        } else {
-            format!("./{}/{}.{}", file_name, module_name, file_ext.unwrap())
-        };
-        let dst = AnchoredPathBuf { anchor: anchor_file_id, path };
-        let contents = update_module_items_string(dedent_module_items_text);
-        builder.create_file(dst, contents);
-    })
-}
-fn is_main_or_lib(file_name: &str) -> bool {
-    file_name == "main".to_string() || file_name == "lib".to_string()
-}
-fn update_module_items_string(items_str: String) -> String {
-    let mut items_string_lines: Vec<&str> = items_str.lines().collect();
-    items_string_lines.pop(); // Delete last line
-    items_string_lines.reverse();
-    items_string_lines.pop(); // Delete first line
-    items_string_lines.reverse();
 
-    let string = items_string_lines.join("\n");
-    format!("{}", string)
+    acc.add(
+        AssistId("extract_module_to_file", AssistKind::RefactorExtract),
+        "Extract module to file",
+        target,
+        |builder| {
+            let path = {
+                let dir = match parent_module.name(ctx.db()) {
+                    Some(name) if !parent_module.is_mod_rs(ctx.db()) => format!("{}/", name),
+                    _ => String::new(),
+                };
+                format!("./{}{}.rs", dir, module_name)
+            };
+            let contents = {
+                let items = module_items.dedent(IndentLevel(1)).to_string();
+                let mut items =
+                    items.trim_start_matches('{').trim_end_matches('}').trim().to_string();
+                if !items.is_empty() {
+                    items.push('\n');
+                }
+                items
+            };
+
+            builder.replace(target, format!("mod {};", module_name));
+
+            let dst = AnchoredPathBuf { anchor: anchor_file_id, path };
+            builder.create_file(dst, contents);
+        },
+    )
 }
 
 #[cfg(test)]
@@ -67,104 +68,66 @@ mod tests {
     use super::*;
 
     #[test]
-    fn extract_module_to_file_with_basic_module() {
+    fn extract_from_root() {
         check_assist(
             extract_module_to_file,
             r#"
-//- /foo.rs crate:foo
 mod tests {<|>
     #[test] fn t() {}
 }
 "#,
             r#"
-//- /foo.rs
+//- /main.rs
 mod tests;
-//- /foo/tests.rs
-#[test] fn t() {}"#,
-        )
-    }
-
-    #[test]
-    fn extract_module_to_file_with_file_path() {
-        check_assist(
-            extract_module_to_file,
-            r#"
-//- /src/foo.rs crate:foo
-mod bar {<|>
-    fn f() {
-
-    }
-}
-fn main() {
-    println!("Hello, world!");
-}
-"#,
-            r#"
-//- /src/foo.rs
-mod bar;
-fn main() {
-    println!("Hello, world!");
-}
-//- /src/foo/bar.rs
-fn f() {
-
-}"#,
-        )
+//- /tests.rs
+#[test] fn t() {}
+"#,
+        );
     }
 
     #[test]
-    fn extract_module_to_file_with_main_filw() {
+    fn extract_from_submodule() {
         check_assist(
             extract_module_to_file,
             r#"
 //- /main.rs
-mod foo {<|>
-    fn f() {
-
-    }
-}
-fn main() {
-    println!("Hello, world!");
+mod submod;
+//- /submod.rs
+mod inner<|> {
+    fn f() {}
 }
+fn g() {}
 "#,
             r#"
-//- /main.rs
-mod foo;
-fn main() {
-    println!("Hello, world!");
-}
-//- /foo.rs
-fn f() {
-
-}"#,
-        )
+//- /submod.rs
+mod inner;
+fn g() {}
+//- /submod/inner.rs
+fn f() {}
+"#,
+        );
     }
 
     #[test]
-    fn extract_module_to_file_with_lib_file() {
+    fn extract_from_mod_rs() {
         check_assist(
             extract_module_to_file,
             r#"
-//- /lib.rs
-mod foo {<|>
-    fn f() {
-
-    }
-}
-fn main() {
-    println!("Hello, world!");
+//- /main.rs
+mod submodule;
+//- /submodule/mod.rs
+mod inner<|> {
+    fn f() {}
 }
+fn g() {}
 "#,
             r#"
-//- /lib.rs
-mod foo;
-fn main() {
-    println!("Hello, world!");
-}
-//- /foo.rs
-fn f() {
-
-}"#,
-        )
+//- /submodule/mod.rs
+mod inner;
+fn g() {}
+//- /submodule/inner.rs
+fn f() {}
+"#,
+        );
     }
 }
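The rewritten assist derives the destination path from the parent module instead of parsing the anchor file name: a module extracted from a mod.rs-style (or crate-root) file becomes a sibling `./<module>.rs`, anything else lands in `./<parent>/<module>.rs`. A standalone sketch of that rule, with plain parameters standing in for the hir queries used above:

    /// Sketch of the destination-path rule from the reworked assist.
    /// `parent_name` and `parent_is_mod_rs` stand in for the hir lookups.
    fn extracted_file_path(parent_name: Option<&str>, parent_is_mod_rs: bool, module_name: &str) -> String {
        let dir = match parent_name {
            Some(name) if !parent_is_mod_rs => format!("{}/", name),
            _ => String::new(),
        };
        format!("./{}{}.rs", dir, module_name)
    }

    fn main() {
        // `mod inner` extracted from /submod.rs goes into a directory named after the parent.
        assert_eq!(extracted_file_path(Some("submod"), false, "inner"), "./submod/inner.rs");
        // Extracted from /submodule/mod.rs it becomes a sibling of mod.rs.
        assert_eq!(extracted_file_path(Some("submodule"), true, "inner"), "./inner.rs");
    }
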
diff --git a/crates/completion/src/completions.rs b/crates/completion/src/completions.rs
index 1ef6b5f48..d9fe13485 100644
--- a/crates/completion/src/completions.rs
+++ b/crates/completion/src/completions.rs
@@ -19,9 +19,14 @@ use hir::{ModPath, ScopeDef, Type};
 use crate::{
     item::Builder,
     render::{
-        const_::render_const, enum_variant::render_variant, function::render_fn,
-        macro_::render_macro, render_field, render_resolution, render_tuple_field,
-        type_alias::render_type_alias, RenderContext,
+        const_::render_const,
+        enum_variant::render_variant,
+        function::render_fn,
+        macro_::render_macro,
+        pattern::{render_struct_pat, render_variant_pat},
+        render_field, render_resolution, render_tuple_field,
+        type_alias::render_type_alias,
+        RenderContext,
     },
     CompletionContext, CompletionItem,
 };
@@ -105,6 +110,28 @@ impl Completions {
         self.add(item)
     }
 
+    pub(crate) fn add_variant_pat(
+        &mut self,
+        ctx: &CompletionContext,
+        variant: hir::Variant,
+        local_name: Option<hir::Name>,
+    ) {
+        if let Some(item) = render_variant_pat(RenderContext::new(ctx), variant, local_name) {
+            self.add(item);
+        }
+    }
+
+    pub(crate) fn add_struct_pat(
+        &mut self,
+        ctx: &CompletionContext,
+        strukt: hir::Struct,
+        local_name: Option<hir::Name>,
+    ) {
+        if let Some(item) = render_struct_pat(RenderContext::new(ctx), strukt, local_name) {
+            self.add(item);
+        }
+    }
+
     pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) {
         if let Some(item) = render_const(RenderContext::new(ctx), constant) {
             self.add(item);
diff --git a/crates/completion/src/completions/pattern.rs b/crates/completion/src/completions/pattern.rs
index 4d56731ec..eee31098d 100644
--- a/crates/completion/src/completions/pattern.rs
+++ b/crates/completion/src/completions/pattern.rs
@@ -2,9 +2,9 @@
 
 use crate::{CompletionContext, Completions};
 
-/// Completes constats and paths in patterns.
+/// Completes constants and paths in patterns.
 pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
-    if !(ctx.is_pat_binding_or_const || ctx.is_irrefutable_let_pat_binding) {
+    if !(ctx.is_pat_binding_or_const || ctx.is_irrefutable_pat_binding) {
         return;
     }
     if ctx.record_pat_syntax.is_some() {
@@ -15,20 +15,21 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
     // suggest variants + auto-imports
     ctx.scope.process_all_names(&mut |name, res| {
         let add_resolution = match &res {
-            hir::ScopeDef::ModuleDef(def) => {
-                if ctx.is_irrefutable_let_pat_binding {
-                    matches!(def, hir::ModuleDef::Adt(hir::Adt::Struct(_)))
-                } else {
-                    matches!(
-                        def,
-                        hir::ModuleDef::Adt(hir::Adt::Enum(..))
-                            | hir::ModuleDef::Adt(hir::Adt::Struct(..))
-                            | hir::ModuleDef::Variant(..)
-                            | hir::ModuleDef::Const(..)
-                            | hir::ModuleDef::Module(..)
-                    )
-                }
-            }
+            hir::ScopeDef::ModuleDef(def) => match def {
+                hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => {
+                    acc.add_struct_pat(ctx, strukt.clone(), Some(name.clone()));
+                    true
+                }
+                hir::ModuleDef::Variant(variant) if !ctx.is_irrefutable_pat_binding => {
+                    acc.add_variant_pat(ctx, variant.clone(), Some(name.clone()));
+                    true
+                }
+                hir::ModuleDef::Adt(hir::Adt::Enum(..))
+                | hir::ModuleDef::Variant(..)
+                | hir::ModuleDef::Const(..)
+                | hir::ModuleDef::Module(..) => !ctx.is_irrefutable_pat_binding,
+                _ => false,
+            },
             hir::ScopeDef::MacroDef(_) => true,
             _ => false,
         };
@@ -42,13 +43,21 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
 mod tests {
     use expect_test::{expect, Expect};
 
-    use crate::{test_utils::completion_list, CompletionKind};
+    use crate::{
+        test_utils::{check_edit, completion_list},
+        CompletionKind,
+    };
 
     fn check(ra_fixture: &str, expect: Expect) {
         let actual = completion_list(ra_fixture, CompletionKind::Reference);
         expect.assert_eq(&actual)
     }
 
+    fn check_snippet(ra_fixture: &str, expect: Expect) {
+        let actual = completion_list(ra_fixture, CompletionKind::Snippet);
+        expect.assert_eq(&actual)
+    }
+
     #[test]
     fn completes_enum_variants_and_modules() {
         check(
@@ -69,7 +78,7 @@ fn foo() {
                 en E
                 ct Z
                 st Bar
-                ev X ()
+                ev X
                 md m
             "#]],
         );
@@ -114,4 +123,139 @@ fn foo() {
             "#]],
         );
     }
+
+    #[test]
+    fn completes_in_param() {
+        check(
+            r#"
+enum E { X }
+
+static FOO: E = E::X;
+struct Bar { f: u32 }
+
+fn foo(<|>) {
+}
+"#,
+            expect![[r#"
+                st Bar
+            "#]],
+        );
+    }
+
+    #[test]
+    fn completes_pat_in_let() {
+        check_snippet(
+            r#"
+struct Bar { f: u32 }
+
+fn foo() {
+    let <|>
+}
+"#,
+            expect![[r#"
+                bn Bar Bar { f$1 }$0
+            "#]],
+        );
+    }
+
+    #[test]
+    fn completes_param_pattern() {
+        check_snippet(
+            r#"
+struct Foo { bar: String, baz: String }
+struct Bar(String, String);
+struct Baz;
+fn outer(<|>) {}
+"#,
+            expect![[r#"
+                bn Foo Foo { bar$1, baz$2 }: Foo$0
+                bn Bar Bar($1, $2): Bar$0
+            "#]],
+        )
+    }
+
+    #[test]
+    fn completes_let_pattern() {
+        check_snippet(
+            r#"
+struct Foo { bar: String, baz: String }
+struct Bar(String, String);
+struct Baz;
+fn outer() {
+    let <|>
+}
+"#,
+            expect![[r#"
+                bn Foo Foo { bar$1, baz$2 }$0
+                bn Bar Bar($1, $2)$0
+            "#]],
+        )
+    }
+
+    #[test]
+    fn completes_refutable_pattern() {
+        check_snippet(
+            r#"
+struct Foo { bar: i32, baz: i32 }
+struct Bar(String, String);
+struct Baz;
+fn outer() {
+    match () {
+        <|>
+    }
+}
+"#,
+            expect![[r#"
+                bn Foo Foo { bar$1, baz$2 }$0
+                bn Bar Bar($1, $2)$0
+            "#]],
+        )
+    }
+
+    #[test]
+    fn omits_private_fields_pat() {
+        check_snippet(
+            r#"
+mod foo {
+    pub struct Foo { pub bar: i32, baz: i32 }
+    pub struct Bar(pub String, String);
+    pub struct Invisible(String, String);
+}
+use foo::*;
+
+fn outer() {
+    match () {
+        <|>
+    }
+}
+"#,
+            expect![[r#"
+                bn Foo Foo { bar$1, .. }$0
+                bn Bar Bar($1, ..)$0
+            "#]],
+        )
+    }
+
+    #[test]
+    fn only_shows_ident_completion() {
+        check_edit(
+            "Foo",
+            r#"
+struct Foo(i32);
+fn main() {
+    match Foo(92) {
+        <|>(92) => (),
+    }
+}
+"#,
+            r#"
+struct Foo(i32);
+fn main() {
+    match Foo(92) {
+        Foo(92) => (),
+    }
+}
+"#,
+        );
+    }
 }
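The gating on is_irrefutable_pat_binding mirrors the language rule: in irrefutable positions (let bindings, function parameters) a lone enum variant pattern is a compile error, while a struct pattern always matches, so only struct patterns are offered there. A small self-contained illustration of that distinction:

    // Why variant patterns are offered only when !is_irrefutable_pat_binding:
    // a lone enum variant is a refutable pattern and is rejected in `let`,
    // whereas a struct pattern is irrefutable and fine.
    enum E { X, Y }
    struct S { f: u32 }

    fn main() {
        let e = E::X;
        // let E::X = e;   // error[E0005]: refutable pattern in local binding
        match e {
            E::X => println!("variant patterns belong in refutable positions"),
            E::Y => {}
        }
        let S { f } = S { f: 1 }; // irrefutable: a struct pattern in a `let` is allowed
        println!("{}", f);
    }
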
diff --git a/crates/completion/src/completions/qualified_path.rs b/crates/completion/src/completions/qualified_path.rs
index 1300f00b2..882c4dcbc 100644
--- a/crates/completion/src/completions/qualified_path.rs
+++ b/crates/completion/src/completions/qualified_path.rs
@@ -118,6 +118,12 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
         _ => return,
     };
 
+    if let Some(Adt::Enum(e)) = ty.as_adt() {
+        for variant in e.variants(ctx.db) {
+            acc.add_enum_variant(ctx, variant, None);
+        }
+    }
+
     let traits_in_scope = ctx.scope.traits_in_scope();
     let mut seen = FxHashSet::default();
     ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
@@ -752,4 +758,27 @@ fn main() {
             "#]],
         );
     }
+
+    #[test]
+    fn completes_self_enum() {
+        check(
+            r#"
+enum Foo {
+    Bar,
+    Baz,
+}
+
+impl Foo {
+    fn foo(self) {
+        Self::<|>
+    }
+}
+"#,
+            expect![[r#"
+                ev Bar ()
+                ev Baz ()
+                me foo(…) fn foo(self)
+            "#]],
+        );
+    }
 }
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index 099ffb4d4..d09849752 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -1,5 +1,7 @@
 //! Completion of names from the current scope, e.g. locals and imported items.
 
+use std::iter;
+
 use either::Either;
 use hir::{Adt, ModPath, ModuleDef, ScopeDef, Type};
 use ide_db::helpers::insert_use::ImportScope;
@@ -50,7 +52,9 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
 }
 
 fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) {
-    if let Some(Adt::Enum(enum_data)) = ty.as_adt() {
+    if let Some(Adt::Enum(enum_data)) =
+        iter::successors(Some(ty.clone()), |ty| ty.remove_ref()).last().and_then(|ty| ty.as_adt())
+    {
         let variants = enum_data.variants(ctx.db);
 
         let module = if let Some(module) = ctx.scope.module() {
@@ -701,6 +705,7 @@ fn main() { <|> }
             "#]],
         );
     }
+
     #[test]
     fn completes_enum_variant_matcharm() {
         check(
@@ -722,6 +727,26 @@ fn main() {
     }
 
     #[test]
+    fn completes_enum_variant_matcharm_ref() {
+        check(
+            r#"
+enum Foo { Bar, Baz, Quux }
+
+fn main() {
+    let foo = Foo::Quux;
+    match &foo { Qu<|> }
+}
+"#,
+            expect![[r#"
+                ev Foo::Bar ()
+                ev Foo::Baz ()
+                ev Foo::Quux ()
+                en Foo
+            "#]],
+        )
+    }
+
+    #[test]
     fn completes_enum_variant_iflet() {
         check(
             r#"
diff --git a/crates/completion/src/context.rs b/crates/completion/src/context.rs
index 5cd11cf77..41de324d8 100644
--- a/crates/completion/src/context.rs
+++ b/crates/completion/src/context.rs
@@ -51,7 +51,7 @@ pub(crate) struct CompletionContext<'a> {
     /// If a name-binding or reference to a const in a pattern.
     /// Irrefutable patterns (like let) are excluded.
     pub(super) is_pat_binding_or_const: bool,
-    pub(super) is_irrefutable_let_pat_binding: bool,
+    pub(super) is_irrefutable_pat_binding: bool,
     /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path.
     pub(super) is_trivial_path: bool,
     /// If not a trivial path, the prefix (qualifier).
@@ -147,7 +147,7 @@ impl<'a> CompletionContext<'a> {
             active_parameter: ActiveParameter::at(db, position),
             is_param: false,
             is_pat_binding_or_const: false,
-            is_irrefutable_let_pat_binding: false,
+            is_irrefutable_pat_binding: false,
             is_trivial_path: false,
             path_qual: None,
             after_if: false,
@@ -327,14 +327,19 @@ impl<'a> CompletionContext<'a> {
             if bind_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast).is_some() {
                 self.is_pat_binding_or_const = false;
             }
-            if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) {
-                if let Some(pat) = let_stmt.pat() {
-                    if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range())
-                    {
-                        self.is_pat_binding_or_const = false;
-                        self.is_irrefutable_let_pat_binding = true;
-                    }
-                }
+            if let Some(Some(pat)) = bind_pat.syntax().ancestors().find_map(|node| {
+                match_ast! {
+                    match node {
+                        ast::LetStmt(it) => Some(it.pat()),
+                        ast::Param(it) => Some(it.pat()),
+                        _ => None,
+                    }
+                }
+            }) {
+                if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) {
+                    self.is_pat_binding_or_const = false;
+                    self.is_irrefutable_pat_binding = true;
+                }
             }
         }
         if is_node::<ast::Param>(name.syntax()) {
diff --git a/crates/completion/src/render.rs b/crates/completion/src/render.rs
index 1092a4825..1ba7201a1 100644
--- a/crates/completion/src/render.rs
+++ b/crates/completion/src/render.rs
@@ -5,6 +5,7 @@ pub(crate) mod macro_;
 pub(crate) mod function;
 pub(crate) mod enum_variant;
 pub(crate) mod const_;
+pub(crate) mod pattern;
 pub(crate) mod type_alias;
 
 mod builder_ext;
@@ -159,6 +160,12 @@ impl<'a> Render<'a> {
                 let item = render_fn(self.ctx, import_to_add, Some(local_name), *func);
                 return Some(item);
             }
+            ScopeDef::ModuleDef(Variant(_))
+                if self.ctx.completion.is_pat_binding_or_const
+                    | self.ctx.completion.is_irrefutable_pat_binding =>
+            {
+                CompletionItemKind::EnumVariant
+            }
             ScopeDef::ModuleDef(Variant(var)) => {
                 let item = render_variant(self.ctx, import_to_add, Some(local_name), *var, None);
                 return Some(item);
diff --git a/crates/completion/src/render/builder_ext.rs b/crates/completion/src/render/builder_ext.rs
index ce8718bd5..d053a988b 100644
--- a/crates/completion/src/render/builder_ext.rs
+++ b/crates/completion/src/render/builder_ext.rs
@@ -34,7 +34,6 @@ impl Builder {
             return false;
         }
         if ctx.is_pattern_call {
-            mark::hit!(dont_duplicate_pattern_parens);
             return false;
         }
         if ctx.is_call {
diff --git a/crates/completion/src/render/enum_variant.rs b/crates/completion/src/render/enum_variant.rs
index 7176fd9b3..732e139ec 100644
--- a/crates/completion/src/render/enum_variant.rs
+++ b/crates/completion/src/render/enum_variant.rs
@@ -126,50 +126,5 @@ fn main() -> Option<i32> {
 }
 "#,
         );
-        check_edit(
-            "Some",
-            r#"
-enum Option<T> { Some(T), None }
-use Option::*;
-fn main(value: Option<i32>) {
-    match value {
-        Som<|>
-    }
-}
-"#,
-            r#"
-enum Option<T> { Some(T), None }
-use Option::*;
-fn main(value: Option<i32>) {
-    match value {
-        Some($0)
-    }
-}
-"#,
-        );
-    }
-
-    #[test]
-    fn dont_duplicate_pattern_parens() {
-        mark::check!(dont_duplicate_pattern_parens);
-        check_edit(
-            "Var",
-            r#"
-enum E { Var(i32) }
-fn main() {
-    match E::Var(92) {
-        E::<|>(92) => (),
-    }
-}
-"#,
-            r#"
-enum E { Var(i32) }
-fn main() {
-    match E::Var(92) {
-        E::Var(92) => (),
-    }
-}
-"#,
-        );
     }
 }
diff --git a/crates/completion/src/render/pattern.rs b/crates/completion/src/render/pattern.rs
new file mode 100644
index 000000000..a3b6a3cac
--- /dev/null
+++ b/crates/completion/src/render/pattern.rs
@@ -0,0 +1,148 @@
+//! Renderer for patterns.
+
+use hir::{db::HirDatabase, HasAttrs, HasVisibility, Name, StructKind};
+use itertools::Itertools;
+
+use crate::{
+    config::SnippetCap, item::CompletionKind, render::RenderContext, CompletionItem,
+    CompletionItemKind,
+};
+
+fn visible_fields(
+    ctx: &RenderContext<'_>,
+    fields: &[hir::Field],
+    item: impl HasAttrs,
+) -> Option<(Vec<hir::Field>, bool)> {
+    let module = ctx.completion.scope.module()?;
+    let n_fields = fields.len();
+    let fields = fields
+        .into_iter()
+        .filter(|field| field.is_visible_from(ctx.db(), module))
+        .copied()
+        .collect::<Vec<_>>();
+
+    let fields_omitted =
+        n_fields - fields.len() > 0 || item.attrs(ctx.db()).by_key("non_exhaustive").exists();
+    Some((fields, fields_omitted))
+}
+
+pub(crate) fn render_struct_pat(
+    ctx: RenderContext<'_>,
+    strukt: hir::Struct,
+    local_name: Option<Name>,
+) -> Option<CompletionItem> {
+    let _p = profile::span("render_struct_pat");
+
+    let fields = strukt.fields(ctx.db());
+    let (visible_fields, fields_omitted) = visible_fields(&ctx, &fields, strukt)?;
+
+    if visible_fields.is_empty() {
+        // Matching a struct without matching its fields is pointless, unlike matching a Variant without its fields
+        return None;
+    }
+
+    let name = local_name.unwrap_or_else(|| strukt.name(ctx.db())).to_string();
+    let pat = render_pat(&ctx, &name, strukt.kind(ctx.db()), &visible_fields, fields_omitted)?;
+
+    Some(build_completion(ctx, name, pat, strukt))
+}
+
+pub(crate) fn render_variant_pat(
+    ctx: RenderContext<'_>,
+    variant: hir::Variant,
+    local_name: Option<Name>,
+) -> Option<CompletionItem> {
+    let _p = profile::span("render_variant_pat");
+
+    let fields = variant.fields(ctx.db());
+    let (visible_fields, fields_omitted) = visible_fields(&ctx, &fields, variant)?;
+
+    let name = local_name.unwrap_or_else(|| variant.name(ctx.db())).to_string();
+    let pat = render_pat(&ctx, &name, variant.kind(ctx.db()), &visible_fields, fields_omitted)?;
+
+    Some(build_completion(ctx, name, pat, variant))
+}
+
+fn build_completion(
+    ctx: RenderContext<'_>,
+    name: String,
+    pat: String,
+    item: impl HasAttrs + Copy,
+) -> CompletionItem {
+    let completion = CompletionItem::new(CompletionKind::Snippet, ctx.source_range(), name)
+        .kind(CompletionItemKind::Binding)
+        .set_documentation(ctx.docs(item))
+        .set_deprecated(ctx.is_deprecated(item))
+        .detail(&pat);
+    let completion = if let Some(snippet_cap) = ctx.snippet_cap() {
+        completion.insert_snippet(snippet_cap, pat)
+    } else {
+        completion.insert_text(pat)
+    };
+    completion.build()
+}
+
+fn render_pat(
+    ctx: &RenderContext<'_>,
+    name: &str,
+    kind: StructKind,
+    fields: &[hir::Field],
+    fields_omitted: bool,
+) -> Option<String> {
+    let mut pat = match kind {
+        StructKind::Tuple if ctx.snippet_cap().is_some() => {
+            render_tuple_as_pat(&fields, &name, fields_omitted)
+        }
+        StructKind::Record => {
+            render_record_as_pat(ctx.db(), ctx.snippet_cap(), &fields, &name, fields_omitted)
+        }
+        _ => return None,
+    };
+
+    if ctx.completion.is_param {
+        pat.push(':');
+        pat.push(' ');
+        pat.push_str(&name);
+    }
+    if ctx.snippet_cap().is_some() {
+        pat.push_str("$0");
+    }
+    Some(pat)
+}
+
+fn render_record_as_pat(
+    db: &dyn HirDatabase,
+    snippet_cap: Option<SnippetCap>,
+    fields: &[hir::Field],
+    name: &str,
+    fields_omitted: bool,
+) -> String {
+    let fields = fields.iter();
+    if snippet_cap.is_some() {
+        format!(
+            "{name} {{ {}{} }}",
+            fields
+                .enumerate()
+                .map(|(idx, field)| format!("{}${}", field.name(db), idx + 1))
+                .format(", "),
+            if fields_omitted { ", .." } else { "" },
+            name = name
+        )
+    } else {
+        format!(
+            "{name} {{ {}{} }}",
+            fields.map(|field| field.name(db)).format(", "),
+            if fields_omitted { ", .." } else { "" },
+            name = name
+        )
+    }
+}
+
+fn render_tuple_as_pat(fields: &[hir::Field], name: &str, fields_omitted: bool) -> String {
+    format!(
+        "{name}({}{})",
+        fields.iter().enumerate().map(|(idx, _)| format!("${}", idx + 1)).format(", "),
+        if fields_omitted { ", .." } else { "" },
+        name = name
+    )
+}
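As a usage note, the strings this renderer emits are snippet texts: $1, $2, ... are tab stops and $0 is the final cursor position, with `..` appended when private fields were filtered out. A std-only sketch of the tuple shape, matching the `bn Bar Bar($1, $2)$0` expectations in the pattern tests above:

    // Std-only sketch of the tuple-pattern snippet shape; a field count
    // stands in for the filtered &[hir::Field].
    fn tuple_pat_snippet(name: &str, n_visible_fields: usize, fields_omitted: bool) -> String {
        let stops = (1..=n_visible_fields)
            .map(|idx| format!("${}", idx))
            .collect::<Vec<_>>()
            .join(", ");
        format!("{}({}{})$0", name, stops, if fields_omitted { ", .." } else { "" })
    }

    fn main() {
        assert_eq!(tuple_pat_snippet("Bar", 2, false), "Bar($1, $2)$0");
        // With omitted private fields, `..` is appended: Bar($1, ..)$0
        assert_eq!(tuple_pat_snippet("Bar", 1, true), "Bar($1, ..)$0");
    }
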
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 73ca6ba9f..b7ded3478 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -9,7 +9,7 @@ use hir_def::{
     adt::StructKind,
     adt::VariantData,
     builtin_type::BuiltinType,
-    expr::{BindingAnnotation, Pat, PatId},
+    expr::{BindingAnnotation, LabelId, Pat, PatId},
     import_map,
     item_tree::ItemTreeNode,
     lang_item::LangItemTarget,
@@ -374,8 +374,6 @@ impl Module {
         let crate_def_map = db.crate_def_map(self.id.krate);
         crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink);
         for decl in self.declarations(db) {
-            decl.diagnostics(db, sink);
-
             match decl {
                 crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
                 crate::ModuleDef::Module(m) => {
@@ -384,7 +382,9 @@ impl Module {
                         m.diagnostics(db, sink)
                     }
                 }
-                _ => (),
+                _ => {
+                    decl.diagnostics(db, sink);
+                }
             }
         }
 
@@ -511,6 +511,10 @@ impl Struct {
         db.struct_data(self.id).repr.clone()
     }
 
+    pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+        self.variant_data(db).kind()
+    }
+
     fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
         db.struct_data(self.id).variant_data.clone()
     }
@@ -1202,6 +1206,34 @@ impl Local {
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Label {
+    pub(crate) parent: DefWithBodyId,
+    pub(crate) label_id: LabelId,
+}
+
+impl Label {
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.parent(db).module(db)
+    }
+
+    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+        self.parent.into()
+    }
+
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let body = db.body(self.parent.into());
+        body[self.label_id].name.clone()
+    }
+
+    pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
+        let (_body, source_map) = db.body_with_source_map(self.parent.into());
+        let src = source_map.label_syntax(self.label_id);
+        let root = src.file_syntax(db.upcast());
+        src.map(|ast| ast.to_node(&root))
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub enum GenericParam {
     TypeParam(TypeParam),
     LifetimeParam(LifetimeParam),
diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs
index 8e0c571b8..a0792b9a6 100644
--- a/crates/hir/src/from_id.rs
+++ b/crates/hir/src/from_id.rs
@@ -4,12 +4,15 @@
 //! are splitting the hir.
 
 use hir_def::{
-    expr::PatId, item_scope::ItemInNs, AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId,
-    GenericDefId, ModuleDefId, VariantId,
+    expr::{LabelId, PatId},
+    item_scope::ItemInNs,
+    AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, ModuleDefId,
+    VariantId,
 };
 
 use crate::{
-    Adt, AssocItem, DefWithBody, Field, GenericDef, Local, MacroDef, ModuleDef, Variant, VariantDef,
+    Adt, AssocItem, DefWithBody, Field, GenericDef, Label, Local, MacroDef, ModuleDef, Variant,
+    VariantDef,
 };
 
 macro_rules! from_id {
@@ -228,6 +231,12 @@ impl From<(DefWithBodyId, PatId)> for Local {
     }
 }
 
+impl From<(DefWithBodyId, LabelId)> for Label {
+    fn from((parent, label_id): (DefWithBodyId, LabelId)) -> Self {
+        Label { parent, label_id }
+    }
+}
+
 impl From<MacroDef> for ItemInNs {
     fn from(macro_def: MacroDef) -> Self {
         ItemInNs::Macros(macro_def.into())
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index bdd270c58..7ac9fd507 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -35,8 +35,8 @@ pub use crate::{
     code_model::{
         Access, Adt, AsAssocItem, AssocItem, AssocItemContainer, Callable, CallableKind, Const,
         Crate, CrateDependency, DefWithBody, Enum, Field, FieldSource, Function, GenericDef,
-        HasVisibility, Impl, LifetimeParam, Local, MacroDef, Module, ModuleDef, ScopeDef, Static,
-        Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
+        HasVisibility, Impl, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, ScopeDef,
+        Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
     },
     has_source::HasSource,
     semantics::{PathResolution, Semantics, SemanticsScope},
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 25ebf73d8..67cd16e31 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -15,7 +15,7 @@ use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
     algo::find_node_at_offset,
-    ast::{self, GenericParamsOwner},
+    ast::{self, GenericParamsOwner, LoopBodyOwner},
     match_ast, AstNode, SyntaxNode, SyntaxToken, TextSize,
 };
 
@@ -25,8 +25,8 @@ use crate::{
     diagnostics::Diagnostic,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, SourceAnalyzer},
-    AssocItem, Callable, Crate, Field, Function, HirFileId, Impl, InFile, LifetimeParam, Local,
-    MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam,
+    AssocItem, Callable, Crate, Field, Function, HirFileId, Impl, InFile, Label, LifetimeParam,
+    Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam,
     VariantDef,
 };
 
@@ -182,6 +182,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.resolve_lifetime_param(lifetime)
     }
 
+    pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+        self.imp.resolve_label(lifetime)
+    }
+
     pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.imp.type_of_expr(expr)
     }
@@ -425,6 +429,28 @@ impl<'db> SemanticsImpl<'db> {
         ToDef::to_def(self, src)
     }
 
+    fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+        let text = lifetime.text();
+        let label = lifetime.syntax().ancestors().find_map(|syn| {
+            let label = match_ast! {
+                match syn {
+                    ast::ForExpr(it) => it.label(),
+                    ast::WhileExpr(it) => it.label(),
+                    ast::LoopExpr(it) => it.label(),
+                    ast::EffectExpr(it) => it.label(),
+                    _ => None,
+                }
+            };
+            label.filter(|l| {
+                l.lifetime()
+                    .and_then(|lt| lt.lifetime_ident_token())
+                    .map_or(false, |lt| lt.text() == text)
+            })
+        })?;
+        let src = self.find_file(label.syntax().clone()).with_value(label);
+        ToDef::to_def(self, src)
+    }
+
     fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.analyze(expr.syntax()).type_of_expr(self.db, expr)
     }
@@ -720,6 +746,7 @@ to_def_impls![
     (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
     (crate::MacroDef, ast::MacroRules, macro_rules_to_def),
     (crate::Local, ast::IdentPat, bind_pat_to_def),
+    (crate::Label, ast::Label, label_to_def),
 ];
 
 fn find_root(node: &SyntaxNode) -> SyntaxNode {
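resolve_label walks the lifetime's ancestors for the first loop-like node whose label text equals the referenced lifetime, then maps that syntax node to a hir::Label. The real thing needs the Semantics machinery; a toy, string-based version of just the matching step looks like this:

    // Toy version of the label-matching filter inside resolve_label:
    // strings stand in for ast::Label nodes, innermost ancestor first.
    fn resolve_label<'a>(enclosing_labels: &[Option<&'a str>], referenced: &str) -> Option<&'a str> {
        enclosing_labels
            .iter()
            .find_map(|label| (*label).filter(|l| *l == referenced))
    }

    fn main() {
        let ancestors = [Some("'inner"), Some("'outer"), None];
        // `break 'outer` resolves past the inner loop's label to the outer one.
        assert_eq!(resolve_label(&ancestors, "'outer"), Some("'outer"));
        assert_eq!(resolve_label(&ancestors, "'missing"), None);
    }
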
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 3efca5baa..424e6e8a9 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -4,7 +4,7 @@ use base_db::FileId;
 use hir_def::{
     child_by_source::ChildBySource,
     dyn_map::DynMap,
-    expr::PatId,
+    expr::{LabelId, PatId},
     keys::{self, Key},
     ConstId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId, GenericDefId, ImplId,
     LifetimeParamId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId,
@@ -108,12 +108,21 @@ impl SourceToDefCtx<'_, '_> {
         &mut self,
         src: InFile<ast::IdentPat>,
     ) -> Option<(DefWithBodyId, PatId)> {
-        let container = self.find_pat_container(src.as_ref().map(|it| it.syntax()))?;
+        let container = self.find_pat_or_label_container(src.as_ref().map(|it| it.syntax()))?;
         let (_body, source_map) = self.db.body_with_source_map(container);
         let src = src.map(ast::Pat::from);
         let pat_id = source_map.node_pat(src.as_ref())?;
         Some((container, pat_id))
     }
+    pub(super) fn label_to_def(
+        &mut self,
+        src: InFile<ast::Label>,
+    ) -> Option<(DefWithBodyId, LabelId)> {
+        let container = self.find_pat_or_label_container(src.as_ref().map(|it| it.syntax()))?;
+        let (_body, source_map) = self.db.body_with_source_map(container);
+        let label_id = source_map.node_label(src.as_ref())?;
+        Some((container, label_id))
+    }
 
     fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
         &mut self,
@@ -237,7 +246,7 @@ impl SourceToDefCtx<'_, '_> {
         None
     }
 
-    fn find_pat_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
+    fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
        for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) {
             let res: DefWithBodyId = match_ast! {
                 match (container.value) {
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs index 998b82601..d07004b9d 100644 --- a/crates/hir_def/src/body.rs +++ b/crates/hir_def/src/body.rs | |||
@@ -26,7 +26,7 @@ pub(crate) use lower::LowerCtx; | |||
26 | use crate::{ | 26 | use crate::{ |
27 | attr::{Attrs, RawAttrs}, | 27 | attr::{Attrs, RawAttrs}, |
28 | db::DefDatabase, | 28 | db::DefDatabase, |
29 | expr::{Expr, ExprId, Pat, PatId}, | 29 | expr::{Expr, ExprId, Label, LabelId, Pat, PatId}, |
30 | item_scope::BuiltinShadowMode, | 30 | item_scope::BuiltinShadowMode, |
31 | item_scope::ItemScope, | 31 | item_scope::ItemScope, |
32 | nameres::CrateDefMap, | 32 | nameres::CrateDefMap, |
@@ -226,6 +226,7 @@ pub(crate) struct Mark { | |||
226 | pub struct Body { | 226 | pub struct Body { |
227 | pub exprs: Arena<Expr>, | 227 | pub exprs: Arena<Expr>, |
228 | pub pats: Arena<Pat>, | 228 | pub pats: Arena<Pat>, |
229 | pub labels: Arena<Label>, | ||
229 | /// The patterns for the function's parameters. While the parameter types are | 230 | /// The patterns for the function's parameters. While the parameter types are |
230 | /// part of the function signature, the patterns are not (they don't change | 231 | /// part of the function signature, the patterns are not (they don't change |
231 | /// the external type of the function). | 232 | /// the external type of the function). |
@@ -244,6 +245,8 @@ pub type ExprSource = InFile<ExprPtr>; | |||
244 | pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>; | 245 | pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>; |
245 | pub type PatSource = InFile<PatPtr>; | 246 | pub type PatSource = InFile<PatPtr>; |
246 | 247 | ||
248 | pub type LabelPtr = AstPtr<ast::Label>; | ||
249 | pub type LabelSource = InFile<LabelPtr>; | ||
247 | /// An item body together with the mapping from syntax nodes to HIR expression | 250 | /// An item body together with the mapping from syntax nodes to HIR expression |
248 | /// IDs. This is needed to go from e.g. a position in a file to the HIR | 251 | /// IDs. This is needed to go from e.g. a position in a file to the HIR |
249 | /// expression containing it; but for type inference etc., we want to operate on | 252 | /// expression containing it; but for type inference etc., we want to operate on |
@@ -261,6 +264,8 @@ pub struct BodySourceMap { | |||
261 | expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>, | 264 | expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>, |
262 | pat_map: FxHashMap<PatSource, PatId>, | 265 | pat_map: FxHashMap<PatSource, PatId>, |
263 | pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>, | 266 | pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>, |
267 | label_map: FxHashMap<LabelSource, LabelId>, | ||
268 | label_map_back: ArenaMap<LabelId, LabelSource>, | ||
264 | field_map: FxHashMap<(ExprId, usize), InFile<AstPtr<ast::RecordExprField>>>, | 269 | field_map: FxHashMap<(ExprId, usize), InFile<AstPtr<ast::RecordExprField>>>, |
265 | expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>, | 270 | expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>, |
266 | 271 | ||
@@ -334,6 +339,14 @@ impl Index<PatId> for Body { | |||
334 | } | 339 | } |
335 | } | 340 | } |
336 | 341 | ||
342 | impl Index<LabelId> for Body { | ||
343 | type Output = Label; | ||
344 | |||
345 | fn index(&self, label: LabelId) -> &Label { | ||
346 | &self.labels[label] | ||
347 | } | ||
348 | } | ||
349 | |||
337 | impl BodySourceMap { | 350 | impl BodySourceMap { |
338 | pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> { | 351 | pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> { |
339 | self.expr_map_back[expr].clone() | 352 | self.expr_map_back[expr].clone() |
@@ -363,6 +376,15 @@ impl BodySourceMap { | |||
363 | self.pat_map.get(&src).cloned() | 376 | self.pat_map.get(&src).cloned() |
364 | } | 377 | } |
365 | 378 | ||
379 | pub fn label_syntax(&self, label: LabelId) -> LabelSource { | ||
380 | self.label_map_back[label].clone() | ||
381 | } | ||
382 | |||
383 | pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> { | ||
384 | let src = node.map(|it| AstPtr::new(it)); | ||
385 | self.label_map.get(&src).cloned() | ||
386 | } | ||
387 | |||
366 | pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile<AstPtr<ast::RecordExprField>> { | 388 | pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile<AstPtr<ast::RecordExprField>> { |
367 | self.field_map[&(expr, field)].clone() | 389 | self.field_map[&(expr, field)].clone() |
368 | } | 390 | } |
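
Note on the `Body`/`BodySourceMap` additions above: labels get the same arena-plus-bidirectional-source-map treatment that expressions and patterns already have — `labels` owns the `Label` values, `label_map` goes from syntax to `LabelId`, and `label_map_back` goes back. A minimal standalone sketch of that pattern, with plain `usize` indices and `String` keys standing in for the real `Idx`/`AstPtr` types (not rust-analyzer's actual API):

    use std::collections::HashMap;

    // Simplified stand-ins for Label / LabelId / AstPtr<ast::Label>.
    type LabelId = usize;
    struct Label { name: String }
    type LabelPtr = String;

    #[derive(Default)]
    struct Body { labels: Vec<Label> }                 // arena: id -> Label
    #[derive(Default)]
    struct SourceMap {
        label_map: HashMap<LabelPtr, LabelId>,         // syntax -> id
        label_map_back: Vec<LabelPtr>,                 // id -> syntax
    }

    fn alloc_label(body: &mut Body, map: &mut SourceMap, label: Label, ptr: LabelPtr) -> LabelId {
        let id = body.labels.len();
        body.labels.push(label);
        map.label_map.insert(ptr.clone(), id);
        map.label_map_back.push(ptr);
        id
    }

    fn main() {
        let (mut body, mut map) = (Body::default(), SourceMap::default());
        let id = alloc_label(&mut body, &mut map, Label { name: "'outer".into() }, "Label@4..11".into());
        println!("allocated `{}` as label {}", body.labels[id].name, id);
        assert_eq!(map.label_map["Label@4..11"], id);      // node_label direction
        assert_eq!(map.label_map_back[id], "Label@4..11"); // label_syntax direction
    }
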
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs index 0f404be1b..17c72779b 100644 --- a/crates/hir_def/src/body/lower.rs +++ b/crates/hir_def/src/body/lower.rs | |||
@@ -22,13 +22,14 @@ use test_utils::mark; | |||
22 | 22 | ||
23 | use crate::{ | 23 | use crate::{ |
24 | adt::StructKind, | 24 | adt::StructKind, |
25 | body::{Body, BodySourceMap, Expander, PatPtr, SyntheticSyntax}, | 25 | body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax}, |
26 | builtin_type::{BuiltinFloat, BuiltinInt}, | 26 | builtin_type::{BuiltinFloat, BuiltinInt}, |
27 | db::DefDatabase, | 27 | db::DefDatabase, |
28 | diagnostics::{InactiveCode, MacroError, UnresolvedProcMacro}, | 28 | diagnostics::{InactiveCode, MacroError, UnresolvedProcMacro}, |
29 | expr::{ | 29 | expr::{ |
30 | dummy_expr_id, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, | 30 | dummy_expr_id, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Label, |
31 | LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, | 31 | LabelId, Literal, LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, |
32 | Statement, | ||
32 | }, | 33 | }, |
33 | item_scope::BuiltinShadowMode, | 34 | item_scope::BuiltinShadowMode, |
34 | item_tree::{ItemTree, ItemTreeId, ItemTreeNode}, | 35 | item_tree::{ItemTree, ItemTreeId, ItemTreeNode}, |
@@ -72,6 +73,7 @@ pub(super) fn lower( | |||
72 | body: Body { | 73 | body: Body { |
73 | exprs: Arena::default(), | 74 | exprs: Arena::default(), |
74 | pats: Arena::default(), | 75 | pats: Arena::default(), |
76 | labels: Arena::default(), | ||
75 | params: Vec::new(), | 77 | params: Vec::new(), |
76 | body_expr: dummy_expr_id(), | 78 | body_expr: dummy_expr_id(), |
77 | item_scope: Default::default(), | 79 | item_scope: Default::default(), |
@@ -175,6 +177,18 @@ impl ExprCollector<'_> { | |||
175 | id | 177 | id |
176 | } | 178 | } |
177 | 179 | ||
180 | fn alloc_label(&mut self, label: Label, ptr: AstPtr<ast::Label>) -> LabelId { | ||
181 | let src = self.expander.to_source(ptr); | ||
182 | let id = self.make_label(label, src.clone()); | ||
183 | self.source_map.label_map.insert(src, id); | ||
184 | id | ||
185 | } | ||
186 | fn make_label(&mut self, label: Label, src: LabelSource) -> LabelId { | ||
187 | let id = self.body.labels.alloc(label); | ||
188 | self.source_map.label_map_back.insert(id, src); | ||
189 | id | ||
190 | } | ||
191 | |||
178 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { | 192 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { |
179 | let syntax_ptr = AstPtr::new(&expr); | 193 | let syntax_ptr = AstPtr::new(&expr); |
180 | if self.check_cfg(&expr).is_none() { | 194 | if self.check_cfg(&expr).is_none() { |
@@ -228,37 +242,40 @@ impl ExprCollector<'_> { | |||
228 | self.alloc_expr(Expr::Unsafe { body }, syntax_ptr) | 242 | self.alloc_expr(Expr::Unsafe { body }, syntax_ptr) |
229 | } | 243 | } |
230 | // FIXME: we need to record these effects somewhere... | 244 | // FIXME: we need to record these effects somewhere... |
231 | ast::Effect::Label(label) => match e.block_expr() { | 245 | ast::Effect::Label(label) => { |
232 | Some(block) => { | 246 | let label = self.collect_label(label); |
233 | let res = self.collect_block(block); | 247 | match e.block_expr() { |
234 | match &mut self.body.exprs[res] { | 248 | Some(block) => { |
235 | Expr::Block { label: block_label, .. } => { | 249 | let res = self.collect_block(block); |
236 | *block_label = label.lifetime().map(|t| Name::new_lifetime(&t)) | 250 | match &mut self.body.exprs[res] { |
251 | Expr::Block { label: block_label, .. } => { | ||
252 | *block_label = Some(label); | ||
253 | } | ||
254 | _ => unreachable!(), | ||
237 | } | 255 | } |
238 | _ => unreachable!(), | 256 | res |
239 | } | 257 | } |
240 | res | 258 | None => self.missing_expr(), |
241 | } | 259 | } |
242 | None => self.missing_expr(), | 260 | } |
243 | }, | ||
244 | // FIXME: we need to record these effects somewhere... | 261 | // FIXME: we need to record these effects somewhere... |
245 | ast::Effect::Async(_) => { | 262 | ast::Effect::Async(_) => { |
246 | let body = self.collect_block_opt(e.block_expr()); | 263 | let body = self.collect_block_opt(e.block_expr()); |
247 | self.alloc_expr(Expr::Async { body }, syntax_ptr) | 264 | self.alloc_expr(Expr::Async { body }, syntax_ptr) |
248 | } | 265 | } |
266 | ast::Effect::Const(_) => { | ||
267 | let body = self.collect_block_opt(e.block_expr()); | ||
268 | self.alloc_expr(Expr::Const { body }, syntax_ptr) | ||
269 | } | ||
249 | }, | 270 | }, |
250 | ast::Expr::BlockExpr(e) => self.collect_block(e), | 271 | ast::Expr::BlockExpr(e) => self.collect_block(e), |
251 | ast::Expr::LoopExpr(e) => { | 272 | ast::Expr::LoopExpr(e) => { |
273 | let label = e.label().map(|label| self.collect_label(label)); | ||
252 | let body = self.collect_block_opt(e.loop_body()); | 274 | let body = self.collect_block_opt(e.loop_body()); |
253 | self.alloc_expr( | 275 | self.alloc_expr(Expr::Loop { body, label }, syntax_ptr) |
254 | Expr::Loop { | ||
255 | body, | ||
256 | label: e.label().and_then(|l| l.lifetime()).map(|l| Name::new_lifetime(&l)), | ||
257 | }, | ||
258 | syntax_ptr, | ||
259 | ) | ||
260 | } | 276 | } |
261 | ast::Expr::WhileExpr(e) => { | 277 | ast::Expr::WhileExpr(e) => { |
278 | let label = e.label().map(|label| self.collect_label(label)); | ||
262 | let body = self.collect_block_opt(e.loop_body()); | 279 | let body = self.collect_block_opt(e.loop_body()); |
263 | 280 | ||
264 | let condition = match e.condition() { | 281 | let condition = match e.condition() { |
@@ -279,42 +296,20 @@ impl ExprCollector<'_> { | |||
279 | ]; | 296 | ]; |
280 | let match_expr = | 297 | let match_expr = |
281 | self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); | 298 | self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); |
282 | return self.alloc_expr( | 299 | return self |
283 | Expr::Loop { | 300 | .alloc_expr(Expr::Loop { body: match_expr, label }, syntax_ptr); |
284 | body: match_expr, | ||
285 | label: e | ||
286 | .label() | ||
287 | .and_then(|l| l.lifetime()) | ||
288 | .map(|l| Name::new_lifetime(&l)), | ||
289 | }, | ||
290 | syntax_ptr, | ||
291 | ); | ||
292 | } | 301 | } |
293 | }, | 302 | }, |
294 | }; | 303 | }; |
295 | 304 | ||
296 | self.alloc_expr( | 305 | self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr) |
297 | Expr::While { | ||
298 | condition, | ||
299 | body, | ||
300 | label: e.label().and_then(|l| l.lifetime()).map(|l| Name::new_lifetime(&l)), | ||
301 | }, | ||
302 | syntax_ptr, | ||
303 | ) | ||
304 | } | 306 | } |
305 | ast::Expr::ForExpr(e) => { | 307 | ast::Expr::ForExpr(e) => { |
308 | let label = e.label().map(|label| self.collect_label(label)); | ||
306 | let iterable = self.collect_expr_opt(e.iterable()); | 309 | let iterable = self.collect_expr_opt(e.iterable()); |
307 | let pat = self.collect_pat_opt(e.pat()); | 310 | let pat = self.collect_pat_opt(e.pat()); |
308 | let body = self.collect_block_opt(e.loop_body()); | 311 | let body = self.collect_block_opt(e.loop_body()); |
309 | self.alloc_expr( | 312 | self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr) |
310 | Expr::For { | ||
311 | iterable, | ||
312 | pat, | ||
313 | body, | ||
314 | label: e.label().and_then(|l| l.lifetime()).map(|l| Name::new_lifetime(&l)), | ||
315 | }, | ||
316 | syntax_ptr, | ||
317 | ) | ||
318 | } | 313 | } |
319 | ast::Expr::CallExpr(e) => { | 314 | ast::Expr::CallExpr(e) => { |
320 | let callee = self.collect_expr_opt(e.expr()); | 315 | let callee = self.collect_expr_opt(e.expr()); |
@@ -814,6 +809,13 @@ impl ExprCollector<'_> { | |||
814 | } | 809 | } |
815 | } | 810 | } |
816 | 811 | ||
812 | fn collect_label(&mut self, ast_label: ast::Label) -> LabelId { | ||
813 | let label = Label { | ||
814 | name: ast_label.lifetime().as_ref().map_or_else(Name::missing, Name::new_lifetime), | ||
815 | }; | ||
816 | self.alloc_label(label, AstPtr::new(&ast_label)) | ||
817 | } | ||
818 | |||
817 | fn collect_pat(&mut self, pat: ast::Pat) -> PatId { | 819 | fn collect_pat(&mut self, pat: ast::Pat) -> PatId { |
818 | let pattern = match &pat { | 820 | let pattern = match &pat { |
819 | ast::Pat::IdentPat(bp) => { | 821 | ast::Pat::IdentPat(bp) => { |
@@ -932,6 +934,14 @@ impl ExprCollector<'_> { | |||
932 | let inner = self.collect_pat_opt(boxpat.pat()); | 934 | let inner = self.collect_pat_opt(boxpat.pat()); |
933 | Pat::Box { inner } | 935 | Pat::Box { inner } |
934 | } | 936 | } |
937 | ast::Pat::ConstBlockPat(const_block_pat) => { | ||
938 | if let Some(expr) = const_block_pat.block_expr() { | ||
939 | let expr_id = self.collect_block(expr); | ||
940 | Pat::ConstBlock(expr_id) | ||
941 | } else { | ||
942 | Pat::Missing | ||
943 | } | ||
944 | } | ||
935 | // FIXME: implement | 945 | // FIXME: implement |
936 | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing, | 946 | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing, |
937 | }; | 947 | }; |
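
The lowering change above replaces the bare `Name` previously stored for loop labels with a proper `LabelId` allocated by `collect_label`, so every labeled `loop`/`while`/`for` and labeled block gets an identity that later phases can resolve. For reference, the surface syntax being lowered is ordinary labeled control flow; labeled block expressions with `break 'label value` have since become stable Rust (1.65):

    fn labels_demo() -> i32 {
        // A labeled block: `break 'b value` leaves the block with that value.
        let x = 'b: {
            if true {
                break 'b 1;
            }
            0
        };

        // A labeled loop: `break 'outer` from the inner loop exits the outer one.
        'outer: for i in 0..3 {
            for j in 0..3 {
                if i + j == 3 {
                    break 'outer;
                }
            }
        }
        x
    }

    fn main() {
        assert_eq!(labels_demo(), 1);
    }
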
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs index e5d740a36..6a481769d 100644 --- a/crates/hir_def/src/expr.rs +++ b/crates/hir_def/src/expr.rs | |||
@@ -30,6 +30,12 @@ pub(crate) fn dummy_expr_id() -> ExprId { | |||
30 | pub type PatId = Idx<Pat>; | 30 | pub type PatId = Idx<Pat>; |
31 | 31 | ||
32 | #[derive(Debug, Clone, Eq, PartialEq)] | 32 | #[derive(Debug, Clone, Eq, PartialEq)] |
33 | pub struct Label { | ||
34 | pub name: Name, | ||
35 | } | ||
36 | pub type LabelId = Idx<Label>; | ||
37 | |||
38 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
33 | pub enum Literal { | 39 | pub enum Literal { |
34 | String(String), | 40 | String(String), |
35 | ByteString(Vec<u8>), | 41 | ByteString(Vec<u8>), |
@@ -52,22 +58,22 @@ pub enum Expr { | |||
52 | Block { | 58 | Block { |
53 | statements: Vec<Statement>, | 59 | statements: Vec<Statement>, |
54 | tail: Option<ExprId>, | 60 | tail: Option<ExprId>, |
55 | label: Option<Name>, | 61 | label: Option<LabelId>, |
56 | }, | 62 | }, |
57 | Loop { | 63 | Loop { |
58 | body: ExprId, | 64 | body: ExprId, |
59 | label: Option<Name>, | 65 | label: Option<LabelId>, |
60 | }, | 66 | }, |
61 | While { | 67 | While { |
62 | condition: ExprId, | 68 | condition: ExprId, |
63 | body: ExprId, | 69 | body: ExprId, |
64 | label: Option<Name>, | 70 | label: Option<LabelId>, |
65 | }, | 71 | }, |
66 | For { | 72 | For { |
67 | iterable: ExprId, | 73 | iterable: ExprId, |
68 | pat: PatId, | 74 | pat: PatId, |
69 | body: ExprId, | 75 | body: ExprId, |
70 | label: Option<Name>, | 76 | label: Option<LabelId>, |
71 | }, | 77 | }, |
72 | Call { | 78 | Call { |
73 | callee: ExprId, | 79 | callee: ExprId, |
@@ -114,6 +120,9 @@ pub enum Expr { | |||
114 | Async { | 120 | Async { |
115 | body: ExprId, | 121 | body: ExprId, |
116 | }, | 122 | }, |
123 | Const { | ||
124 | body: ExprId, | ||
125 | }, | ||
117 | Cast { | 126 | Cast { |
118 | expr: ExprId, | 127 | expr: ExprId, |
119 | type_ref: TypeRef, | 128 | type_ref: TypeRef, |
@@ -253,7 +262,10 @@ impl Expr { | |||
253 | f(*expr); | 262 | f(*expr); |
254 | } | 263 | } |
255 | } | 264 | } |
256 | Expr::TryBlock { body } | Expr::Unsafe { body } | Expr::Async { body } => f(*body), | 265 | Expr::TryBlock { body } |
266 | | Expr::Unsafe { body } | ||
267 | | Expr::Async { body } | ||
268 | | Expr::Const { body } => f(*body), | ||
257 | Expr::Loop { body, .. } => f(*body), | 269 | Expr::Loop { body, .. } => f(*body), |
258 | Expr::While { condition, body, .. } => { | 270 | Expr::While { condition, body, .. } => { |
259 | f(*condition); | 271 | f(*condition); |
@@ -399,12 +411,18 @@ pub enum Pat { | |||
399 | TupleStruct { path: Option<Path>, args: Vec<PatId>, ellipsis: Option<usize> }, | 411 | TupleStruct { path: Option<Path>, args: Vec<PatId>, ellipsis: Option<usize> }, |
400 | Ref { pat: PatId, mutability: Mutability }, | 412 | Ref { pat: PatId, mutability: Mutability }, |
401 | Box { inner: PatId }, | 413 | Box { inner: PatId }, |
414 | ConstBlock(ExprId), | ||
402 | } | 415 | } |
403 | 416 | ||
404 | impl Pat { | 417 | impl Pat { |
405 | pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) { | 418 | pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) { |
406 | match self { | 419 | match self { |
407 | Pat::Range { .. } | Pat::Lit(..) | Pat::Path(..) | Pat::Wild | Pat::Missing => {} | 420 | Pat::Range { .. } |
421 | | Pat::Lit(..) | ||
422 | | Pat::Path(..) | ||
423 | | Pat::ConstBlock(..) | ||
424 | | Pat::Wild | ||
425 | | Pat::Missing => {} | ||
408 | Pat::Bind { subpat, .. } => { | 426 | Pat::Bind { subpat, .. } => { |
409 | subpat.iter().copied().for_each(f); | 427 | subpat.iter().copied().for_each(f); |
410 | } | 428 | } |
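
`Expr::Const` and `Pat::ConstBlock` slot into the existing `walk_*` helpers exactly like the other effect blocks: they simply recurse into their body. A self-contained sketch of that traversal shape, using a simplified enum rather than the real `hir_def::expr` types:

    #[allow(dead_code)]
    enum Expr {
        Literal(i32),
        Unsafe { body: Box<Expr> },
        Const { body: Box<Expr> },
        Loop { body: Box<Expr> },
    }

    // Visit `e` and all of its children; effect-like variants just recurse.
    fn walk(e: &Expr, f: &mut impl FnMut(&Expr)) {
        f(e);
        match e {
            Expr::Literal(_) => {}
            Expr::Unsafe { body } | Expr::Const { body } | Expr::Loop { body } => walk(body, f),
        }
    }

    fn main() {
        let e = Expr::Const { body: Box::new(Expr::Literal(92)) };
        let mut count = 0;
        walk(&e, &mut |_| count += 1);
        assert_eq!(count, 2); // the const block plus its literal body
    }
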
diff --git a/crates/hir_def/src/nameres/mod_resolution.rs b/crates/hir_def/src/nameres/mod_resolution.rs index b4ccd4488..af3262439 100644 --- a/crates/hir_def/src/nameres/mod_resolution.rs +++ b/crates/hir_def/src/nameres/mod_resolution.rs | |||
@@ -79,7 +79,7 @@ impl ModDir { | |||
79 | for candidate in candidate_files.iter() { | 79 | for candidate in candidate_files.iter() { |
80 | let path = AnchoredPath { anchor: file_id, path: candidate.as_str() }; | 80 | let path = AnchoredPath { anchor: file_id, path: candidate.as_str() }; |
81 | if let Some(file_id) = db.resolve_path(path) { | 81 | if let Some(file_id) = db.resolve_path(path) { |
82 | let is_mod_rs = candidate.ends_with("mod.rs"); | 82 | let is_mod_rs = candidate.ends_with("/mod.rs"); |
83 | 83 | ||
84 | let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { | 84 | let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { |
85 | (DirPath::empty(), false) | 85 | (DirPath::empty(), false) |
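
The stricter `"/mod.rs"` suffix check presumably exists to avoid misclassifying a file whose stem merely ends in `mod` (a hypothetical `utils_mod.rs`, say) as a `mod.rs` file, which would change how the child module's directory path is computed:

    fn main() {
        // Old check: any candidate whose file name merely ends in "mod.rs" matched.
        assert!("utils_mod.rs".ends_with("mod.rs"));    // false positive
        // New check: only a real <dir>/mod.rs path matches.
        assert!(!"utils_mod.rs".ends_with("/mod.rs"));
        assert!("utils/mod.rs".ends_with("/mod.rs"));
    }
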
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs index dddbbcdac..6382521fb 100644 --- a/crates/hir_expand/src/builtin_macro.rs +++ b/crates/hir_expand/src/builtin_macro.rs | |||
@@ -563,6 +563,7 @@ mod tests { | |||
563 | 563 | ||
564 | let args = macro_call.token_tree().unwrap(); | 564 | let args = macro_call.token_tree().unwrap(); |
565 | let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0; | 565 | let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0; |
566 | let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(&macro_call)); | ||
566 | 567 | ||
567 | let arg_id = db.intern_eager_expansion({ | 568 | let arg_id = db.intern_eager_expansion({ |
568 | EagerCallLoc { | 569 | EagerCallLoc { |
@@ -570,7 +571,7 @@ mod tests { | |||
570 | fragment: FragmentKind::Expr, | 571 | fragment: FragmentKind::Expr, |
571 | subtree: Arc::new(parsed_args.clone()), | 572 | subtree: Arc::new(parsed_args.clone()), |
572 | krate, | 573 | krate, |
573 | file_id: file_id.into(), | 574 | call: call_id, |
574 | } | 575 | } |
575 | }); | 576 | }); |
576 | 577 | ||
@@ -580,7 +581,7 @@ mod tests { | |||
580 | fragment, | 581 | fragment, |
581 | subtree: Arc::new(subtree), | 582 | subtree: Arc::new(subtree), |
582 | krate, | 583 | krate, |
583 | file_id: file_id.into(), | 584 | call: call_id, |
584 | }; | 585 | }; |
585 | 586 | ||
586 | let id: MacroCallId = db.intern_eager_expansion(eager).into(); | 587 | let id: MacroCallId = db.intern_eager_expansion(eager).into(); |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 4477d867f..077de3727 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -5,7 +5,7 @@ use std::sync::Arc; | |||
5 | use base_db::{salsa, SourceDatabase}; | 5 | use base_db::{salsa, SourceDatabase}; |
6 | use mbe::{ExpandError, ExpandResult, MacroRules}; | 6 | use mbe::{ExpandError, ExpandResult, MacroRules}; |
7 | use parser::FragmentKind; | 7 | use parser::FragmentKind; |
8 | use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; | 8 | use syntax::{algo::diff, ast::NameOwner, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, | 11 | ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, |
@@ -129,11 +129,11 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | |||
129 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | 129 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { |
130 | match id.kind { | 130 | match id.kind { |
131 | MacroDefKind::Declarative => { | 131 | MacroDefKind::Declarative => { |
132 | let macro_call = match id.ast_id?.to_node(db) { | 132 | let macro_rules = match id.ast_id?.to_node(db) { |
133 | syntax::ast::Macro::MacroRules(mac) => mac, | 133 | syntax::ast::Macro::MacroRules(mac) => mac, |
134 | syntax::ast::Macro::MacroDef(_) => return None, | 134 | syntax::ast::Macro::MacroDef(_) => return None, |
135 | }; | 135 | }; |
136 | let arg = macro_call.token_tree()?; | 136 | let arg = macro_rules.token_tree()?; |
137 | let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { | 137 | let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { |
138 | log::warn!("fail on macro_def to token tree: {:#?}", arg); | 138 | log::warn!("fail on macro_def to token tree: {:#?}", arg); |
139 | None | 139 | None |
@@ -141,7 +141,8 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, | |||
141 | let rules = match MacroRules::parse(&tt) { | 141 | let rules = match MacroRules::parse(&tt) { |
142 | Ok(it) => it, | 142 | Ok(it) => it, |
143 | Err(err) => { | 143 | Err(err) => { |
144 | log::warn!("fail on macro_def parse: error: {:#?} {:#?}", err, tt); | 144 | let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default(); |
145 | log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt); | ||
145 | return None; | 146 | return None; |
146 | } | 147 | } |
147 | }; | 148 | }; |
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs index 0229a836e..6354b090d 100644 --- a/crates/hir_expand/src/eager.rs +++ b/crates/hir_expand/src/eager.rs | |||
@@ -110,6 +110,9 @@ pub fn expand_eager_macro( | |||
110 | || err("malformed macro invocation"), | 110 | || err("malformed macro invocation"), |
111 | )?; | 111 | )?; |
112 | 112 | ||
113 | let ast_map = db.ast_id_map(macro_call.file_id); | ||
114 | let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value)); | ||
115 | |||
113 | // Note: | 116 | // Note: |
114 | // When `lazy_expand` is called, its *parent* file must be already exists. | 117 | // When `lazy_expand` is called, its *parent* file must be already exists. |
115 | // Here we store an eager macro id for the argument expanded subtree here | 118 | // Here we store an eager macro id for the argument expanded subtree here |
@@ -120,7 +123,7 @@ pub fn expand_eager_macro( | |||
120 | fragment: FragmentKind::Expr, | 123 | fragment: FragmentKind::Expr, |
121 | subtree: Arc::new(parsed_args.clone()), | 124 | subtree: Arc::new(parsed_args.clone()), |
122 | krate, | 125 | krate, |
123 | file_id: macro_call.file_id, | 126 | call: call_id, |
124 | } | 127 | } |
125 | }); | 128 | }); |
126 | let arg_file_id: MacroCallId = arg_id.into(); | 129 | let arg_file_id: MacroCallId = arg_id.into(); |
@@ -141,13 +144,8 @@ pub fn expand_eager_macro( | |||
141 | let res = eager.expand(db, arg_id, &subtree); | 144 | let res = eager.expand(db, arg_id, &subtree); |
142 | 145 | ||
143 | let (subtree, fragment) = diagnostic_sink.expand_result_option(res)?; | 146 | let (subtree, fragment) = diagnostic_sink.expand_result_option(res)?; |
144 | let eager = EagerCallLoc { | 147 | let eager = |
145 | def, | 148 | EagerCallLoc { def, fragment, subtree: Arc::new(subtree), krate, call: call_id }; |
146 | fragment, | ||
147 | subtree: Arc::new(subtree), | ||
148 | krate, | ||
149 | file_id: macro_call.file_id, | ||
150 | }; | ||
151 | 149 | ||
152 | Ok(db.intern_eager_expansion(eager)) | 150 | Ok(db.intern_eager_expansion(eager)) |
153 | } else { | 151 | } else { |
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index d486186e5..3fa1b1d77 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -83,7 +83,7 @@ impl HirFileId { | |||
83 | } | 83 | } |
84 | MacroCallId::EagerMacro(id) => { | 84 | MacroCallId::EagerMacro(id) => { |
85 | let loc = db.lookup_intern_eager_expansion(id); | 85 | let loc = db.lookup_intern_eager_expansion(id); |
86 | loc.file_id | 86 | loc.call.file_id |
87 | } | 87 | } |
88 | }; | 88 | }; |
89 | file_id.original_file(db) | 89 | file_id.original_file(db) |
@@ -103,7 +103,7 @@ impl HirFileId { | |||
103 | } | 103 | } |
104 | MacroCallId::EagerMacro(id) => { | 104 | MacroCallId::EagerMacro(id) => { |
105 | let loc = db.lookup_intern_eager_expansion(id); | 105 | let loc = db.lookup_intern_eager_expansion(id); |
106 | loc.file_id | 106 | loc.call.file_id |
107 | } | 107 | } |
108 | }; | 108 | }; |
109 | } | 109 | } |
@@ -114,17 +114,16 @@ impl HirFileId { | |||
114 | pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> { | 114 | pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> { |
115 | match self.0 { | 115 | match self.0 { |
116 | HirFileIdRepr::FileId(_) => None, | 116 | HirFileIdRepr::FileId(_) => None, |
117 | HirFileIdRepr::MacroFile(macro_file) => { | 117 | HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { |
118 | let lazy_id = match macro_file.macro_call_id { | 118 | MacroCallId::LazyMacro(lazy_id) => { |
119 | MacroCallId::LazyMacro(id) => id, | 119 | let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id); |
120 | MacroCallId::EagerMacro(_id) => { | 120 | Some(loc.kind.node(db)) |
121 | // FIXME: handle call node for eager macro | 121 | } |
122 | return None; | 122 | MacroCallId::EagerMacro(id) => { |
123 | } | 123 | let loc: EagerCallLoc = db.lookup_intern_eager_expansion(id); |
124 | }; | 124 | Some(loc.call.with_value(loc.call.to_node(db).syntax().clone())) |
125 | let loc = db.lookup_intern_macro(lazy_id); | 125 | } |
126 | Some(loc.kind.node(db)) | 126 | }, |
127 | } | ||
128 | } | 127 | } |
129 | } | 128 | } |
130 | 129 | ||
@@ -304,7 +303,7 @@ pub struct EagerCallLoc { | |||
304 | pub(crate) fragment: FragmentKind, | 303 | pub(crate) fragment: FragmentKind, |
305 | pub(crate) subtree: Arc<tt::Subtree>, | 304 | pub(crate) subtree: Arc<tt::Subtree>, |
306 | pub(crate) krate: CrateId, | 305 | pub(crate) krate: CrateId, |
307 | pub(crate) file_id: HirFileId, | 306 | pub(crate) call: AstId<ast::MacroCall>, |
308 | } | 307 | } |
309 | 308 | ||
310 | /// ExpansionInfo mainly describes how to map text range between src and expanded macro | 309 | /// ExpansionInfo mainly describes how to map text range between src and expanded macro |
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml index 289e812fe..965c1780a 100644 --- a/crates/hir_ty/Cargo.toml +++ b/crates/hir_ty/Cargo.toml | |||
@@ -17,9 +17,9 @@ ena = "0.14.0" | |||
17 | log = "0.4.8" | 17 | log = "0.4.8" |
18 | rustc-hash = "1.1.0" | 18 | rustc-hash = "1.1.0" |
19 | scoped-tls = "1" | 19 | scoped-tls = "1" |
20 | chalk-solve = { version = "0.43", default-features = false } | 20 | chalk-solve = { version = "0.45", default-features = false } |
21 | chalk-ir = "0.43" | 21 | chalk-ir = "0.45" |
22 | chalk-recursive = "0.43" | 22 | chalk-recursive = "0.45" |
23 | 23 | ||
24 | stdx = { path = "../stdx", version = "0.0.0" } | 24 | stdx = { path = "../stdx", version = "0.0.0" } |
25 | hir_def = { path = "../hir_def", version = "0.0.0" } | 25 | hir_def = { path = "../hir_def", version = "0.0.0" } |
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index 2cdce2cef..70a3f3075 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs | |||
@@ -143,7 +143,7 @@ impl<'a> InferenceContext<'a> { | |||
143 | self.breakables.push(BreakableContext { | 143 | self.breakables.push(BreakableContext { |
144 | may_break: false, | 144 | may_break: false, |
145 | break_ty: break_ty.clone(), | 145 | break_ty: break_ty.clone(), |
146 | label: label.clone(), | 146 | label: label.map(|label| self.body[label].name.clone()), |
147 | }); | 147 | }); |
148 | let ty = self.infer_block(statements, *tail, &Expectation::has_type(break_ty)); | 148 | let ty = self.infer_block(statements, *tail, &Expectation::has_type(break_ty)); |
149 | let ctxt = self.breakables.pop().expect("breakable stack broken"); | 149 | let ctxt = self.breakables.pop().expect("breakable stack broken"); |
@@ -155,7 +155,7 @@ impl<'a> InferenceContext<'a> { | |||
155 | } | 155 | } |
156 | None => self.infer_block(statements, *tail, expected), | 156 | None => self.infer_block(statements, *tail, expected), |
157 | }, | 157 | }, |
158 | Expr::Unsafe { body } => self.infer_expr(*body, expected), | 158 | Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected), |
159 | Expr::TryBlock { body } => { | 159 | Expr::TryBlock { body } => { |
160 | let _inner = self.infer_expr(*body, expected); | 160 | let _inner = self.infer_expr(*body, expected); |
161 | // FIXME should be std::result::Result<{inner}, _> | 161 | // FIXME should be std::result::Result<{inner}, _> |
@@ -172,7 +172,7 @@ impl<'a> InferenceContext<'a> { | |||
172 | self.breakables.push(BreakableContext { | 172 | self.breakables.push(BreakableContext { |
173 | may_break: false, | 173 | may_break: false, |
174 | break_ty: self.table.new_type_var(), | 174 | break_ty: self.table.new_type_var(), |
175 | label: label.clone(), | 175 | label: label.map(|label| self.body[label].name.clone()), |
176 | }); | 176 | }); |
177 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | 177 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); |
178 | 178 | ||
@@ -191,7 +191,7 @@ impl<'a> InferenceContext<'a> { | |||
191 | self.breakables.push(BreakableContext { | 191 | self.breakables.push(BreakableContext { |
192 | may_break: false, | 192 | may_break: false, |
193 | break_ty: Ty::Unknown, | 193 | break_ty: Ty::Unknown, |
194 | label: label.clone(), | 194 | label: label.map(|label| self.body[label].name.clone()), |
195 | }); | 195 | }); |
196 | // while let is desugared to a match loop, so this is always simple while | 196 | // while let is desugared to a match loop, so this is always simple while |
197 | self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); | 197 | self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); |
@@ -207,7 +207,7 @@ impl<'a> InferenceContext<'a> { | |||
207 | self.breakables.push(BreakableContext { | 207 | self.breakables.push(BreakableContext { |
208 | may_break: false, | 208 | may_break: false, |
209 | break_ty: Ty::Unknown, | 209 | break_ty: Ty::Unknown, |
210 | label: label.clone(), | 210 | label: label.map(|label| self.body[label].name.clone()), |
211 | }); | 211 | }); |
212 | let pat_ty = | 212 | let pat_ty = |
213 | self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item()); | 213 | self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item()); |
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs index b70ec55eb..d974f805b 100644 --- a/crates/hir_ty/src/infer/pat.rs +++ b/crates/hir_ty/src/infer/pat.rs | |||
@@ -243,6 +243,9 @@ impl<'a> InferenceContext<'a> { | |||
243 | } | 243 | } |
244 | None => Ty::Unknown, | 244 | None => Ty::Unknown, |
245 | }, | 245 | }, |
246 | Pat::ConstBlock(expr) => { | ||
247 | self.infer_expr(*expr, &Expectation::has_type(expected.clone())) | ||
248 | } | ||
246 | Pat::Missing => Ty::Unknown, | 249 | Pat::Missing => Ty::Unknown, |
247 | }; | 250 | }; |
248 | // use a new type variable if we got Ty::Unknown here | 251 | // use a new type variable if we got Ty::Unknown here |
@@ -264,8 +267,9 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool { | |||
264 | | Pat::Range { .. } | 267 | | Pat::Range { .. } |
265 | | Pat::Slice { .. } => true, | 268 | | Pat::Slice { .. } => true, |
266 | Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)), | 269 | Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)), |
267 | // FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented. | 270 | // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented. |
268 | Pat::Path(..) => true, | 271 | Pat::Path(..) => true, |
272 | Pat::ConstBlock(..) => true, | ||
269 | Pat::Lit(expr) => match body[*expr] { | 273 | Pat::Lit(expr) => match body[*expr] { |
270 | Expr::Literal(Literal::String(..)) => false, | 274 | Expr::Literal(Literal::String(..)) => false, |
271 | _ => true, | 275 | _ => true, |
diff --git a/crates/hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs index 5a5f48fd0..2053d8f56 100644 --- a/crates/hir_ty/src/tests/patterns.rs +++ b/crates/hir_ty/src/tests/patterns.rs | |||
@@ -774,3 +774,33 @@ fn foo(tuple: Tuple) { | |||
774 | "#]], | 774 | "#]], |
775 | ); | 775 | ); |
776 | } | 776 | } |
777 | |||
778 | #[test] | ||
779 | fn const_block_pattern() { | ||
780 | check_infer( | ||
781 | r#" | ||
782 | struct Foo(usize); | ||
783 | fn foo(foo: Foo) { | ||
784 | match foo { | ||
785 | const { Foo(15 + 32) } => {}, | ||
786 | _ => {} | ||
787 | } | ||
788 | }"#, | ||
789 | expect![[r#" | ||
790 | 26..29 'foo': Foo | ||
791 | 36..115 '{ ... } }': () | ||
792 | 42..113 'match ... }': () | ||
793 | 48..51 'foo': Foo | ||
794 | 62..84 'const ... 32) }': Foo | ||
795 | 68..84 '{ Foo(... 32) }': Foo | ||
796 | 70..73 'Foo': Foo(usize) -> Foo | ||
797 | 70..82 'Foo(15 + 32)': Foo | ||
798 | 74..76 '15': usize | ||
799 | 74..81 '15 + 32': usize | ||
800 | 79..81 '32': usize | ||
801 | 88..90 '{}': () | ||
802 | 100..101 '_': Foo | ||
803 | 105..107 '{}': () | ||
804 | "#]], | ||
805 | ); | ||
806 | } | ||
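
As the expectation above shows, a const block pattern is inferred like an expression of the scrutinee's type. Outside of rust-analyzer's test fixtures this pattern syntax is still feature-gated; on stable Rust the equivalent is matching on a named constant, which is roughly what the inference result models:

    fn classify(foo: usize) -> &'static str {
        const FORTY_SEVEN: usize = 15 + 32;
        match foo {
            FORTY_SEVEN => "forty-seven",
            _ => "something else",
        }
    }

    fn main() {
        assert_eq!(classify(47), "forty-seven");
    }
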
diff --git a/crates/hir_ty/src/tests/simple.rs b/crates/hir_ty/src/tests/simple.rs index a569223b4..a61282d5a 100644 --- a/crates/hir_ty/src/tests/simple.rs +++ b/crates/hir_ty/src/tests/simple.rs | |||
@@ -1894,6 +1894,7 @@ fn effects_smoke_test() { | |||
1894 | let x = unsafe { 92 }; | 1894 | let x = unsafe { 92 }; |
1895 | let y = async { async { () }.await }; | 1895 | let y = async { async { () }.await }; |
1896 | let z = try { () }; | 1896 | let z = try { () }; |
1897 | let w = const { 92 }; | ||
1897 | let t = 'a: { 92 }; | 1898 | let t = 'a: { 92 }; |
1898 | } | 1899 | } |
1899 | 1900 | ||
@@ -1905,7 +1906,7 @@ fn effects_smoke_test() { | |||
1905 | } | 1906 | } |
1906 | "#, | 1907 | "#, |
1907 | expect![[r#" | 1908 | expect![[r#" |
1908 | 16..136 '{ ...2 }; }': () | 1909 | 16..162 '{ ...2 }; }': () |
1909 | 26..27 'x': i32 | 1910 | 26..27 'x': i32 |
1910 | 30..43 'unsafe { 92 }': i32 | 1911 | 30..43 'unsafe { 92 }': i32 |
1911 | 37..43 '{ 92 }': i32 | 1912 | 37..43 '{ 92 }': i32 |
@@ -1921,9 +1922,13 @@ fn effects_smoke_test() { | |||
1921 | 99..109 'try { () }': {unknown} | 1922 | 99..109 'try { () }': {unknown} |
1922 | 103..109 '{ () }': () | 1923 | 103..109 '{ () }': () |
1923 | 105..107 '()': () | 1924 | 105..107 '()': () |
1924 | 119..120 't': i32 | 1925 | 119..120 'w': i32 |
1925 | 127..133 '{ 92 }': i32 | 1926 | 123..135 'const { 92 }': i32 |
1926 | 129..131 '92': i32 | 1927 | 129..135 '{ 92 }': i32 |
1928 | 131..133 '92': i32 | ||
1929 | 145..146 't': i32 | ||
1930 | 153..159 '{ 92 }': i32 | ||
1931 | 155..157 '92': i32 | ||
1927 | "#]], | 1932 | "#]], |
1928 | ) | 1933 | ) |
1929 | } | 1934 | } |
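
The updated expectation gives `const { 92 }` the type of its body, the same way the other effect blocks are typed. Inline const expressions were still gated when this test was written; they have since stabilized (Rust 1.79), so a plain illustration now compiles on stable:

    fn main() {
        // An inline const block is evaluated at compile time and takes its body's type.
        let w: i32 = const { 90 + 2 };
        assert_eq!(w, 92);
    }
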
diff --git a/crates/hir_ty/src/traits/chalk.rs b/crates/hir_ty/src/traits/chalk.rs index 69eae6f79..2196af677 100644 --- a/crates/hir_ty/src/traits/chalk.rs +++ b/crates/hir_ty/src/traits/chalk.rs | |||
@@ -56,8 +56,13 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
56 | fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> { | 56 | fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> { |
57 | self.db.struct_datum(self.krate, struct_id) | 57 | self.db.struct_datum(self.krate, struct_id) |
58 | } | 58 | } |
59 | fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr { | 59 | fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> { |
60 | rust_ir::AdtRepr { repr_c: false, repr_packed: false } | 60 | // FIXME: keep track of these |
61 | Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None }) | ||
62 | } | ||
63 | fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> { | ||
64 | // FIXME: keep track of this | ||
65 | chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(&Interner) | ||
61 | } | 66 | } |
62 | fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> { | 67 | fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> { |
63 | self.db.impl_datum(self.krate, impl_id) | 68 | self.db.impl_datum(self.krate, impl_id) |
@@ -457,6 +462,7 @@ fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> { | |||
457 | "fn" => WellKnownTrait::Fn, | 462 | "fn" => WellKnownTrait::Fn, |
458 | "unsize" => WellKnownTrait::Unsize, | 463 | "unsize" => WellKnownTrait::Unsize, |
459 | "coerce_unsized" => WellKnownTrait::CoerceUnsized, | 464 | "coerce_unsized" => WellKnownTrait::CoerceUnsized, |
465 | "discriminant_kind" => WellKnownTrait::DiscriminantKind, | ||
460 | _ => return None, | 466 | _ => return None, |
461 | }) | 467 | }) |
462 | } | 468 | } |
@@ -473,6 +479,7 @@ fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { | |||
473 | WellKnownTrait::Unsize => "unsize", | 479 | WellKnownTrait::Unsize => "unsize", |
474 | WellKnownTrait::Unpin => "unpin", | 480 | WellKnownTrait::Unpin => "unpin", |
475 | WellKnownTrait::CoerceUnsized => "coerce_unsized", | 481 | WellKnownTrait::CoerceUnsized => "coerce_unsized", |
482 | WellKnownTrait::DiscriminantKind => "discriminant_kind", | ||
476 | } | 483 | } |
477 | } | 484 | } |
478 | 485 | ||
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs index 3ad30f0c9..b2714cb69 100644 --- a/crates/ide/src/diagnostics.rs +++ b/crates/ide/src/diagnostics.rs | |||
@@ -895,4 +895,17 @@ impl TestStruct { | |||
895 | "#, | 895 | "#, |
896 | ); | 896 | ); |
897 | } | 897 | } |
898 | |||
899 | #[test] | ||
900 | fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() { | ||
901 | let input = r#"fn FOO<|>() {}"#; | ||
902 | let expected = r#"fn foo() {}"#; | ||
903 | |||
904 | let (analysis, file_position) = fixture::position(input); | ||
905 | let diagnostics = | ||
906 | analysis.diagnostics(&DiagnosticsConfig::default(), file_position.file_id).unwrap(); | ||
907 | assert_eq!(diagnostics.len(), 1); | ||
908 | |||
909 | check_fixes(input, expected); | ||
910 | } | ||
898 | } | 911 | } |
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 65e45c920..912144f8b 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs | |||
@@ -750,6 +750,31 @@ fn test() { | |||
750 | } | 750 | } |
751 | 751 | ||
752 | #[test] | 752 | #[test] |
753 | fn goto_through_included_file() { | ||
754 | check( | ||
755 | r#" | ||
756 | //- /main.rs | ||
757 | #[rustc_builtin_macro] | ||
758 | macro_rules! include {} | ||
759 | |||
760 | include!("foo.rs"); | ||
761 | //^^^^^^^^^^^^^^^^^^^ | ||
762 | |||
763 | fn f() { | ||
764 | foo<|>(); | ||
765 | } | ||
766 | |||
767 | mod confuse_index { | ||
768 | pub fn foo() {} | ||
769 | } | ||
770 | |||
771 | //- /foo.rs | ||
772 | fn foo() {} | ||
773 | "#, | ||
774 | ); | ||
775 | } | ||
776 | |||
777 | #[test] | ||
753 | fn goto_for_type_param() { | 778 | fn goto_for_type_param() { |
754 | check( | 779 | check( |
755 | r#" | 780 | r#" |
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index dbad9a84f..a75cc85b6 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs | |||
@@ -490,8 +490,18 @@ impl Analysis { | |||
490 | .unwrap_or_default()) | 490 | .unwrap_or_default()) |
491 | } | 491 | } |
492 | 492 | ||
493 | /// Computes assists (aka code actions aka intentions) for the given | ||
494 | /// position. Computes enough info to show the lightbulb list in the editor, | ||
495 | /// but doesn't compute actual edits, to improve performance. | ||
496 | /// | ||
497 | /// When the user clicks on the assist, call `resolve_assists` to get the | ||
498 | /// edit. | ||
499 | pub fn assists(&self, config: &AssistConfig, frange: FileRange) -> Cancelable<Vec<Assist>> { | ||
500 | self.with_db(|db| Assist::unresolved(db, config, frange)) | ||
501 | } | ||
502 | |||
493 | /// Computes resolved assists with source changes for the given position. | 503 | /// Computes resolved assists with source changes for the given position. |
494 | pub fn resolved_assists( | 504 | pub fn resolve_assists( |
495 | &self, | 505 | &self, |
496 | config: &AssistConfig, | 506 | config: &AssistConfig, |
497 | frange: FileRange, | 507 | frange: FileRange, |
@@ -499,16 +509,6 @@ impl Analysis { | |||
499 | self.with_db(|db| assists::Assist::resolved(db, config, frange)) | 509 | self.with_db(|db| assists::Assist::resolved(db, config, frange)) |
500 | } | 510 | } |
501 | 511 | ||
502 | /// Computes unresolved assists (aka code actions aka intentions) for the given | ||
503 | /// position. | ||
504 | pub fn unresolved_assists( | ||
505 | &self, | ||
506 | config: &AssistConfig, | ||
507 | frange: FileRange, | ||
508 | ) -> Cancelable<Vec<Assist>> { | ||
509 | self.with_db(|db| Assist::unresolved(db, config, frange)) | ||
510 | } | ||
511 | |||
512 | /// Computes the set of diagnostics for the given file. | 512 | /// Computes the set of diagnostics for the given file. |
513 | pub fn diagnostics( | 513 | pub fn diagnostics( |
514 | &self, | 514 | &self, |
@@ -535,6 +535,14 @@ impl Analysis { | |||
535 | self.with_db(|db| references::rename::prepare_rename(db, position)) | 535 | self.with_db(|db| references::rename::prepare_rename(db, position)) |
536 | } | 536 | } |
537 | 537 | ||
538 | pub fn will_rename_file( | ||
539 | &self, | ||
540 | file_id: FileId, | ||
541 | new_name_stem: &str, | ||
542 | ) -> Cancelable<Option<SourceChange>> { | ||
543 | self.with_db(|db| references::rename::will_rename_file(db, file_id, new_name_stem)) | ||
544 | } | ||
545 | |||
538 | pub fn structural_search_replace( | 546 | pub fn structural_search_replace( |
539 | &self, | 547 | &self, |
540 | query: &str, | 548 | query: &str, |
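
The API split above separates the cheap pass (`assists`, enough to populate the lightbulb menu) from the expensive one (`resolve_assists`, run only for the assist the user picks). A simplified stand-alone sketch of that two-phase flow, with toy types rather than rust-analyzer's real `Assist`:

    #[derive(Clone)]
    struct Assist {
        id: &'static str,
        label: &'static str,
        edit: Option<String>, // None until resolved
    }

    // Cheap pass: enough to render the list, no edits computed.
    fn assists() -> Vec<Assist> {
        vec![Assist { id: "add_impl", label: "Add impl block", edit: None }]
    }

    // Expensive pass: run only for the assist the user actually picked.
    fn resolve(assist: &Assist) -> Assist {
        Assist { edit: Some(format!("/* edit for `{}` */", assist.id)), ..assist.clone() }
    }

    fn main() {
        let list = assists();
        let chosen = resolve(&list[0]);
        assert!(list[0].edit.is_none());
        assert!(chosen.edit.is_some());
        println!("{} -> {:?}", chosen.label, chosen.edit);
    }
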
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs index 4f353852f..854bf194e 100644 --- a/crates/ide/src/references/rename.rs +++ b/crates/ide/src/references/rename.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | }; | 6 | }; |
7 | 7 | ||
8 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; | 8 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; |
9 | use ide_db::base_db::{AnchoredPathBuf, FileRange, SourceDatabaseExt}; | 9 | use ide_db::base_db::{AnchoredPathBuf, FileId, FileRange, SourceDatabaseExt}; |
10 | use ide_db::{ | 10 | use ide_db::{ |
11 | defs::{Definition, NameClass, NameRefClass}, | 11 | defs::{Definition, NameClass, NameRefClass}, |
12 | RootDatabase, | 12 | RootDatabase, |
@@ -110,6 +110,23 @@ pub(crate) fn rename_with_semantics( | |||
110 | } | 110 | } |
111 | } | 111 | } |
112 | 112 | ||
113 | pub(crate) fn will_rename_file( | ||
114 | db: &RootDatabase, | ||
115 | file_id: FileId, | ||
116 | new_name_stem: &str, | ||
117 | ) -> Option<SourceChange> { | ||
118 | let sema = Semantics::new(db); | ||
119 | let module = sema.to_module_def(file_id)?; | ||
120 | |||
121 | let decl = module.declaration_source(db)?; | ||
122 | let range = decl.value.name()?.syntax().text_range(); | ||
123 | |||
124 | let position = FilePosition { file_id: decl.file_id.original_file(db), offset: range.start() }; | ||
125 | let mut change = rename_mod(&sema, position, module, new_name_stem).ok()?.info; | ||
126 | change.file_system_edits.clear(); | ||
127 | Some(change) | ||
128 | } | ||
129 | |||
113 | fn find_module_at_offset( | 130 | fn find_module_at_offset( |
114 | sema: &Semantics<RootDatabase>, | 131 | sema: &Semantics<RootDatabase>, |
115 | position: FilePosition, | 132 | position: FilePosition, |
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index e897d5a52..c7a3556a7 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs | |||
@@ -46,6 +46,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = | |||
46 | T![continue], | 46 | T![continue], |
47 | T![async], | 47 | T![async], |
48 | T![try], | 48 | T![try], |
49 | T![const], | ||
49 | T![loop], | 50 | T![loop], |
50 | T![for], | 51 | T![for], |
51 | LIFETIME_IDENT, | 52 | LIFETIME_IDENT, |
@@ -115,6 +116,14 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
115 | block_expr(p); | 116 | block_expr(p); |
116 | m.complete(p, EFFECT_EXPR) | 117 | m.complete(p, EFFECT_EXPR) |
117 | } | 118 | } |
119 | // test const_block | ||
120 | // fn f() { const { } } | ||
121 | T![const] if la == T!['{'] => { | ||
122 | let m = p.start(); | ||
123 | p.bump(T![const]); | ||
124 | block_expr(p); | ||
125 | m.complete(p, EFFECT_EXPR) | ||
126 | } | ||
118 | T!['{'] => { | 127 | T!['{'] => { |
119 | // test for_range_from | 128 | // test for_range_from |
120 | // fn foo() { | 129 | // fn foo() { |
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs index 8999829b4..cf4168d32 100644 --- a/crates/parser/src/grammar/items.rs +++ b/crates/parser/src/grammar/items.rs | |||
@@ -96,7 +96,10 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
96 | let mut has_mods = false; | 96 | let mut has_mods = false; |
97 | 97 | ||
98 | // modifiers | 98 | // modifiers |
99 | has_mods |= p.eat(T![const]); | 99 | if p.at(T![const]) && p.nth(1) != T!['{'] { |
100 | p.eat(T![const]); | ||
101 | has_mods = true; | ||
102 | } | ||
100 | 103 | ||
101 | // test_err async_without_semicolon | 104 | // test_err async_without_semicolon |
102 | // fn foo() { let _ = async {} } | 105 | // fn foo() { let _ = async {} } |
@@ -167,7 +170,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
167 | m.complete(p, TRAIT); | 170 | m.complete(p, TRAIT); |
168 | } | 171 | } |
169 | 172 | ||
170 | T![const] => { | 173 | T![const] if p.nth(1) != T!['{'] => { |
171 | consts::konst(p, m); | 174 | consts::konst(p, m); |
172 | } | 175 | } |
173 | 176 | ||
@@ -386,10 +389,15 @@ fn macro_rules(p: &mut Parser, m: Marker) { | |||
386 | } | 389 | } |
387 | 390 | ||
388 | match p.current() { | 391 | match p.current() { |
389 | T!['{'] => { | 392 | // test macro_rules_non_brace |
393 | // macro_rules! m ( ($i:ident) => {} ); | ||
394 | // macro_rules! m [ ($i:ident) => {} ]; | ||
395 | T!['['] | T!['('] => { | ||
390 | token_tree(p); | 396 | token_tree(p); |
397 | p.expect(T![;]); | ||
391 | } | 398 | } |
392 | _ => p.error("expected `{`"), | 399 | T!['{'] => token_tree(p), |
400 | _ => p.error("expected `{`, `[`, `(`"), | ||
393 | } | 401 | } |
394 | m.complete(p, MACRO_RULES); | 402 | m.complete(p, MACRO_RULES); |
395 | } | 403 | } |
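
The new `macro_rules_non_brace` test covers the two delimiter forms the parser previously rejected: a `macro_rules!` body may be wrapped in `()`, `[]`, or `{}`, and the non-brace forms require a trailing semicolon. Plain Rust illustration:

    // Parentheses and brackets are valid outer delimiters; note the trailing `;`.
    macro_rules! square ( ($e:expr) => { $e * $e } );
    macro_rules! double [ ($e:expr) => { $e + $e } ];

    fn main() {
        assert_eq!(square!(3), 9);
        assert_eq!(double!(4), 8);
    }
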
diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index 7e7f73dee..b53d5749f 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs | |||
@@ -89,6 +89,7 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { | |||
89 | let m = match p.nth(0) { | 89 | let m = match p.nth(0) { |
90 | T![box] => box_pat(p), | 90 | T![box] => box_pat(p), |
91 | T![ref] | T![mut] => ident_pat(p, true), | 91 | T![ref] | T![mut] => ident_pat(p, true), |
92 | T![const] => const_block_pat(p), | ||
92 | IDENT => match p.nth(1) { | 93 | IDENT => match p.nth(1) { |
93 | // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro | 94 | // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro |
94 | // (T![x]). | 95 | // (T![x]). |
@@ -386,3 +387,16 @@ fn box_pat(p: &mut Parser) -> CompletedMarker { | |||
386 | pattern_single(p); | 387 | pattern_single(p); |
387 | m.complete(p, BOX_PAT) | 388 | m.complete(p, BOX_PAT) |
388 | } | 389 | } |
390 | |||
391 | // test const_block_pat | ||
392 | // fn main() { | ||
393 | // let const { 15 } = (); | ||
394 | // let const { foo(); bar() } = (); | ||
395 | // } | ||
396 | fn const_block_pat(p: &mut Parser) -> CompletedMarker { | ||
397 | assert!(p.at(T![const])); | ||
398 | let m = p.start(); | ||
399 | p.bump(T![const]); | ||
400 | expressions::block_expr(p); | ||
401 | m.complete(p, CONST_BLOCK_PAT) | ||
402 | } | ||
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 980aa5979..f69e71bdb 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs | |||
@@ -170,6 +170,7 @@ pub enum SyntaxKind { | |||
170 | RANGE_PAT, | 170 | RANGE_PAT, |
171 | LITERAL_PAT, | 171 | LITERAL_PAT, |
172 | MACRO_PAT, | 172 | MACRO_PAT, |
173 | CONST_BLOCK_PAT, | ||
173 | TUPLE_EXPR, | 174 | TUPLE_EXPR, |
174 | ARRAY_EXPR, | 175 | ARRAY_EXPR, |
175 | PAREN_EXPR, | 176 | PAREN_EXPR, |
diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml index 1bfa6c3fc..df9a55c10 100644 --- a/crates/proc_macro_srv/Cargo.toml +++ b/crates/proc_macro_srv/Cargo.toml | |||
@@ -10,7 +10,7 @@ edition = "2018" | |||
10 | doctest = false | 10 | doctest = false |
11 | 11 | ||
12 | [dependencies] | 12 | [dependencies] |
13 | object = { version = "0.23", default-features = false, features = ["std", "read_core", "elf", "macho", "pe", "unaligned"] } | 13 | object = { version = "0.23", default-features = false, features = ["std", "read_core", "elf", "macho", "pe"] } |
14 | libloading = "0.6.0" | 14 | libloading = "0.6.0" |
15 | memmap = "0.7" | 15 | memmap = "0.7" |
16 | 16 | ||
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 0b4d3f4eb..53e70eaf7 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -28,6 +28,7 @@ oorandom = "11.1.2" | |||
28 | rustc-hash = "1.1.0" | 28 | rustc-hash = "1.1.0" |
29 | serde = { version = "1.0.106", features = ["derive"] } | 29 | serde = { version = "1.0.106", features = ["derive"] } |
30 | serde_json = { version = "1.0.48", features = ["preserve_order"] } | 30 | serde_json = { version = "1.0.48", features = ["preserve_order"] } |
31 | serde_path_to_error = "0.1" | ||
31 | threadpool = "1.7.1" | 32 | threadpool = "1.7.1" |
32 | rayon = "1.5" | 33 | rayon = "1.5" |
33 | mimalloc = { version = "0.1.19", default-features = false, optional = true } | 34 | mimalloc = { version = "0.1.19", default-features = false, optional = true } |
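
The new `serde_path_to_error` dependency is presumably there so that config deserialization errors can point at the exact offending key rather than just failing. A sketch of how that crate is typically used, with a hypothetical config shape (not rust-analyzer's real one) and `serde`/`serde_json` assumed as dependencies:

    use serde::Deserialize;

    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct Config {
        cargo_autoreload: bool,
    }

    fn main() {
        // A value of the wrong type for `cargo_autoreload`:
        let json = r#"{ "cargo_autoreload": "yes" }"#;
        let de = &mut serde_json::Deserializer::from_str(json);
        let result: Result<Config, _> = serde_path_to_error::deserialize(de);
        // The error carries the path to the offending key (here `cargo_autoreload`),
        // which makes config error messages much easier to act on.
        let err = result.unwrap_err();
        println!("bad value at `{}`: {}", err.path(), err);
    }
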
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index de5eb93b5..80e46bf7f 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs | |||
@@ -5,12 +5,14 @@ use ide::CompletionResolveCapability; | |||
5 | use lsp_types::{ | 5 | use lsp_types::{ |
6 | CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions, | 6 | CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions, |
7 | CodeActionProviderCapability, CodeLensOptions, CompletionOptions, | 7 | CodeActionProviderCapability, CodeLensOptions, CompletionOptions, |
8 | DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability, HoverProviderCapability, | 8 | DocumentOnTypeFormattingOptions, FileOperationFilter, FileOperationPattern, |
9 | ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions, | 9 | FileOperationPatternKind, FileOperationRegistrationOptions, FoldingRangeProviderCapability, |
10 | HoverProviderCapability, ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions, | ||
10 | SelectionRangeProviderCapability, SemanticTokensFullOptions, SemanticTokensLegend, | 11 | SelectionRangeProviderCapability, SemanticTokensFullOptions, SemanticTokensLegend, |
11 | SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability, | 12 | SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability, |
12 | TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability, | 13 | TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability, |
13 | WorkDoneProgressOptions, | 14 | WorkDoneProgressOptions, WorkspaceFileOperationsServerCapabilities, |
15 | WorkspaceServerCapabilities, | ||
14 | }; | 16 | }; |
15 | use rustc_hash::FxHashSet; | 17 | use rustc_hash::FxHashSet; |
16 | use serde_json::json; | 18 | use serde_json::json; |
@@ -68,7 +70,26 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti | |||
68 | document_link_provider: None, | 70 | document_link_provider: None, |
69 | color_provider: None, | 71 | color_provider: None, |
70 | execute_command_provider: None, | 72 | execute_command_provider: None, |
71 | workspace: None, | 73 | workspace: Some(WorkspaceServerCapabilities { |
74 | workspace_folders: None, | ||
75 | file_operations: Some(WorkspaceFileOperationsServerCapabilities { | ||
76 | did_create: None, | ||
77 | will_create: None, | ||
78 | did_rename: None, | ||
79 | will_rename: Some(FileOperationRegistrationOptions { | ||
80 | filters: vec![FileOperationFilter { | ||
81 | scheme: Some(String::from("file")), | ||
82 | pattern: FileOperationPattern { | ||
83 | glob: String::from("**/*.rs"), | ||
84 | matches: Some(FileOperationPatternKind::File), | ||
85 | options: None, | ||
86 | }, | ||
87 | }], | ||
88 | }), | ||
89 | did_delete: None, | ||
90 | will_delete: None, | ||
91 | }), | ||
92 | }), | ||
72 | call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)), | 93 | call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)), |
73 | semantic_tokens_provider: Some( | 94 | semantic_tokens_provider: Some( |
74 | SemanticTokensOptions { | 95 | SemanticTokensOptions { |
diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs index bdbe565e6..5a2dc39d5 100644 --- a/crates/rust-analyzer/src/cli/progress_report.rs +++ b/crates/rust-analyzer/src/cli/progress_report.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! A simple progress bar | 1 | //! A simple progress bar |
2 | //! | 2 | //! |
3 | //! A single-threaded, non-optimized progress bar | 3 | //! A single-threaded, non-optimized progress bar |
4 | use std::io::Write; | 4 | use std::io::{self, Write}; |
5 | 5 | ||
6 | /// A Simple ASCII Progress Bar | 6 | /// A Simple ASCII Progress Bar |
7 | pub(crate) struct ProgressReport { | 7 | pub(crate) struct ProgressReport { |
@@ -97,8 +97,8 @@ impl ProgressReport { | |||
97 | } | 97 | } |
98 | } | 98 | } |
99 | 99 | ||
100 | let _ = std::io::stdout().write(output.as_bytes()); | 100 | let _ = io::stdout().write(output.as_bytes()); |
101 | let _ = std::io::stdout().flush(); | 101 | let _ = io::stdout().flush(); |
102 | self.text = text.to_string(); | 102 | self.text = text.to_string(); |
103 | } | 103 | } |
104 | 104 | ||
@@ -115,6 +115,8 @@ impl ProgressReport { | |||
115 | let spaces = " ".repeat(self.text.len()); | 115 | let spaces = " ".repeat(self.text.len()); |
116 | let backspaces = "\x08".repeat(self.text.len()); | 116 | let backspaces = "\x08".repeat(self.text.len()); |
117 | print!("{}{}{}", backspaces, spaces, backspaces); | 117 | print!("{}{}{}", backspaces, spaces, backspaces); |
118 | let _ = io::stdout().flush(); | ||
119 | |||
118 | self.text = String::new(); | 120 | self.text = String::new(); |
119 | } | 121 | } |
120 | } | 122 | } |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 1f4b5c24c..1db5b4e7d 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -33,7 +33,7 @@ config_data! { | |||
33 | callInfo_full: bool = "true", | 33 | callInfo_full: bool = "true", |
34 | 34 | ||
35 | /// Automatically refresh project info via `cargo metadata` on | 35 | /// Automatically refresh project info via `cargo metadata` on |
36 | /// Cargo.toml changes. | 36 | /// `Cargo.toml` changes. |
37 | cargo_autoreload: bool = "true", | 37 | cargo_autoreload: bool = "true", |
38 | /// Activate all available features. | 38 | /// Activate all available features. |
39 | cargo_allFeatures: bool = "false", | 39 | cargo_allFeatures: bool = "false", |
@@ -52,7 +52,7 @@ config_data! { | |||
52 | /// Run specified `cargo check` command for diagnostics on save. | 52 | /// Run specified `cargo check` command for diagnostics on save. |
53 | checkOnSave_enable: bool = "true", | 53 | checkOnSave_enable: bool = "true", |
54 | /// Check with all features (will be passed as `--all-features`). | 54 | /// Check with all features (will be passed as `--all-features`). |
55 | /// Defaults to `rust-analyzer.cargo.allFeatures`. | 55 | /// Defaults to `#rust-analyzer.cargo.allFeatures#`. |
56 | checkOnSave_allFeatures: Option<bool> = "null", | 56 | checkOnSave_allFeatures: Option<bool> = "null", |
57 | /// Check all targets and tests (will be passed as `--all-targets`). | 57 | /// Check all targets and tests (will be passed as `--all-targets`). |
58 | checkOnSave_allTargets: bool = "true", | 58 | checkOnSave_allTargets: bool = "true", |
@@ -61,12 +61,12 @@ config_data! { | |||
61 | /// Do not activate the `default` feature. | 61 | /// Do not activate the `default` feature. |
62 | checkOnSave_noDefaultFeatures: Option<bool> = "null", | 62 | checkOnSave_noDefaultFeatures: Option<bool> = "null", |
63 | /// Check for a specific target. Defaults to | 63 | /// Check for a specific target. Defaults to |
64 | /// `rust-analyzer.cargo.target`. | 64 | /// `#rust-analyzer.cargo.target#`. |
65 | checkOnSave_target: Option<String> = "null", | 65 | checkOnSave_target: Option<String> = "null", |
66 | /// Extra arguments for `cargo check`. | 66 | /// Extra arguments for `cargo check`. |
67 | checkOnSave_extraArgs: Vec<String> = "[]", | 67 | checkOnSave_extraArgs: Vec<String> = "[]", |
68 | /// List of features to activate. Defaults to | 68 | /// List of features to activate. Defaults to |
69 | /// `rust-analyzer.cargo.features`. | 69 | /// `#rust-analyzer.cargo.features#`. |
70 | checkOnSave_features: Option<Vec<String>> = "null", | 70 | checkOnSave_features: Option<Vec<String>> = "null", |
71 | /// Advanced option, fully override the command rust-analyzer uses for | 71 | /// Advanced option, fully override the command rust-analyzer uses for |
72 | /// checking. The command should include `--message-format=json` or | 72 | /// checking. The command should include `--message-format=json` or |
@@ -80,7 +80,7 @@ config_data! { | |||
80 | /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. | 80 | /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. |
81 | completion_postfix_enable: bool = "true", | 81 | completion_postfix_enable: bool = "true", |
82 | /// Toggles the additional completions that automatically add imports when completed. | 82 | /// Toggles the additional completions that automatically add imports when completed. |
83 | /// Note that your client have to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. | 83 | /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. |
84 | completion_autoimport_enable: bool = "true", | 84 | completion_autoimport_enable: bool = "true", |
85 | 85 | ||
86 | /// Whether to show native rust-analyzer diagnostics. | 86 | /// Whether to show native rust-analyzer diagnostics. |
@@ -90,13 +90,13 @@ config_data! { | |||
90 | diagnostics_enableExperimental: bool = "true", | 90 | diagnostics_enableExperimental: bool = "true", |
91 | /// List of rust-analyzer diagnostics to disable. | 91 | /// List of rust-analyzer diagnostics to disable. |
92 | diagnostics_disabled: FxHashSet<String> = "[]", | 92 | diagnostics_disabled: FxHashSet<String> = "[]", |
93 | /// List of warnings that should be displayed with info severity.\nThe | 93 | /// List of warnings that should be displayed with info severity.\n\nThe |
94 | /// warnings will be indicated by a blue squiggly underline in code and | 94 | /// warnings will be indicated by a blue squiggly underline in code and |
95 | /// a blue icon in the problems panel. | 95 | /// a blue icon in the `Problems Panel`. |
96 | diagnostics_warningsAsHint: Vec<String> = "[]", | 96 | diagnostics_warningsAsHint: Vec<String> = "[]", |
97 | /// List of warnings that should be displayed with hint severity.\nThe | 97 | /// List of warnings that should be displayed with hint severity.\n\nThe |
98 | /// warnings will be indicated by faded text or three dots in code and | 98 | /// warnings will be indicated by faded text or three dots in code and |
99 | /// will not show up in the problems panel. | 99 | /// will not show up in the `Problems Panel`. |
100 | diagnostics_warningsAsInfo: Vec<String> = "[]", | 100 | diagnostics_warningsAsInfo: Vec<String> = "[]", |
101 | 101 | ||
102 | /// Controls file watching implementation. | 102 | /// Controls file watching implementation. |
@@ -121,7 +121,7 @@ config_data! { | |||
121 | 121 | ||
122 | /// Whether to show inlay type hints for method chains. | 122 | /// Whether to show inlay type hints for method chains. |
123 | inlayHints_chainingHints: bool = "true", | 123 | inlayHints_chainingHints: bool = "true", |
124 | /// Maximum length for inlay hints. | 124 | /// Maximum length for inlay hints. Default is unlimited. |
125 | inlayHints_maxLength: Option<usize> = "null", | 125 | inlayHints_maxLength: Option<usize> = "null", |
126 | /// Whether to show function parameter name inlay hints at the call | 126 | /// Whether to show function parameter name inlay hints at the call |
127 | /// site. | 127 | /// site. |
@@ -145,27 +145,27 @@ config_data! { | |||
145 | lens_methodReferences: bool = "false", | 145 | lens_methodReferences: bool = "false", |
146 | 146 | ||
147 | /// Disable project auto-discovery in favor of explicitly specified set | 147 | /// Disable project auto-discovery in favor of explicitly specified set |
148 | /// of projects. \nElements must be paths pointing to Cargo.toml, | 148 | /// of projects.\n\nElements must be paths pointing to `Cargo.toml`, |
149 | /// rust-project.json, or JSON objects in rust-project.json format. | 149 | /// `rust-project.json`, or JSON objects in `rust-project.json` format. |
150 | linkedProjects: Vec<ManifestOrProjectJson> = "[]", | 150 | linkedProjects: Vec<ManifestOrProjectJson> = "[]", |
151 | /// Number of syntax trees rust-analyzer keeps in memory. | 151 | /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. |
152 | lruCapacity: Option<usize> = "null", | 152 | lruCapacity: Option<usize> = "null", |
153 | /// Whether to show `can't find Cargo.toml` error message. | 153 | /// Whether to show `can't find Cargo.toml` error message. |
154 | notifications_cargoTomlNotFound: bool = "true", | 154 | notifications_cargoTomlNotFound: bool = "true", |
155 | /// Enable Proc macro support, cargo.loadOutDirsFromCheck must be | 155 | /// Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be |
156 | /// enabled. | 156 | /// enabled. |
157 | procMacro_enable: bool = "false", | 157 | procMacro_enable: bool = "false", |
158 | 158 | ||
159 | /// Command to be executed instead of 'cargo' for runnables. | 159 | /// Command to be executed instead of 'cargo' for runnables. |
160 | runnables_overrideCargo: Option<String> = "null", | 160 | runnables_overrideCargo: Option<String> = "null", |
161 | /// Additional arguments to be passed to cargo for runnables such as | 161 | /// Additional arguments to be passed to cargo for runnables such as |
162 | /// tests or binaries.\nFor example, it may be '--release'. | 162 | /// tests or binaries.\nFor example, it may be `--release`. |
163 | runnables_cargoExtraArgs: Vec<String> = "[]", | 163 | runnables_cargoExtraArgs: Vec<String> = "[]", |
164 | 164 | ||
165 | /// Path to the rust compiler sources, for usage in rustc_private projects. | 165 | /// Path to the rust compiler sources, for usage in rustc_private projects. |
166 | rustcSource : Option<String> = "null", | 166 | rustcSource : Option<String> = "null", |
167 | 167 | ||
168 | /// Additional arguments to rustfmt. | 168 | /// Additional arguments to `rustfmt`. |
169 | rustfmt_extraArgs: Vec<String> = "[]", | 169 | rustfmt_extraArgs: Vec<String> = "[]", |
170 | /// Advanced option, fully override the command rust-analyzer uses for | 170 | /// Advanced option, fully override the command rust-analyzer uses for |
171 | /// formatting. | 171 | /// formatting. |
@@ -349,12 +349,12 @@ impl Config { | |||
349 | res | 349 | res |
350 | } | 350 | } |
351 | pub fn update(&mut self, json: serde_json::Value) { | 351 | pub fn update(&mut self, json: serde_json::Value) { |
352 | log::info!("Config::update({:#})", json); | 352 | log::info!("updating config from JSON: {:#}", json); |
353 | if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { | 353 | if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { |
354 | return; | 354 | return; |
355 | } | 355 | } |
356 | self.do_update(json); | 356 | self.do_update(json); |
357 | log::info!("Config::update() = {:#?}", self); | 357 | log::info!("updated config: {:#?}", self); |
358 | } | 358 | } |
359 | fn do_update(&mut self, json: serde_json::Value) { | 359 | fn do_update(&mut self, json: serde_json::Value) { |
360 | let data = ConfigData::from_json(json); | 360 | let data = ConfigData::from_json(json); |
@@ -758,7 +758,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json | |||
758 | ], | 758 | ], |
759 | "enumDescriptions": [ | 759 | "enumDescriptions": [ |
760 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", | 760 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", |
761 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name", | 761 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name.", |
762 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." | 762 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." |
763 | ], | 763 | ], |
764 | }, | 764 | }, |
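For a sense of how the options documented above reach `Config::update`, here is one plausible payload, assuming the client forwards its `rust-analyzer.*` settings as nested JSON (the key layout and every value are illustrative only):

fn example_config_json() -> serde_json::Value {
    serde_json::json!({
        "cargo": { "autoreload": true, "allFeatures": false },
        // `null` means "fall back to rust-analyzer.cargo.allFeatures".
        "checkOnSave": { "allFeatures": null, "allTargets": true },
        "diagnostics": { "disabled": ["unresolved-proc-macro"] },
        "inlayHints": { "maxLength": 25 },
        "lruCapacity": 256
    })
}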
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 66f8bee99..1207b31c4 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -5,11 +5,13 @@ | |||
5 | use std::{ | 5 | use std::{ |
6 | io::Write as _, | 6 | io::Write as _, |
7 | process::{self, Stdio}, | 7 | process::{self, Stdio}, |
8 | sync::Arc, | ||
8 | }; | 9 | }; |
9 | 10 | ||
10 | use ide::{ | 11 | use ide::{ |
11 | CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, | 12 | AssistConfig, CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction, |
12 | NavigationTarget, Query, RangeInfo, Runnable, RunnableKind, SearchScope, SymbolKind, TextEdit, | 13 | HoverGotoTypeData, LineIndex, NavigationTarget, Query, RangeInfo, Runnable, RunnableKind, |
14 | SearchScope, SourceChange, SymbolKind, TextEdit, | ||
13 | }; | 15 | }; |
14 | use itertools::Itertools; | 16 | use itertools::Itertools; |
15 | use lsp_server::ErrorCode; | 17 | use lsp_server::ErrorCode; |
@@ -400,6 +402,45 @@ pub(crate) fn handle_workspace_symbol( | |||
400 | } | 402 | } |
401 | } | 403 | } |
402 | 404 | ||
405 | pub(crate) fn handle_will_rename_files( | ||
406 | snap: GlobalStateSnapshot, | ||
407 | params: lsp_types::RenameFilesParams, | ||
408 | ) -> Result<Option<lsp_types::WorkspaceEdit>> { | ||
409 | let _p = profile::span("handle_will_rename_files"); | ||
410 | |||
411 | let source_changes: Vec<SourceChange> = params | ||
412 | .files | ||
413 | .into_iter() | ||
414 | .filter_map(|file_rename| { | ||
415 | let from = Url::parse(&file_rename.old_uri).ok()?; | ||
416 | let to = Url::parse(&file_rename.new_uri).ok()?; | ||
417 | |||
418 | let from_path = from.to_file_path().ok()?; | ||
419 | let to_path = to.to_file_path().ok()?; | ||
420 | |||
421 | // Limit to single-level moves for now. | ||
422 | match (from_path.parent(), to_path.parent()) { | ||
423 | (Some(p1), Some(p2)) if p1 == p2 => { | ||
424 | let new_name = to_path.file_stem()?; | ||
425 | let new_name = new_name.to_str()?; | ||
426 | Some((snap.url_to_file_id(&from).ok()?, new_name.to_string())) | ||
427 | } | ||
428 | _ => None, | ||
429 | } | ||
430 | }) | ||
431 | .filter_map(|(file_id, new_name)| { | ||
432 | snap.analysis.will_rename_file(file_id, &new_name).ok()? | ||
433 | }) | ||
434 | .collect(); | ||
435 | |||
436 | // Drop file system edits since we're just renaming things on the same level | ||
437 | let edits = source_changes.into_iter().map(|it| it.source_file_edits).flatten().collect(); | ||
438 | let source_change = SourceChange::from_edits(edits, Vec::new()); | ||
439 | |||
440 | let workspace_edit = to_proto::workspace_edit(&snap, source_change)?; | ||
441 | Ok(Some(workspace_edit)) | ||
442 | } | ||
443 | |||
403 | pub(crate) fn handle_goto_definition( | 444 | pub(crate) fn handle_goto_definition( |
404 | snap: GlobalStateSnapshot, | 445 | snap: GlobalStateSnapshot, |
405 | params: lsp_types::GotoDefinitionParams, | 446 | params: lsp_types::GotoDefinitionParams, |
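The parent-directory comparison in `handle_will_rename_files` above is what limits the feature to single-level renames: only when the old and new paths share a directory is the operation treated as a module rename, and the new module name is the destination's file stem. The same check as a standalone sketch (the helper name is made up):

use std::path::Path;

// `module_rename_target(Path::new("src/foo.rs"), Path::new("src/bar.rs"))`
// yields `Some("bar")`; a move into another directory yields `None` and is
// skipped by the handler.
fn module_rename_target(from: &Path, to: &Path) -> Option<String> {
    match (from.parent(), to.parent()) {
        (Some(p1), Some(p2)) if p1 == p2 => Some(to.file_stem()?.to_str()?.to_owned()),
        _ => None,
    }
}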
@@ -865,58 +906,8 @@ pub(crate) fn handle_formatting( | |||
865 | } | 906 | } |
866 | } | 907 | } |
867 | 908 | ||
868 | fn handle_fixes( | ||
869 | snap: &GlobalStateSnapshot, | ||
870 | params: &lsp_types::CodeActionParams, | ||
871 | res: &mut Vec<lsp_ext::CodeAction>, | ||
872 | ) -> Result<()> { | ||
873 | let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; | ||
874 | let line_index = snap.analysis.file_line_index(file_id)?; | ||
875 | let range = from_proto::text_range(&line_index, params.range); | ||
876 | |||
877 | match ¶ms.context.only { | ||
878 | Some(v) => { | ||
879 | if !v.iter().any(|it| { | ||
880 | it == &lsp_types::CodeActionKind::EMPTY | ||
881 | || it == &lsp_types::CodeActionKind::QUICKFIX | ||
882 | }) { | ||
883 | return Ok(()); | ||
884 | } | ||
885 | } | ||
886 | None => {} | ||
887 | }; | ||
888 | |||
889 | let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, file_id)?; | ||
890 | |||
891 | for fix in diagnostics | ||
892 | .into_iter() | ||
893 | .filter_map(|d| d.fix) | ||
894 | .filter(|fix| fix.fix_trigger_range.intersect(range).is_some()) | ||
895 | { | ||
896 | let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?; | ||
897 | let action = lsp_ext::CodeAction { | ||
898 | title: fix.label.to_string(), | ||
899 | group: None, | ||
900 | kind: Some(CodeActionKind::QUICKFIX), | ||
901 | edit: Some(edit), | ||
902 | is_preferred: Some(false), | ||
903 | data: None, | ||
904 | }; | ||
905 | res.push(action); | ||
906 | } | ||
907 | |||
908 | for fix in snap.check_fixes.get(&file_id).into_iter().flatten() { | ||
909 | let fix_range = from_proto::text_range(&line_index, fix.range); | ||
910 | if fix_range.intersect(range).is_none() { | ||
911 | continue; | ||
912 | } | ||
913 | res.push(fix.action.clone()); | ||
914 | } | ||
915 | Ok(()) | ||
916 | } | ||
917 | |||
918 | pub(crate) fn handle_code_action( | 909 | pub(crate) fn handle_code_action( |
919 | mut snap: GlobalStateSnapshot, | 910 | snap: GlobalStateSnapshot, |
920 | params: lsp_types::CodeActionParams, | 911 | params: lsp_types::CodeActionParams, |
921 | ) -> Result<Option<Vec<lsp_ext::CodeAction>>> { | 912 | ) -> Result<Option<Vec<lsp_ext::CodeAction>>> { |
922 | let _p = profile::span("handle_code_action"); | 913 | let _p = profile::span("handle_code_action"); |
@@ -932,24 +923,35 @@ pub(crate) fn handle_code_action( | |||
932 | let range = from_proto::text_range(&line_index, params.range); | 923 | let range = from_proto::text_range(&line_index, params.range); |
933 | let frange = FileRange { file_id, range }; | 924 | let frange = FileRange { file_id, range }; |
934 | 925 | ||
935 | snap.config.assist.allowed = params | 926 | let assists_config = AssistConfig { |
936 | .clone() | 927 | allowed: params |
937 | .context | 928 | .clone() |
938 | .only | 929 | .context |
939 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); | 930 | .only |
931 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()), | ||
932 | ..snap.config.assist | ||
933 | }; | ||
940 | 934 | ||
941 | let mut res: Vec<lsp_ext::CodeAction> = Vec::new(); | 935 | let mut res: Vec<lsp_ext::CodeAction> = Vec::new(); |
942 | 936 | ||
943 | handle_fixes(&snap, ¶ms, &mut res)?; | 937 | let include_quick_fixes = match ¶ms.context.only { |
938 | Some(v) => v.iter().any(|it| { | ||
939 | it == &lsp_types::CodeActionKind::EMPTY || it == &lsp_types::CodeActionKind::QUICKFIX | ||
940 | }), | ||
941 | None => true, | ||
942 | }; | ||
943 | if include_quick_fixes { | ||
944 | add_quick_fixes(&snap, frange, &line_index, &mut res)?; | ||
945 | } | ||
944 | 946 | ||
945 | if snap.config.client_caps.code_action_resolve { | 947 | if snap.config.client_caps.code_action_resolve { |
946 | for (index, assist) in | 948 | for (index, assist) in |
947 | snap.analysis.unresolved_assists(&snap.config.assist, frange)?.into_iter().enumerate() | 949 | snap.analysis.assists(&assists_config, frange)?.into_iter().enumerate() |
948 | { | 950 | { |
949 | res.push(to_proto::unresolved_code_action(&snap, params.clone(), assist, index)?); | 951 | res.push(to_proto::unresolved_code_action(&snap, params.clone(), assist, index)?); |
950 | } | 952 | } |
951 | } else { | 953 | } else { |
952 | for assist in snap.analysis.resolved_assists(&snap.config.assist, frange)?.into_iter() { | 954 | for assist in snap.analysis.resolve_assists(&assists_config, frange)?.into_iter() { |
953 | res.push(to_proto::resolved_code_action(&snap, assist)?); | 955 | res.push(to_proto::resolved_code_action(&snap, assist)?); |
954 | } | 956 | } |
955 | } | 957 | } |
@@ -957,6 +959,40 @@ pub(crate) fn handle_code_action( | |||
957 | Ok(Some(res)) | 959 | Ok(Some(res)) |
958 | } | 960 | } |
959 | 961 | ||
962 | fn add_quick_fixes( | ||
963 | snap: &GlobalStateSnapshot, | ||
964 | frange: FileRange, | ||
965 | line_index: &Arc<LineIndex>, | ||
966 | acc: &mut Vec<lsp_ext::CodeAction>, | ||
967 | ) -> Result<()> { | ||
968 | let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, frange.file_id)?; | ||
969 | |||
970 | for fix in diagnostics | ||
971 | .into_iter() | ||
972 | .filter_map(|d| d.fix) | ||
973 | .filter(|fix| fix.fix_trigger_range.intersect(frange.range).is_some()) | ||
974 | { | ||
975 | let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?; | ||
976 | let action = lsp_ext::CodeAction { | ||
977 | title: fix.label.to_string(), | ||
978 | group: None, | ||
979 | kind: Some(CodeActionKind::QUICKFIX), | ||
980 | edit: Some(edit), | ||
981 | is_preferred: Some(false), | ||
982 | data: None, | ||
983 | }; | ||
984 | acc.push(action); | ||
985 | } | ||
986 | |||
987 | for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() { | ||
988 | let fix_range = from_proto::text_range(&line_index, fix.range); | ||
989 | if fix_range.intersect(frange.range).is_some() { | ||
990 | acc.push(fix.action.clone()); | ||
991 | } | ||
992 | } | ||
993 | Ok(()) | ||
994 | } | ||
995 | |||
960 | pub(crate) fn handle_code_action_resolve( | 996 | pub(crate) fn handle_code_action_resolve( |
961 | mut snap: GlobalStateSnapshot, | 997 | mut snap: GlobalStateSnapshot, |
962 | mut code_action: lsp_ext::CodeAction, | 998 | mut code_action: lsp_ext::CodeAction, |
@@ -978,7 +1014,7 @@ pub(crate) fn handle_code_action_resolve( | |||
978 | .only | 1014 | .only |
979 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); | 1015 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); |
980 | 1016 | ||
981 | let assists = snap.analysis.resolved_assists(&snap.config.assist, frange)?; | 1017 | let assists = snap.analysis.resolve_assists(&snap.config.assist, frange)?; |
982 | let (id, index) = split_once(¶ms.id, ':').unwrap(); | 1018 | let (id, index) = split_once(¶ms.id, ':').unwrap(); |
983 | let index = index.parse::<usize>().unwrap(); | 1019 | let index = index.parse::<usize>().unwrap(); |
984 | let assist = &assists[index]; | 1020 | let assist = &assists[index]; |
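The `handle_code_action` rework above replaces the old early return in `handle_fixes` with an explicit `include_quick_fixes` flag: quick fixes are added when the client sends no `only` filter at all, or when the filter asks for the empty or `quickfix` kinds. The decision in isolation, as a hedged sketch:

use lsp_types::CodeActionKind;

// Mirrors the gating logic in `handle_code_action`: `None` means the client
// did not restrict kinds, so quick fixes are welcome.
fn include_quick_fixes(only: Option<&[CodeActionKind]>) -> bool {
    match only {
        Some(kinds) => kinds
            .iter()
            .any(|it| it == &CodeActionKind::EMPTY || it == &CodeActionKind::QUICKFIX),
        None => true,
    }
}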
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 79fe30e53..d538ad69a 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs | |||
@@ -46,7 +46,7 @@ pub type Error = Box<dyn std::error::Error + Send + Sync>; | |||
46 | pub type Result<T, E = Error> = std::result::Result<T, E>; | 46 | pub type Result<T, E = Error> = std::result::Result<T, E>; |
47 | 47 | ||
48 | pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> { | 48 | pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> { |
49 | let res = T::deserialize(&json) | 49 | let res = serde_path_to_error::deserialize(&json) |
50 | .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?; | 50 | .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?; |
51 | Ok(res) | 51 | Ok(res) |
52 | } | 52 | } |
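Switching `from_json` to `serde_path_to_error` means a malformed config value is reported together with the path of the offending field, not just serde's bare message. A rough illustration with a made-up struct:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Example {
    check_on_save: bool,
}

fn main() {
    let json = serde_json::json!({ "check_on_save": "yes" });
    // Plain serde reports something like:
    //   invalid type: string "yes", expected a boolean
    let plain = Example::deserialize(&json).unwrap_err();
    // serde_path_to_error prefixes the failing field's path, e.g.:
    //   check_on_save: invalid type: string "yes", expected a boolean
    let with_path: Result<Example, _> = serde_path_to_error::deserialize(&json);
    println!("{}\n{}", plain, with_path.unwrap_err());
}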
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index ec3d5e060..5d55dc96e 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -485,6 +485,7 @@ impl GlobalState { | |||
485 | .on::<lsp_types::request::SemanticTokensRangeRequest>( | 485 | .on::<lsp_types::request::SemanticTokensRangeRequest>( |
486 | handlers::handle_semantic_tokens_range, | 486 | handlers::handle_semantic_tokens_range, |
487 | ) | 487 | ) |
488 | .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files) | ||
488 | .on::<lsp_ext::Ssr>(handlers::handle_ssr) | 489 | .on::<lsp_ext::Ssr>(handlers::handle_ssr) |
489 | .finish(); | 490 | .finish(); |
490 | Ok(()) | 491 | Ok(()) |
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index c6a6f11e1..21015591c 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml | |||
@@ -13,7 +13,7 @@ doctest = false | |||
13 | [dependencies] | 13 | [dependencies] |
14 | itertools = "0.9.0" | 14 | itertools = "0.9.0" |
15 | rowan = "0.10.0" | 15 | rowan = "0.10.0" |
16 | rustc_lexer = { version = "691.0.0", package = "rustc-ap-rustc_lexer" } | 16 | rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" } |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | arrayvec = "0.5.1" | 18 | arrayvec = "0.5.1" |
19 | once_cell = "1.3.1" | 19 | once_cell = "1.3.1" |
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index e4a9b945c..636ce166d 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs | |||
@@ -358,6 +358,7 @@ pub enum Effect { | |||
358 | Async(SyntaxToken), | 358 | Async(SyntaxToken), |
359 | Unsafe(SyntaxToken), | 359 | Unsafe(SyntaxToken), |
360 | Try(SyntaxToken), | 360 | Try(SyntaxToken), |
361 | Const(SyntaxToken), | ||
361 | // Very much not an effect, but we stuff it into this node anyway | 362 | // Very much not an effect, but we stuff it into this node anyway |
362 | Label(ast::Label), | 363 | Label(ast::Label), |
363 | } | 364 | } |
@@ -373,6 +374,9 @@ impl ast::EffectExpr { | |||
373 | if let Some(token) = self.try_token() { | 374 | if let Some(token) = self.try_token() { |
374 | return Effect::Try(token); | 375 | return Effect::Try(token); |
375 | } | 376 | } |
377 | if let Some(token) = self.const_token() { | ||
378 | return Effect::Const(token); | ||
379 | } | ||
376 | if let Some(label) = self.label() { | 380 | if let Some(label) = self.label() { |
377 | return Effect::Label(label); | 381 | return Effect::Label(label); |
378 | } | 382 | } |
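`Effect::Const` covers the inline-const expression form, which the parser now treats like the other block modifiers (`async`, `unsafe`, `try`). A small sketch that such source is accepted by the syntax crate after this change; actually running code like this needs the nightly `inline_const` feature:

use syntax::SourceFile;

fn main() {
    // `const { ... }` in expression position now parses as an EFFECT_EXPR
    // whose `const` token is exposed via `const_token()`.
    let src = "fn f() { let x = const { 1 + 1 }; }";
    let parse = SourceFile::parse(src);
    assert!(parse.errors().is_empty());
}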
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 1588ba93e..c5b80bffe 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs | |||
@@ -763,6 +763,7 @@ impl EffectExpr { | |||
763 | pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) } | 763 | pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) } |
764 | pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } | 764 | pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } |
765 | pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } | 765 | pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } |
766 | pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } | ||
766 | pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) } | 767 | pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) } |
767 | } | 768 | } |
768 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 769 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
@@ -1251,6 +1252,14 @@ impl TupleStructPat { | |||
1251 | pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } | 1252 | pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } |
1252 | } | 1253 | } |
1253 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 1254 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1255 | pub struct ConstBlockPat { | ||
1256 | pub(crate) syntax: SyntaxNode, | ||
1257 | } | ||
1258 | impl ConstBlockPat { | ||
1259 | pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } | ||
1260 | pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) } | ||
1261 | } | ||
1262 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
1254 | pub struct RecordPatFieldList { | 1263 | pub struct RecordPatFieldList { |
1255 | pub(crate) syntax: SyntaxNode, | 1264 | pub(crate) syntax: SyntaxNode, |
1256 | } | 1265 | } |
@@ -1369,6 +1378,7 @@ pub enum Pat { | |||
1369 | SlicePat(SlicePat), | 1378 | SlicePat(SlicePat), |
1370 | TuplePat(TuplePat), | 1379 | TuplePat(TuplePat), |
1371 | TupleStructPat(TupleStructPat), | 1380 | TupleStructPat(TupleStructPat), |
1381 | ConstBlockPat(ConstBlockPat), | ||
1372 | } | 1382 | } |
1373 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 1383 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1374 | pub enum FieldList { | 1384 | pub enum FieldList { |
@@ -2772,6 +2782,17 @@ impl AstNode for TupleStructPat { | |||
2772 | } | 2782 | } |
2773 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 2783 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
2774 | } | 2784 | } |
2785 | impl AstNode for ConstBlockPat { | ||
2786 | fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT } | ||
2787 | fn cast(syntax: SyntaxNode) -> Option<Self> { | ||
2788 | if Self::can_cast(syntax.kind()) { | ||
2789 | Some(Self { syntax }) | ||
2790 | } else { | ||
2791 | None | ||
2792 | } | ||
2793 | } | ||
2794 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
2795 | } | ||
2775 | impl AstNode for RecordPatFieldList { | 2796 | impl AstNode for RecordPatFieldList { |
2776 | fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST } | 2797 | fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST } |
2777 | fn cast(syntax: SyntaxNode) -> Option<Self> { | 2798 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
@@ -3242,12 +3263,15 @@ impl From<TuplePat> for Pat { | |||
3242 | impl From<TupleStructPat> for Pat { | 3263 | impl From<TupleStructPat> for Pat { |
3243 | fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) } | 3264 | fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) } |
3244 | } | 3265 | } |
3266 | impl From<ConstBlockPat> for Pat { | ||
3267 | fn from(node: ConstBlockPat) -> Pat { Pat::ConstBlockPat(node) } | ||
3268 | } | ||
3245 | impl AstNode for Pat { | 3269 | impl AstNode for Pat { |
3246 | fn can_cast(kind: SyntaxKind) -> bool { | 3270 | fn can_cast(kind: SyntaxKind) -> bool { |
3247 | match kind { | 3271 | match kind { |
3248 | IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT | 3272 | IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT |
3249 | | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT | 3273 | | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT |
3250 | | TUPLE_PAT | TUPLE_STRUCT_PAT => true, | 3274 | | TUPLE_PAT | TUPLE_STRUCT_PAT | CONST_BLOCK_PAT => true, |
3251 | _ => false, | 3275 | _ => false, |
3252 | } | 3276 | } |
3253 | } | 3277 | } |
@@ -3268,6 +3292,7 @@ impl AstNode for Pat { | |||
3268 | SLICE_PAT => Pat::SlicePat(SlicePat { syntax }), | 3292 | SLICE_PAT => Pat::SlicePat(SlicePat { syntax }), |
3269 | TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }), | 3293 | TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }), |
3270 | TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }), | 3294 | TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }), |
3295 | CONST_BLOCK_PAT => Pat::ConstBlockPat(ConstBlockPat { syntax }), | ||
3271 | _ => return None, | 3296 | _ => return None, |
3272 | }; | 3297 | }; |
3273 | Some(res) | 3298 | Some(res) |
@@ -3289,6 +3314,7 @@ impl AstNode for Pat { | |||
3289 | Pat::SlicePat(it) => &it.syntax, | 3314 | Pat::SlicePat(it) => &it.syntax, |
3290 | Pat::TuplePat(it) => &it.syntax, | 3315 | Pat::TuplePat(it) => &it.syntax, |
3291 | Pat::TupleStructPat(it) => &it.syntax, | 3316 | Pat::TupleStructPat(it) => &it.syntax, |
3317 | Pat::ConstBlockPat(it) => &it.syntax, | ||
3292 | } | 3318 | } |
3293 | } | 3319 | } |
3294 | } | 3320 | } |
@@ -4137,6 +4163,11 @@ impl std::fmt::Display for TupleStructPat { | |||
4137 | std::fmt::Display::fmt(self.syntax(), f) | 4163 | std::fmt::Display::fmt(self.syntax(), f) |
4138 | } | 4164 | } |
4139 | } | 4165 | } |
4166 | impl std::fmt::Display for ConstBlockPat { | ||
4167 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
4168 | std::fmt::Display::fmt(self.syntax(), f) | ||
4169 | } | ||
4170 | } | ||
4140 | impl std::fmt::Display for RecordPatFieldList { | 4171 | impl std::fmt::Display for RecordPatFieldList { |
4141 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | 4172 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { |
4142 | std::fmt::Display::fmt(self.syntax(), f) | 4173 | std::fmt::Display::fmt(self.syntax(), f) |
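A quick sketch of reaching the newly generated node from a parsed file through the `AstNode` impl added above (the helper is illustrative, not part of the crate):

use syntax::{ast, AstNode, SourceFile};

// Find the first `const { ... }` pattern in `text`, e.g. in
// "fn main() { let const { 15 } = (); }".
fn first_const_block_pat(text: &str) -> Option<ast::ConstBlockPat> {
    let file = SourceFile::parse(text).tree();
    file.syntax().descendants().find_map(ast::ConstBlockPat::cast)
}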
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index ba7e5d2fb..cafa4c198 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs | |||
@@ -4,6 +4,11 @@ | |||
4 | //! Note that all functions here intended to be stupid constructors, which just | 4 | //! Note that all functions here intended to be stupid constructors, which just |
5 | //! assemble a finish node from immediate children. If you want to do something | 5 | //! assemble a finish node from immediate children. If you want to do something |
6 | //! smarter than that, it probably doesn't belong in this module. | 6 | //! smarter than that, it probably doesn't belong in this module. |
7 | //! | ||
8 | //! Keep in mind that `from_text` functions should be kept private. The public | ||
9 | //! API should require to assemble every node piecewise. The trick of | ||
10 | //! `parse(format!())` we use internally is an implementation detail -- long | ||
11 | //! term, it will be replaced with direct tree manipulation. | ||
7 | use itertools::Itertools; | 12 | use itertools::Itertools; |
8 | use stdx::format_to; | 13 | use stdx::format_to; |
9 | 14 | ||
@@ -16,7 +21,8 @@ pub fn name(text: &str) -> ast::Name { | |||
16 | pub fn name_ref(text: &str) -> ast::NameRef { | 21 | pub fn name_ref(text: &str) -> ast::NameRef { |
17 | ast_from_text(&format!("fn f() {{ {}; }}", text)) | 22 | ast_from_text(&format!("fn f() {{ {}; }}", text)) |
18 | } | 23 | } |
19 | 24 | // FIXME: replace stringly-typed constructor with a family of typed ctors, a-la | |
25 | // `expr_xxx`. | ||
20 | pub fn ty(text: &str) -> ast::Type { | 26 | pub fn ty(text: &str) -> ast::Type { |
21 | ast_from_text(&format!("impl {} for D {{}};", text)) | 27 | ast_from_text(&format!("impl {} for D {{}};", text)) |
22 | } | 28 | } |
diff --git a/crates/syntax/test_data/parser/inline/ok/0156_const_block_pat.rast b/crates/syntax/test_data/parser/inline/ok/0156_const_block_pat.rast new file mode 100644 index 000000000..8ff4822c4 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0156_const_block_pat.rast | |||
@@ -0,0 +1,76 @@ | |||
1 | SOURCE_FILE@0..78 | ||
2 |   FN@0..77 | ||
3 |     FN_KW@0..2 "fn" | ||
4 |     WHITESPACE@2..3 " " | ||
5 |     NAME@3..7 | ||
6 |       IDENT@3..7 "main" | ||
7 |     PARAM_LIST@7..9 | ||
8 |       L_PAREN@7..8 "(" | ||
9 |       R_PAREN@8..9 ")" | ||
10 |     WHITESPACE@9..10 " " | ||
11 |     BLOCK_EXPR@10..77 | ||
12 |       L_CURLY@10..11 "{" | ||
13 |       WHITESPACE@11..16 "\n    " | ||
14 |       LET_STMT@16..38 | ||
15 |         LET_KW@16..19 "let" | ||
16 |         WHITESPACE@19..20 " " | ||
17 |         CONST_BLOCK_PAT@20..32 | ||
18 |           CONST_KW@20..25 "const" | ||
19 |           WHITESPACE@25..26 " " | ||
20 |           BLOCK_EXPR@26..32 | ||
21 |             L_CURLY@26..27 "{" | ||
22 |             WHITESPACE@27..28 " " | ||
23 |             LITERAL@28..30 | ||
24 |               INT_NUMBER@28..30 "15" | ||
25 |             WHITESPACE@30..31 " " | ||
26 |             R_CURLY@31..32 "}" | ||
27 |         WHITESPACE@32..33 " " | ||
28 |         EQ@33..34 "=" | ||
29 |         WHITESPACE@34..35 " " | ||
30 |         TUPLE_EXPR@35..37 | ||
31 |           L_PAREN@35..36 "(" | ||
32 |           R_PAREN@36..37 ")" | ||
33 |         SEMICOLON@37..38 ";" | ||
34 |       WHITESPACE@38..43 "\n    " | ||
35 |       LET_STMT@43..75 | ||
36 |         LET_KW@43..46 "let" | ||
37 |         WHITESPACE@46..47 " " | ||
38 |         CONST_BLOCK_PAT@47..69 | ||
39 |           CONST_KW@47..52 "const" | ||
40 |           WHITESPACE@52..53 " " | ||
41 |           BLOCK_EXPR@53..69 | ||
42 |             L_CURLY@53..54 "{" | ||
43 |             WHITESPACE@54..55 " " | ||
44 |             EXPR_STMT@55..61 | ||
45 |               CALL_EXPR@55..60 | ||
46 |                 PATH_EXPR@55..58 | ||
47 |                   PATH@55..58 | ||
48 |                     PATH_SEGMENT@55..58 | ||
49 |                       NAME_REF@55..58 | ||
50 |                         IDENT@55..58 "foo" | ||
51 |                 ARG_LIST@58..60 | ||
52 |                   L_PAREN@58..59 "(" | ||
53 |                   R_PAREN@59..60 ")" | ||
54 |               SEMICOLON@60..61 ";" | ||
55 |             WHITESPACE@61..62 " " | ||
56 |             CALL_EXPR@62..67 | ||
57 |               PATH_EXPR@62..65 | ||
58 |                 PATH@62..65 | ||
59 |                   PATH_SEGMENT@62..65 | ||
60 |                     NAME_REF@62..65 | ||
61 |                       IDENT@62..65 "bar" | ||
62 |               ARG_LIST@65..67 | ||
63 |                 L_PAREN@65..66 "(" | ||
64 |                 R_PAREN@66..67 ")" | ||
65 |             WHITESPACE@67..68 " " | ||
66 |             R_CURLY@68..69 "}" | ||
67 |         WHITESPACE@69..70 " " | ||
68 |         EQ@70..71 "=" | ||
69 |         WHITESPACE@71..72 " " | ||
70 |         TUPLE_EXPR@72..74 | ||
71 |           L_PAREN@72..73 "(" | ||
72 |           R_PAREN@73..74 ")" | ||
73 |         SEMICOLON@74..75 ";" | ||
74 |       WHITESPACE@75..76 "\n" | ||
75 |       R_CURLY@76..77 "}" | ||
76 |   WHITESPACE@77..78 "\n" | ||
diff --git a/crates/syntax/test_data/parser/inline/ok/0156_const_block_pat.rs b/crates/syntax/test_data/parser/inline/ok/0156_const_block_pat.rs new file mode 100644 index 000000000..dce9defac --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0156_const_block_pat.rs | |||
@@ -0,0 +1,4 @@ | |||
1 | fn main() { | ||
2 | let const { 15 } = (); | ||
3 | let const { foo(); bar() } = (); | ||
4 | } | ||
diff --git a/crates/syntax/test_data/parser/inline/ok/0157_const_block.rast b/crates/syntax/test_data/parser/inline/ok/0157_const_block.rast new file mode 100644 index 000000000..d5d2c8fe3 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0157_const_block.rast | |||
@@ -0,0 +1,23 @@ | |||
1 | SOURCE_FILE@0..21 | ||
2 |   FN@0..20 | ||
3 |     FN_KW@0..2 "fn" | ||
4 |     WHITESPACE@2..3 " " | ||
5 |     NAME@3..4 | ||
6 |       IDENT@3..4 "f" | ||
7 |     PARAM_LIST@4..6 | ||
8 |       L_PAREN@4..5 "(" | ||
9 |       R_PAREN@5..6 ")" | ||
10 |     WHITESPACE@6..7 " " | ||
11 |     BLOCK_EXPR@7..20 | ||
12 |       L_CURLY@7..8 "{" | ||
13 |       WHITESPACE@8..9 " " | ||
14 |       EFFECT_EXPR@9..18 | ||
15 |         CONST_KW@9..14 "const" | ||
16 |         WHITESPACE@14..15 " " | ||
17 |         BLOCK_EXPR@15..18 | ||
18 |           L_CURLY@15..16 "{" | ||
19 |           WHITESPACE@16..17 " " | ||
20 |           R_CURLY@17..18 "}" | ||
21 |       WHITESPACE@18..19 " " | ||
22 |       R_CURLY@19..20 "}" | ||
23 |   WHITESPACE@20..21 "\n" | ||
diff --git a/crates/syntax/test_data/parser/inline/ok/0157_const_block.rs b/crates/syntax/test_data/parser/inline/ok/0157_const_block.rs new file mode 100644 index 000000000..a2e3565a3 --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0157_const_block.rs | |||
@@ -0,0 +1 @@ | |||
1 | fn f() { const { } } | ||
diff --git a/crates/syntax/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast b/crates/syntax/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast new file mode 100644 index 000000000..4a1f712aa --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast | |||
@@ -0,0 +1,57 @@ | |||
1 | SOURCE_FILE@0..74 | ||
2 |   MACRO_RULES@0..36 | ||
3 |     MACRO_RULES_KW@0..11 "macro_rules" | ||
4 |     BANG@11..12 "!" | ||
5 |     WHITESPACE@12..13 " " | ||
6 |     NAME@13..14 | ||
7 |       IDENT@13..14 "m" | ||
8 |     WHITESPACE@14..15 " " | ||
9 |     TOKEN_TREE@15..35 | ||
10 |       L_PAREN@15..16 "(" | ||
11 |       WHITESPACE@16..17 " " | ||
12 |       TOKEN_TREE@17..27 | ||
13 |         L_PAREN@17..18 "(" | ||
14 |         DOLLAR@18..19 "$" | ||
15 |         IDENT@19..20 "i" | ||
16 |         COLON@20..21 ":" | ||
17 |         IDENT@21..26 "ident" | ||
18 |         R_PAREN@26..27 ")" | ||
19 |       WHITESPACE@27..28 " " | ||
20 |       EQ@28..29 "=" | ||
21 |       R_ANGLE@29..30 ">" | ||
22 |       WHITESPACE@30..31 " " | ||
23 |       TOKEN_TREE@31..33 | ||
24 |         L_CURLY@31..32 "{" | ||
25 |         R_CURLY@32..33 "}" | ||
26 |       WHITESPACE@33..34 " " | ||
27 |       R_PAREN@34..35 ")" | ||
28 |     SEMICOLON@35..36 ";" | ||
29 |   WHITESPACE@36..37 "\n" | ||
30 |   MACRO_RULES@37..73 | ||
31 |     MACRO_RULES_KW@37..48 "macro_rules" | ||
32 |     BANG@48..49 "!" | ||
33 |     WHITESPACE@49..50 " " | ||
34 |     NAME@50..51 | ||
35 |       IDENT@50..51 "m" | ||
36 |     WHITESPACE@51..52 " " | ||
37 |     TOKEN_TREE@52..72 | ||
38 |       L_BRACK@52..53 "[" | ||
39 |       WHITESPACE@53..54 " " | ||
40 |       TOKEN_TREE@54..64 | ||
41 |         L_PAREN@54..55 "(" | ||
42 |         DOLLAR@55..56 "$" | ||
43 |         IDENT@56..57 "i" | ||
44 |         COLON@57..58 ":" | ||
45 |         IDENT@58..63 "ident" | ||
46 |         R_PAREN@63..64 ")" | ||
47 |       WHITESPACE@64..65 " " | ||
48 |       EQ@65..66 "=" | ||
49 |       R_ANGLE@66..67 ">" | ||
50 |       WHITESPACE@67..68 " " | ||
51 |       TOKEN_TREE@68..70 | ||
52 |         L_CURLY@68..69 "{" | ||
53 |         R_CURLY@69..70 "}" | ||
54 |       WHITESPACE@70..71 " " | ||
55 |       R_BRACK@71..72 "]" | ||
56 |     SEMICOLON@72..73 ";" | ||
57 |   WHITESPACE@73..74 "\n" | ||
diff --git a/crates/syntax/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs b/crates/syntax/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs new file mode 100644 index 000000000..6033a28cd --- /dev/null +++ b/crates/syntax/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs | |||
@@ -0,0 +1,2 @@ | |||
1 | macro_rules! m ( ($i:ident) => {} ); | ||
2 | macro_rules! m [ ($i:ident) => {} ]; | ||
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index cb2ae6fc1..3025dc8d6 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc | |||
@@ -5,7 +5,7 @@ rust-analyzer.assist.importPrefix (default: `"plain"`):: | |||
5 | rust-analyzer.callInfo.full (default: `true`):: | 5 | rust-analyzer.callInfo.full (default: `true`):: |
6 | Show function name and docs in parameter hints. | 6 | Show function name and docs in parameter hints. |
7 | rust-analyzer.cargo.autoreload (default: `true`):: | 7 | rust-analyzer.cargo.autoreload (default: `true`):: |
8 | Automatically refresh project info via `cargo metadata` on Cargo.toml changes. | 8 | Automatically refresh project info via `cargo metadata` on `Cargo.toml` changes. |
9 | rust-analyzer.cargo.allFeatures (default: `false`):: | 9 | rust-analyzer.cargo.allFeatures (default: `false`):: |
10 | Activate all available features. | 10 | Activate all available features. |
11 | rust-analyzer.cargo.features (default: `[]`):: | 11 | rust-analyzer.cargo.features (default: `[]`):: |
@@ -21,7 +21,7 @@ rust-analyzer.cargo.noSysroot (default: `false`):: | |||
21 | rust-analyzer.checkOnSave.enable (default: `true`):: | 21 | rust-analyzer.checkOnSave.enable (default: `true`):: |
22 | Run specified `cargo check` command for diagnostics on save. | 22 | Run specified `cargo check` command for diagnostics on save. |
23 | rust-analyzer.checkOnSave.allFeatures (default: `null`):: | 23 | rust-analyzer.checkOnSave.allFeatures (default: `null`):: |
24 | Check with all features (will be passed as `--all-features`). Defaults to `rust-analyzer.cargo.allFeatures`. | 24 | Check with all features (will be passed as `--all-features`). Defaults to `#rust-analyzer.cargo.allFeatures#`. |
25 | rust-analyzer.checkOnSave.allTargets (default: `true`):: | 25 | rust-analyzer.checkOnSave.allTargets (default: `true`):: |
26 | Check all targets and tests (will be passed as `--all-targets`). | 26 | Check all targets and tests (will be passed as `--all-targets`). |
27 | rust-analyzer.checkOnSave.command (default: `"check"`):: | 27 | rust-analyzer.checkOnSave.command (default: `"check"`):: |
@@ -29,11 +29,11 @@ rust-analyzer.checkOnSave.command (default: `"check"`):: | |||
29 | rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`):: | 29 | rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`):: |
30 | Do not activate the `default` feature. | 30 | Do not activate the `default` feature. |
31 | rust-analyzer.checkOnSave.target (default: `null`):: | 31 | rust-analyzer.checkOnSave.target (default: `null`):: |
32 | Check for a specific target. Defaults to `rust-analyzer.cargo.target`. | 32 | Check for a specific target. Defaults to `#rust-analyzer.cargo.target#`. |
33 | rust-analyzer.checkOnSave.extraArgs (default: `[]`):: | 33 | rust-analyzer.checkOnSave.extraArgs (default: `[]`):: |
34 | Extra arguments for `cargo check`. | 34 | Extra arguments for `cargo check`. |
35 | rust-analyzer.checkOnSave.features (default: `null`):: | 35 | rust-analyzer.checkOnSave.features (default: `null`):: |
36 | List of features to activate. Defaults to `rust-analyzer.cargo.features`. | 36 | List of features to activate. Defaults to `#rust-analyzer.cargo.features#`. |
37 | rust-analyzer.checkOnSave.overrideCommand (default: `null`):: | 37 | rust-analyzer.checkOnSave.overrideCommand (default: `null`):: |
38 | Advanced option, fully override the command rust-analyzer uses for checking. The command should include `--message-format=json` or similar option. | 38 | Advanced option, fully override the command rust-analyzer uses for checking. The command should include `--message-format=json` or similar option. |
39 | rust-analyzer.completion.addCallArgumentSnippets (default: `true`):: | 39 | rust-analyzer.completion.addCallArgumentSnippets (default: `true`):: |
@@ -43,7 +43,7 @@ rust-analyzer.completion.addCallParenthesis (default: `true`):: | |||
43 | rust-analyzer.completion.postfix.enable (default: `true`):: | 43 | rust-analyzer.completion.postfix.enable (default: `true`):: |
44 | Whether to show postfix snippets like `dbg`, `if`, `not`, etc. | 44 | Whether to show postfix snippets like `dbg`, `if`, `not`, etc. |
45 | rust-analyzer.completion.autoimport.enable (default: `true`):: | 45 | rust-analyzer.completion.autoimport.enable (default: `true`):: |
46 | Toggles the additional completions that automatically add imports when completed. Note that your client have to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. | 46 | Toggles the additional completions that automatically add imports when completed. Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. |
47 | rust-analyzer.diagnostics.enable (default: `true`):: | 47 | rust-analyzer.diagnostics.enable (default: `true`):: |
48 | Whether to show native rust-analyzer diagnostics. | 48 | Whether to show native rust-analyzer diagnostics. |
49 | rust-analyzer.diagnostics.enableExperimental (default: `true`):: | 49 | rust-analyzer.diagnostics.enableExperimental (default: `true`):: |
@@ -51,9 +51,9 @@ rust-analyzer.diagnostics.enableExperimental (default: `true`):: | |||
51 | rust-analyzer.diagnostics.disabled (default: `[]`):: | 51 | rust-analyzer.diagnostics.disabled (default: `[]`):: |
52 | List of rust-analyzer diagnostics to disable. | 52 | List of rust-analyzer diagnostics to disable. |
53 | rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: | 53 | rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: |
54 | List of warnings that should be displayed with info severity.\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the problems panel. | 54 | List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the `Problems Panel`. |
55 | rust-analyzer.diagnostics.warningsAsInfo (default: `[]`):: | 55 | rust-analyzer.diagnostics.warningsAsInfo (default: `[]`):: |
56 | List of warnings that should be displayed with hint severity.\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel. | 56 | List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code and will not show up in the `Problems Panel`. |
57 | rust-analyzer.files.watcher (default: `"client"`):: | 57 | rust-analyzer.files.watcher (default: `"client"`):: |
58 | Controls file watching implementation. | 58 | Controls file watching implementation. |
59 | rust-analyzer.hoverActions.debug (default: `true`):: | 59 | rust-analyzer.hoverActions.debug (default: `true`):: |
@@ -71,7 +71,7 @@ rust-analyzer.hoverActions.linksInHover (default: `true`):: | |||
71 | rust-analyzer.inlayHints.chainingHints (default: `true`):: | 71 | rust-analyzer.inlayHints.chainingHints (default: `true`):: |
72 | Whether to show inlay type hints for method chains. | 72 | Whether to show inlay type hints for method chains. |
73 | rust-analyzer.inlayHints.maxLength (default: `null`):: | 73 | rust-analyzer.inlayHints.maxLength (default: `null`):: |
74 | Maximum length for inlay hints. | 74 | Maximum length for inlay hints. Default is unlimited. |
75 | rust-analyzer.inlayHints.parameterHints (default: `true`):: | 75 | rust-analyzer.inlayHints.parameterHints (default: `true`):: |
76 | Whether to show function parameter name inlay hints at the call site. | 76 | Whether to show function parameter name inlay hints at the call site. |
77 | rust-analyzer.inlayHints.typeHints (default: `true`):: | 77 | rust-analyzer.inlayHints.typeHints (default: `true`):: |
@@ -87,20 +87,20 @@ rust-analyzer.lens.run (default: `true`):: | |||
87 | rust-analyzer.lens.methodReferences (default: `false`):: | 87 | rust-analyzer.lens.methodReferences (default: `false`):: |
88 | Whether to show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. | 88 | Whether to show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. |
89 | rust-analyzer.linkedProjects (default: `[]`):: | 89 | rust-analyzer.linkedProjects (default: `[]`):: |
90 | Disable project auto-discovery in favor of explicitly specified set of projects. \nElements must be paths pointing to Cargo.toml, rust-project.json, or JSON objects in rust-project.json format. | 90 | Disable project auto-discovery in favor of explicitly specified set of projects.\n\nElements must be paths pointing to `Cargo.toml`, `rust-project.json`, or JSON objects in `rust-project.json` format. |
91 | rust-analyzer.lruCapacity (default: `null`):: | 91 | rust-analyzer.lruCapacity (default: `null`):: |
92 | Number of syntax trees rust-analyzer keeps in memory. | 92 | Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. |
93 | rust-analyzer.notifications.cargoTomlNotFound (default: `true`):: | 93 | rust-analyzer.notifications.cargoTomlNotFound (default: `true`):: |
94 | Whether to show `can't find Cargo.toml` error message. | 94 | Whether to show `can't find Cargo.toml` error message. |
95 | rust-analyzer.procMacro.enable (default: `false`):: | 95 | rust-analyzer.procMacro.enable (default: `false`):: |
96 | Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled. | 96 | Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be enabled. |
97 | rust-analyzer.runnables.overrideCargo (default: `null`):: | 97 | rust-analyzer.runnables.overrideCargo (default: `null`):: |
98 | Command to be executed instead of 'cargo' for runnables. | 98 | Command to be executed instead of 'cargo' for runnables. |
99 | rust-analyzer.runnables.cargoExtraArgs (default: `[]`):: | 99 | rust-analyzer.runnables.cargoExtraArgs (default: `[]`):: |
100 | Additional arguments to be passed to cargo for runnables such as tests or binaries.\nFor example, it may be '--release'. | 100 | Additional arguments to be passed to cargo for runnables such as tests or binaries.\nFor example, it may be `--release`. |
101 | rust-analyzer.rustcSource (default: `null`):: | 101 | rust-analyzer.rustcSource (default: `null`):: |
102 | Path to the rust compiler sources, for usage in rustc_private projects. | 102 | Path to the rust compiler sources, for usage in rustc_private projects. |
103 | rust-analyzer.rustfmt.extraArgs (default: `[]`):: | 103 | rust-analyzer.rustfmt.extraArgs (default: `[]`):: |
104 | Additional arguments to rustfmt. | 104 | Additional arguments to `rustfmt`. |
105 | rust-analyzer.rustfmt.overrideCommand (default: `null`):: | 105 | rust-analyzer.rustfmt.overrideCommand (default: `null`):: |
106 | Advanced option, fully override the command rust-analyzer uses for formatting. | 106 | Advanced option, fully override the command rust-analyzer uses for formatting. |
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 40f10972f..d4121b401 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc | |||
@@ -218,7 +218,7 @@ The are several LSP client implementations for vim or neovim: | |||
218 | * automatically install and upgrade stable/nightly releases | 218 | * automatically install and upgrade stable/nightly releases |
219 | * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc. | 219 | * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc. |
220 | * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc. | 220 | * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc. |
221 | * inlay hints for method chaining support, _Neovim Only_ | 221 | * inlay hints for variables and method chaining, _Neovim Only_ |
222 | * semantic highlighting is not implemented yet | 222 | * semantic highlighting is not implemented yet |
223 | 223 | ||
224 | ==== LanguageClient-neovim | 224 | ==== LanguageClient-neovim |
@@ -302,6 +302,9 @@ If the LSP binary is not available, GNOME Builder can install it when opening a | |||
302 | rust-analyzer is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files. | 302 | rust-analyzer is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files. |
303 | Please consult your editor's documentation to learn how to configure LSP servers. | 303 | Please consult your editor's documentation to learn how to configure LSP servers. |
304 | 304 | ||
305 | To verify which configuration is actually used by rust-analyzer, set `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages. | ||
306 | Logs should show both the JSON that rust-analyzer sees as well as the updated config. | ||
307 | |||
305 | This is the list of config options rust-analyzer supports: | 308 | This is the list of config options rust-analyzer supports: |
306 | 309 | ||
307 | include::./generated_config.adoc[] | 310 | include::./generated_config.adoc[] |
diff --git a/editors/code/package.json b/editors/code/package.json index abcc84eda..13749a084 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -250,12 +250,12 @@ | |||
250 | } | 250 | } |
251 | ], | 251 | ], |
252 | "default": null, | 252 | "default": null, |
253 | "description": "Environment variables passed to the runnable launched using `Test ` or `Debug` lens or `rust-analyzer.run` command." | 253 | "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command." |
254 | }, | 254 | }, |
255 | "rust-analyzer.inlayHints.enable": { | 255 | "rust-analyzer.inlayHints.enable": { |
256 | "type": "boolean", | 256 | "type": "boolean", |
257 | "default": true, | 257 | "default": true, |
258 | "description": "Whether to show inlay hints" | 258 | "description": "Whether to show inlay hints." |
259 | }, | 259 | }, |
260 | "rust-analyzer.updates.channel": { | 260 | "rust-analyzer.updates.channel": { |
261 | "type": "string", | 261 | "type": "string", |
@@ -265,15 +265,15 @@ | |||
265 | ], | 265 | ], |
266 | "default": "stable", | 266 | "default": "stable", |
267 | "markdownEnumDescriptions": [ | 267 | "markdownEnumDescriptions": [ |
268 | "`\"stable\"` updates are shipped weekly, they don't contain cutting-edge features from VSCode proposed APIs but have less bugs in general", | 268 | "`stable` updates are shipped weekly, they don't contain cutting-edge features from VSCode proposed APIs but have less bugs in general.", |
269 | "`\"nightly\"` updates are shipped daily (extension updates automatically by downloading artifacts directly from GitHub), they contain cutting-edge features and latest bug fixes. These releases help us get your feedback very quickly and speed up rust-analyzer development **drastically**" | 269 | "`nightly` updates are shipped daily (extension updates automatically by downloading artifacts directly from GitHub), they contain cutting-edge features and latest bug fixes. These releases help us get your feedback very quickly and speed up rust-analyzer development **drastically**." |
270 | ], | 270 | ], |
271 | "markdownDescription": "Choose `\"nightly\"` updates to get the latest features and bug fixes every day. While `\"stable\"` releases occur weekly and don't contain cutting-edge features from VSCode proposed APIs" | 271 | "markdownDescription": "Choose `nightly` updates to get the latest features and bug fixes every day. While `stable` releases occur weekly and don't contain cutting-edge features from VSCode proposed APIs." |
272 | }, | 272 | }, |
273 | "rust-analyzer.updates.askBeforeDownload": { | 273 | "rust-analyzer.updates.askBeforeDownload": { |
274 | "type": "boolean", | 274 | "type": "boolean", |
275 | "default": true, | 275 | "default": true, |
276 | "description": "Whether to ask for permission before downloading any files from the Internet" | 276 | "description": "Whether to ask for permission before downloading any files from the Internet." |
277 | }, | 277 | }, |
278 | "rust-analyzer.serverPath": { | 278 | "rust-analyzer.serverPath": { |
279 | "type": [ | 279 | "type": [ |
@@ -281,7 +281,7 @@ | |||
281 | "string" | 281 | "string" |
282 | ], | 282 | ], |
283 | "default": null, | 283 | "default": null, |
284 | "description": "Path to rust-analyzer executable (points to bundled binary by default). If this is set, then \"rust-analyzer.updates.channel\" setting is not used" | 284 | "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default). If this is set, then `#rust-analyzer.updates.channel#` setting is not used" |
285 | }, | 285 | }, |
286 | "rust-analyzer.trace.server": { | 286 | "rust-analyzer.trace.server": { |
287 | "type": "string", | 287 | "type": "string", |
@@ -297,10 +297,10 @@ | |||
297 | "Full log" | 297 | "Full log" |
298 | ], | 298 | ], |
299 | "default": "off", | 299 | "default": "off", |
300 | "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)" | 300 | "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)." |
301 | }, | 301 | }, |
302 | "rust-analyzer.trace.extension": { | 302 | "rust-analyzer.trace.extension": { |
303 | "description": "Enable logging of VS Code extensions itself", | 303 | "description": "Enable logging of VS Code extensions itself.", |
304 | "type": "boolean", | 304 | "type": "boolean", |
305 | "default": false | 305 | "default": false |
306 | }, | 306 | }, |
@@ -327,14 +327,14 @@ | |||
327 | } | 327 | } |
328 | }, | 328 | }, |
329 | "rust-analyzer.debug.openDebugPane": { | 329 | "rust-analyzer.debug.openDebugPane": { |
330 | "description": "Whether to open up the Debug Pane on debugging start.", | 330 | "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.", |
331 | "type": "boolean", | 331 | "type": "boolean", |
332 | "default": false | 332 | "default": false |
333 | }, | 333 | }, |
334 | "rust-analyzer.debug.engineSettings": { | 334 | "rust-analyzer.debug.engineSettings": { |
335 | "type": "object", | 335 | "type": "object", |
336 | "default": {}, | 336 | "default": {}, |
337 | "description": "Optional settings passed to the debug engine. Example:\n{ \"lldb\": { \"terminal\":\"external\"} }" | 337 | "markdownDescription": "Optional settings passed to the debug engine. Example: `{ \"lldb\": { \"terminal\":\"external\"} }`" |
338 | }, | 338 | }, |
339 | "rust-analyzer.assist.importMergeBehaviour": { | 339 | "rust-analyzer.assist.importMergeBehaviour": { |
340 | "markdownDescription": "The strategy to use when inserting new imports or merging imports.", | 340 | "markdownDescription": "The strategy to use when inserting new imports or merging imports.", |
@@ -362,7 +362,7 @@ | |||
362 | ], | 362 | ], |
363 | "enumDescriptions": [ | 363 | "enumDescriptions": [ |
364 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", | 364 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", |
365 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name", | 365 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name.", |
366 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." | 366 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." |
367 | ] | 367 | ] |
368 | }, | 368 | }, |
@@ -372,7 +372,7 @@ | |||
372 | "type": "boolean" | 372 | "type": "boolean" |
373 | }, | 373 | }, |
374 | "rust-analyzer.cargo.autoreload": { | 374 | "rust-analyzer.cargo.autoreload": { |
375 | "markdownDescription": "Automatically refresh project info via `cargo metadata` on Cargo.toml changes.", | 375 | "markdownDescription": "Automatically refresh project info via `cargo metadata` on `Cargo.toml` changes.", |
376 | "default": true, | 376 | "default": true, |
377 | "type": "boolean" | 377 | "type": "boolean" |
378 | }, | 378 | }, |
@@ -418,7 +418,7 @@ | |||
418 | "type": "boolean" | 418 | "type": "boolean" |
419 | }, | 419 | }, |
420 | "rust-analyzer.checkOnSave.allFeatures": { | 420 | "rust-analyzer.checkOnSave.allFeatures": { |
421 | "markdownDescription": "Check with all features (will be passed as `--all-features`). Defaults to `rust-analyzer.cargo.allFeatures`.", | 421 | "markdownDescription": "Check with all features (will be passed as `--all-features`). Defaults to `#rust-analyzer.cargo.allFeatures#`.", |
422 | "default": null, | 422 | "default": null, |
423 | "type": [ | 423 | "type": [ |
424 | "null", | 424 | "null", |
@@ -444,7 +444,7 @@ | |||
444 | ] | 444 | ] |
445 | }, | 445 | }, |
446 | "rust-analyzer.checkOnSave.target": { | 446 | "rust-analyzer.checkOnSave.target": { |
447 | "markdownDescription": "Check for a specific target. Defaults to `rust-analyzer.cargo.target`.", | 447 | "markdownDescription": "Check for a specific target. Defaults to `#rust-analyzer.cargo.target#`.", |
448 | "default": null, | 448 | "default": null, |
449 | "type": [ | 449 | "type": [ |
450 | "null", | 450 | "null", |
@@ -460,7 +460,7 @@ | |||
460 | } | 460 | } |
461 | }, | 461 | }, |
462 | "rust-analyzer.checkOnSave.features": { | 462 | "rust-analyzer.checkOnSave.features": { |
463 | "markdownDescription": "List of features to activate. Defaults to `rust-analyzer.cargo.features`.", | 463 | "markdownDescription": "List of features to activate. Defaults to `#rust-analyzer.cargo.features#`.", |
464 | "default": null, | 464 | "default": null, |
465 | "type": [ | 465 | "type": [ |
466 | "null", | 466 | "null", |
@@ -497,7 +497,7 @@ | |||
497 | "type": "boolean" | 497 | "type": "boolean" |
498 | }, | 498 | }, |
499 | "rust-analyzer.completion.autoimport.enable": { | 499 | "rust-analyzer.completion.autoimport.enable": { |
500 | "markdownDescription": "Toggles the additional completions that automatically add imports when completed. Note that your client have to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", | 500 | "markdownDescription": "Toggles the additional completions that automatically add imports when completed. Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", |
501 | "default": true, | 501 | "default": true, |
502 | "type": "boolean" | 502 | "type": "boolean" |
503 | }, | 503 | }, |
@@ -521,7 +521,7 @@ | |||
521 | "uniqueItems": true | 521 | "uniqueItems": true |
522 | }, | 522 | }, |
523 | "rust-analyzer.diagnostics.warningsAsHint": { | 523 | "rust-analyzer.diagnostics.warningsAsHint": { |
524 | "markdownDescription": "List of warnings that should be displayed with info severity.\\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the problems panel.", | 524 | "markdownDescription": "List of warnings that should be displayed with info severity.\\n\\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the `Problems Panel`.", |
525 | "default": [], | 525 | "default": [], |
526 | "type": "array", | 526 | "type": "array", |
527 | "items": { | 527 | "items": { |
@@ -529,7 +529,7 @@ | |||
529 | } | 529 | } |
530 | }, | 530 | }, |
531 | "rust-analyzer.diagnostics.warningsAsInfo": { | 531 | "rust-analyzer.diagnostics.warningsAsInfo": { |
532 | "markdownDescription": "List of warnings that should be displayed with hint severity.\\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel.", | 532 | "markdownDescription": "List of warnings that should be displayed with hint severity.\\n\\nThe warnings will be indicated by faded text or three dots in code and will not show up in the `Problems Panel`.", |
533 | "default": [], | 533 | "default": [], |
534 | "type": "array", | 534 | "type": "array", |
535 | "items": { | 535 | "items": { |
@@ -577,7 +577,7 @@ | |||
577 | "type": "boolean" | 577 | "type": "boolean" |
578 | }, | 578 | }, |
579 | "rust-analyzer.inlayHints.maxLength": { | 579 | "rust-analyzer.inlayHints.maxLength": { |
580 | "markdownDescription": "Maximum length for inlay hints.", | 580 | "markdownDescription": "Maximum length for inlay hints. Default is unlimited.", |
581 | "default": null, | 581 | "default": null, |
582 | "type": [ | 582 | "type": [ |
583 | "null", | 583 | "null", |
@@ -621,7 +621,7 @@ | |||
621 | "type": "boolean" | 621 | "type": "boolean" |
622 | }, | 622 | }, |
623 | "rust-analyzer.linkedProjects": { | 623 | "rust-analyzer.linkedProjects": { |
624 | "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set of projects. \\nElements must be paths pointing to Cargo.toml, rust-project.json, or JSON objects in rust-project.json format.", | 624 | "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set of projects.\\n\\nElements must be paths pointing to `Cargo.toml`, `rust-project.json`, or JSON objects in `rust-project.json` format.", |
625 | "default": [], | 625 | "default": [], |
626 | "type": "array", | 626 | "type": "array", |
627 | "items": { | 627 | "items": { |
@@ -632,7 +632,7 @@ | |||
632 | } | 632 | } |
633 | }, | 633 | }, |
634 | "rust-analyzer.lruCapacity": { | 634 | "rust-analyzer.lruCapacity": { |
635 | "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory.", | 635 | "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.", |
636 | "default": null, | 636 | "default": null, |
637 | "type": [ | 637 | "type": [ |
638 | "null", | 638 | "null", |
@@ -646,7 +646,7 @@ | |||
646 | "type": "boolean" | 646 | "type": "boolean" |
647 | }, | 647 | }, |
648 | "rust-analyzer.procMacro.enable": { | 648 | "rust-analyzer.procMacro.enable": { |
649 | "markdownDescription": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", | 649 | "markdownDescription": "Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be enabled.", |
650 | "default": false, | 650 | "default": false, |
651 | "type": "boolean" | 651 | "type": "boolean" |
652 | }, | 652 | }, |
@@ -659,7 +659,7 @@ | |||
659 | ] | 659 | ] |
660 | }, | 660 | }, |
661 | "rust-analyzer.runnables.cargoExtraArgs": { | 661 | "rust-analyzer.runnables.cargoExtraArgs": { |
662 | "markdownDescription": "Additional arguments to be passed to cargo for runnables such as tests or binaries.\\nFor example, it may be '--release'.", | 662 | "markdownDescription": "Additional arguments to be passed to cargo for runnables such as tests or binaries.\\nFor example, it may be `--release`.", |
663 | "default": [], | 663 | "default": [], |
664 | "type": "array", | 664 | "type": "array", |
665 | "items": { | 665 | "items": { |
@@ -675,7 +675,7 @@ | |||
675 | ] | 675 | ] |
676 | }, | 676 | }, |
677 | "rust-analyzer.rustfmt.extraArgs": { | 677 | "rust-analyzer.rustfmt.extraArgs": { |
678 | "markdownDescription": "Additional arguments to rustfmt.", | 678 | "markdownDescription": "Additional arguments to `rustfmt`.", |
679 | "default": [], | 679 | "default": [], |
680 | "type": "array", | 680 | "type": "array", |
681 | "items": { | 681 | "items": { |
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 4b2d3c8a5..282240d84 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts | |||
@@ -131,7 +131,7 @@ async function tryActivate(context: vscode.ExtensionContext) { | |||
131 | ctx.pushCleanup(activateTaskProvider(workspaceFolder, ctx.config)); | 131 | ctx.pushCleanup(activateTaskProvider(workspaceFolder, ctx.config)); |
132 | 132 | ||
133 | activateInlayHints(ctx); | 133 | activateInlayHints(ctx); |
134 | warnAboutRustLangExtensionConflict(); | 134 | warnAboutExtensionConflicts(); |
135 | 135 | ||
136 | vscode.workspace.onDidChangeConfiguration( | 136 | vscode.workspace.onDidChangeConfiguration( |
137 | _ => ctx?.client?.sendNotification('workspace/didChangeConfiguration', { settings: "" }), | 137 | _ => ctx?.client?.sendNotification('workspace/didChangeConfiguration', { settings: "" }), |
@@ -287,12 +287,14 @@ async function getServer(config: Config, state: PersistentState): Promise<string | |||
287 | if (config.package.releaseTag === null) return "rust-analyzer"; | 287 | if (config.package.releaseTag === null) return "rust-analyzer"; |
288 | 288 | ||
289 | let platform: string | undefined; | 289 | let platform: string | undefined; |
290 | if (process.arch === "x64" || process.arch === "ia32") { | 290 | if ((process.arch === "x64" || process.arch === "ia32") && process.platform === "win32") { |
291 | if (process.platform === "linux") platform = "linux"; | 291 | platform = "x86_64-pc-windows-msvc"; |
292 | if (process.platform === "darwin") platform = "mac"; | 292 | } else if (process.arch === "x64" && process.platform === "linux") { |
293 | if (process.platform === "win32") platform = "windows"; | 293 | platform = "x86_64-unknown-linux-gnu"; |
294 | } else if (process.arch === "x64" && process.platform === "darwin") { | ||
295 | platform = "x86_64-apple-darwin"; | ||
294 | } else if (process.arch === "arm64" && process.platform === "darwin") { | 296 | } else if (process.arch === "arm64" && process.platform === "darwin") { |
295 | platform = "mac"; | 297 | platform = "aarch64-apple-darwin"; |
296 | } | 298 | } |
297 | if (platform === undefined) { | 299 | if (platform === undefined) { |
298 | vscode.window.showErrorMessage( | 300 | vscode.window.showErrorMessage( |
@@ -305,7 +307,7 @@ async function getServer(config: Config, state: PersistentState): Promise<string | |||
305 | ); | 307 | ); |
306 | return undefined; | 308 | return undefined; |
307 | } | 309 | } |
308 | const ext = platform === "windows" ? ".exe" : ""; | 310 | const ext = platform.indexOf("-windows-") !== -1 ? ".exe" : ""; |
309 | const dest = path.join(config.globalStoragePath, `rust-analyzer-${platform}${ext}`); | 311 | const dest = path.join(config.globalStoragePath, `rust-analyzer-${platform}${ext}`); |
310 | const exists = await fs.stat(dest).then(() => true, () => false); | 312 | const exists = await fs.stat(dest).then(() => true, () => false); |
311 | if (!exists) { | 313 | if (!exists) { |
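Editor's note: the `getServer()` change above replaces the short platform labels (`linux`, `mac`, `windows`) with full Rust target triples, so the `rust-analyzer-${platform}${ext}` download name lines up with the `rust-analyzer-{target}` artifacts produced by `xtask dist` (see the `dist.rs` hunk at the end of this diff), and the `.exe` suffix is now derived from the `-windows-` component of the triple instead of a hard-coded label. The following is an equivalent, slightly more declarative sketch of the same mapping, written for illustration only; it is not the code that was committed.

    // Map (platform, arch) to the Rust target triple used in artifact names.
    // This reproduces the branches added in the diff above as a lookup table,
    // including the ia32-on-Windows case.
    const platforms: { [key: string]: string } = {
        "win32 x64": "x86_64-pc-windows-msvc",
        "win32 ia32": "x86_64-pc-windows-msvc",
        "linux x64": "x86_64-unknown-linux-gnu",
        "darwin x64": "x86_64-apple-darwin",
        "darwin arm64": "aarch64-apple-darwin",
    };
    const platform: string | undefined =
        platforms[`${process.platform} ${process.arch}`];

    // Windows binaries carry an .exe suffix; detect it from the triple itself
    // (equivalent to the indexOf("-windows-") check in the diff).
    const ext = platform !== undefined && platform.includes("-windows-") ? ".exe" : "";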
@@ -411,11 +413,21 @@ async function queryForGithubToken(state: PersistentState): Promise<void> { | |||
411 | } | 413 | } |
412 | } | 414 | } |
413 | 415 | ||
414 | function warnAboutRustLangExtensionConflict() { | 416 | function warnAboutExtensionConflicts() { |
415 | const rustLangExt = vscode.extensions.getExtension("rust-lang.rust"); | 417 | const conflicting = [ |
416 | if (rustLangExt !== undefined) { | 418 | ["rust-analyzer", "matklad.rust-analyzer"], |
419 | ["Rust", "rust-lang.rust"], | ||
420 | ["Rust", "kalitaalexey.vscode-rust"], | ||
421 | ]; | ||
422 | |||
423 | const found = conflicting.filter( | ||
424 | nameId => vscode.extensions.getExtension(nameId[1]) !== undefined); | ||
425 | |||
426 | if (found.length > 1) { | ||
427 | const fst = found[0]; | ||
428 | const sec = found[1]; | ||
417 | vscode.window.showWarningMessage( | 429 | vscode.window.showWarningMessage( |
418 | "You have both rust-analyzer (matklad.rust-analyzer) and Rust (rust-lang.rust) " + | 430 | `You have both the ${fst[0]} (${fst[1]}) and ${sec[0]} (${sec[1]}) ` + |
419 | "plugins enabled. These are known to conflict and cause various functions of " + | 431 | "plugins enabled. These are known to conflict and cause various functions of " + |
420 | "both plugins to not work correctly. You should disable one of them.", "Got it"); | 432 | "both plugins to not work correctly. You should disable one of them.", "Got it"); |
421 | }; | 433 | }; |
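Editor's note: the generalized `warnAboutExtensionConflicts()` above includes rust-analyzer itself in the candidate list, so `found` always contains at least one entry when this extension is running; the warning therefore fires only when `found.length > 1`, i.e. when one of the other Rust extensions is installed alongside it, and the first two matches are named in the message. A small hedged sketch of that behaviour outside of VS Code; the `installed` set is an assumed stand-in for `vscode.extensions.getExtension()`.

    // Simulate the filter from the diff with rust-analyzer and rust-lang.rust installed.
    const conflicting: [string, string][] = [
        ["rust-analyzer", "matklad.rust-analyzer"],
        ["Rust", "rust-lang.rust"],
        ["Rust", "kalitaalexey.vscode-rust"],
    ];
    const installed = new Set(["matklad.rust-analyzer", "rust-lang.rust"]);

    const found = conflicting.filter(([, id]) => installed.has(id));
    if (found.length > 1) {
        const [fst, sec] = found;
        // Mirrors the warning text composed in the diff above.
        console.log(
            `You have both the ${fst[0]} (${fst[1]}) and ${sec[0]} (${sec[1]}) ` +
            "plugins enabled. These are known to conflict."
        );
    }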
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 78a0b54ba..96b4ea448 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml | |||
@@ -15,7 +15,7 @@ flate2 = "1.0" | |||
15 | pico-args = "0.3.1" | 15 | pico-args = "0.3.1" |
16 | proc-macro2 = "1.0.8" | 16 | proc-macro2 = "1.0.8" |
17 | quote = "1.0.2" | 17 | quote = "1.0.2" |
18 | ungrammar = "1.4" | 18 | ungrammar = "1.5" |
19 | walkdir = "2.3.1" | 19 | walkdir = "2.3.1" |
20 | write-json = "0.1.0" | 20 | write-json = "0.1.0" |
21 | xshell = "0.1" | 21 | xshell = "0.1" |
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs index a69ced5cc..2b8012bdd 100644 --- a/xtask/src/ast_src.rs +++ b/xtask/src/ast_src.rs | |||
@@ -132,6 +132,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc { | |||
132 | "RANGE_PAT", | 132 | "RANGE_PAT", |
133 | "LITERAL_PAT", | 133 | "LITERAL_PAT", |
134 | "MACRO_PAT", | 134 | "MACRO_PAT", |
135 | "CONST_BLOCK_PAT", | ||
135 | // atoms | 136 | // atoms |
136 | "TUPLE_EXPR", | 137 | "TUPLE_EXPR", |
137 | "ARRAY_EXPR", | 138 | "ARRAY_EXPR", |
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index d07ad9420..d59b88131 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs | |||
@@ -70,7 +70,6 @@ fn dist_server() -> Result<()> { | |||
70 | let src = | 70 | let src = |
71 | Path::new("target").join(&target).join("release").join(format!("rust-analyzer{}", suffix)); | 71 | Path::new("target").join(&target).join("release").join(format!("rust-analyzer{}", suffix)); |
72 | let dst = Path::new("dist").join(format!("rust-analyzer-{}{}", target, suffix)); | 72 | let dst = Path::new("dist").join(format!("rust-analyzer-{}{}", target, suffix)); |
73 | cp(&src, &dst)?; | ||
74 | gzip(&src, &dst.with_extension("gz"))?; | 73 | gzip(&src, &dst.with_extension("gz"))?; |
75 | 74 | ||
76 | // FIXME: the old names are temporarily kept for client compatibility, but they should be removed | 75 | // FIXME: the old names are temporarily kept for client compatibility, but they should be removed |