-rw-r--r-- | .github/ISSUE_TEMPLATE/critical_nightly_regression.md | 17
-rw-r--r-- | Cargo.lock | 33
-rw-r--r-- | crates/flycheck/Cargo.toml | 1
-rw-r--r-- | crates/flycheck/src/lib.rs | 68
-rw-r--r-- | crates/hir_def/src/item_tree.rs | 7
-rw-r--r-- | crates/hir_ty/Cargo.toml | 6
-rw-r--r-- | crates/ide_assists/src/handlers/extract_function.rs | 36
-rw-r--r-- | crates/rust-analyzer/src/config.rs | 9
-rw-r--r-- | crates/rust-analyzer/src/diagnostics.rs | 1
-rw-r--r-- | crates/rust-analyzer/src/diagnostics/to_proto.rs | 56
-rw-r--r-- | crates/syntax/Cargo.toml | 2
-rw-r--r-- | docs/dev/README.md | 22
-rw-r--r-- | docs/user/generated_config.adoc | 6
-rw-r--r-- | editors/code/package.json | 7
-rw-r--r-- | xtask/src/release.rs | 39
-rw-r--r-- | xtask/src/release/changelog.rs | 159
16 files changed, 346 insertions(+), 123 deletions(-)
diff --git a/.github/ISSUE_TEMPLATE/critical_nightly_regression.md b/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
new file mode 100644
index 000000000..a0b1627d7
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
@@ -0,0 +1,17 @@ | |||
1 | --- | ||
2 | name: Critical Nightly Regression | ||
3 | about: You are using nightly rust-analyzer and the latest version is unusable. | ||
4 | title: '' | ||
5 | labels: '' | ||
6 | assignees: 'matklad' | ||
7 | |||
8 | --- | ||
9 | |||
10 | <!-- | ||
11 | Troubleshooting guide: https://rust-analyzer.github.io/manual.html#troubleshooting | ||
12 | |||
13 | Please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3. | ||
14 | --> | ||
15 | |||
16 | This is a serious regression in nightly and it's important to fix it before the next release. | ||
17 | @matklad, please take a look. | ||
diff --git a/Cargo.lock b/Cargo.lock
index 72dcb9ba7..907973412 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -72,9 +72,9 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" | |||
72 | 72 | ||
73 | [[package]] | 73 | [[package]] |
74 | name = "backtrace" | 74 | name = "backtrace" |
75 | version = "0.3.56" | 75 | version = "0.3.57" |
76 | source = "registry+https://github.com/rust-lang/crates.io-index" | 76 | source = "registry+https://github.com/rust-lang/crates.io-index" |
77 | checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc" | 77 | checksum = "78ed203b9ba68b242c62b3fb7480f589dd49829be1edb3fe8fc8b4ffda2dcb8d" |
78 | dependencies = [ | 78 | dependencies = [ |
79 | "addr2line", | 79 | "addr2line", |
80 | "cfg-if", | 80 | "cfg-if", |
@@ -168,9 +168,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" | |||
168 | 168 | ||
169 | [[package]] | 169 | [[package]] |
170 | name = "chalk-derive" | 170 | name = "chalk-derive" |
171 | version = "0.60.0" | 171 | version = "0.64.0" |
172 | source = "registry+https://github.com/rust-lang/crates.io-index" | 172 | source = "registry+https://github.com/rust-lang/crates.io-index" |
173 | checksum = "ab0f74445d4fbeaf0217bc1d23978cc73b95b28e8a738b81894580dd646822d2" | 173 | checksum = "d9acf2a9eab79ae7d44cd77ad86a8b1569d7a5e6d9a7db4a0a57a7344dd82c24" |
174 | dependencies = [ | 174 | dependencies = [ |
175 | "proc-macro2", | 175 | "proc-macro2", |
176 | "quote", | 176 | "quote", |
@@ -180,9 +180,9 @@ dependencies = [ | |||
180 | 180 | ||
181 | [[package]] | 181 | [[package]] |
182 | name = "chalk-ir" | 182 | name = "chalk-ir" |
183 | version = "0.60.0" | 183 | version = "0.64.0" |
184 | source = "registry+https://github.com/rust-lang/crates.io-index" | 184 | source = "registry+https://github.com/rust-lang/crates.io-index" |
185 | checksum = "294b1fc6210a5b3bd06c1d01dda48a581e2cafec80b8d659139ce45456644be2" | 185 | checksum = "877661627f54ba3666a72943c43b326cb170d60899e50a8426111e7a657ff032" |
186 | dependencies = [ | 186 | dependencies = [ |
187 | "bitflags", | 187 | "bitflags", |
188 | "chalk-derive", | 188 | "chalk-derive", |
@@ -191,9 +191,9 @@ dependencies = [ | |||
191 | 191 | ||
192 | [[package]] | 192 | [[package]] |
193 | name = "chalk-recursive" | 193 | name = "chalk-recursive" |
194 | version = "0.60.0" | 194 | version = "0.64.0" |
195 | source = "registry+https://github.com/rust-lang/crates.io-index" | 195 | source = "registry+https://github.com/rust-lang/crates.io-index" |
196 | checksum = "1b9386936070be4545bfa22b094b7065af79aa2aeaccc945438f1c5ffe74c30a" | 196 | checksum = "072ffcf17243c2aa3e4b9ea6de3d29e7ef64cfdb0ceccaa431965070a1dc1475" |
197 | dependencies = [ | 197 | dependencies = [ |
198 | "chalk-derive", | 198 | "chalk-derive", |
199 | "chalk-ir", | 199 | "chalk-ir", |
@@ -204,9 +204,9 @@ dependencies = [ | |||
204 | 204 | ||
205 | [[package]] | 205 | [[package]] |
206 | name = "chalk-solve" | 206 | name = "chalk-solve" |
207 | version = "0.60.0" | 207 | version = "0.64.0" |
208 | source = "registry+https://github.com/rust-lang/crates.io-index" | 208 | source = "registry+https://github.com/rust-lang/crates.io-index" |
209 | checksum = "7c12a1ec7e850b50a049f27ef9cf5df3056bbd1acbb3eeb44d024e501a641f3a" | 209 | checksum = "97d4920c9ef2b26dd0b98ffdf070e27fa31e0b6f637463132083cee597e3d326" |
210 | dependencies = [ | 210 | dependencies = [ |
211 | "chalk-derive", | 211 | "chalk-derive", |
212 | "chalk-ir", | 212 | "chalk-ir", |
@@ -396,6 +396,7 @@ dependencies = [ | |||
396 | "crossbeam-channel", | 396 | "crossbeam-channel", |
397 | "jod-thread", | 397 | "jod-thread", |
398 | "log", | 398 | "log", |
399 | "serde", | ||
399 | "serde_json", | 400 | "serde_json", |
400 | "stdx", | 401 | "stdx", |
401 | "toolchain", | 402 | "toolchain", |
@@ -684,9 +685,9 @@ dependencies = [ | |||
684 | 685 | ||
685 | [[package]] | 686 | [[package]] |
686 | name = "idna" | 687 | name = "idna" |
687 | version = "0.2.2" | 688 | version = "0.2.3" |
688 | source = "registry+https://github.com/rust-lang/crates.io-index" | 689 | source = "registry+https://github.com/rust-lang/crates.io-index" |
689 | checksum = "89829a5d69c23d348314a7ac337fe39173b61149a9864deabd260983aed48c21" | 690 | checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" |
690 | dependencies = [ | 691 | dependencies = [ |
691 | "matches", | 692 | "matches", |
692 | "unicode-bidi", | 693 | "unicode-bidi", |
@@ -1293,9 +1294,9 @@ dependencies = [ | |||
1293 | 1294 | ||
1294 | [[package]] | 1295 | [[package]] |
1295 | name = "redox_syscall" | 1296 | name = "redox_syscall" |
1296 | version = "0.2.5" | 1297 | version = "0.2.6" |
1297 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1298 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1298 | checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" | 1299 | checksum = "8270314b5ccceb518e7e578952f0b72b88222d02e8f77f5ecf7abbb673539041" |
1299 | dependencies = [ | 1300 | dependencies = [ |
1300 | "bitflags", | 1301 | "bitflags", |
1301 | ] | 1302 | ] |
@@ -1392,9 +1393,9 @@ dependencies = [ | |||
1392 | 1393 | ||
1393 | [[package]] | 1394 | [[package]] |
1394 | name = "rustc-ap-rustc_lexer" | 1395 | name = "rustc-ap-rustc_lexer" |
1395 | version = "714.0.0" | 1396 | version = "716.0.0" |
1396 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1397 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1397 | checksum = "a35856f140bed0dc7c7d6ba2134099d337377a3a4e11bfc79bccabf1fd4c9d42" | 1398 | checksum = "12eac7554c1d3f49f105f14d53c0f3402220e875983113562701d8e597a0995c" |
1398 | dependencies = [ | 1399 | dependencies = [ |
1399 | "unicode-xid", | 1400 | "unicode-xid", |
1400 | ] | 1401 | ] |
diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml
index 2a1a21b28..18b9ce7df 100644
--- a/crates/flycheck/Cargo.toml
+++ b/crates/flycheck/Cargo.toml
@@ -13,6 +13,7 @@ doctest = false | |||
13 | crossbeam-channel = "0.5.0" | 13 | crossbeam-channel = "0.5.0" |
14 | log = "0.4.8" | 14 | log = "0.4.8" |
15 | cargo_metadata = "0.13" | 15 | cargo_metadata = "0.13" |
16 | serde = { version = "1.0.106", features = ["derive"] } | ||
16 | serde_json = "1.0.48" | 17 | serde_json = "1.0.48" |
17 | jod-thread = "0.1.1" | 18 | jod-thread = "0.1.1" |
18 | 19 | ||
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index e2a59497a..1682d8bde 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -4,13 +4,14 @@ | |||
4 | 4 | ||
5 | use std::{ | 5 | use std::{ |
6 | fmt, | 6 | fmt, |
7 | io::{self, BufReader}, | 7 | io::{self, BufRead, BufReader}, |
8 | path::PathBuf, | 8 | path::PathBuf, |
9 | process::{self, Command, Stdio}, | 9 | process::{self, Command, Stdio}, |
10 | time::Duration, | 10 | time::Duration, |
11 | }; | 11 | }; |
12 | 12 | ||
13 | use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; | 13 | use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; |
14 | use serde::Deserialize; | ||
14 | use stdx::JodChild; | 15 | use stdx::JodChild; |
15 | 16 | ||
16 | pub use cargo_metadata::diagnostic::{ | 17 | pub use cargo_metadata::diagnostic::{ |
@@ -128,7 +129,7 @@ struct FlycheckActor { | |||
128 | 129 | ||
129 | enum Event { | 130 | enum Event { |
130 | Restart(Restart), | 131 | Restart(Restart), |
131 | CheckEvent(Option<cargo_metadata::Message>), | 132 | CheckEvent(Option<CargoMessage>), |
132 | } | 133 | } |
133 | 134 | ||
134 | impl FlycheckActor { | 135 | impl FlycheckActor { |
@@ -180,21 +181,16 @@ impl FlycheckActor { | |||
180 | self.progress(Progress::DidFinish(res)); | 181 | self.progress(Progress::DidFinish(res)); |
181 | } | 182 | } |
182 | Event::CheckEvent(Some(message)) => match message { | 183 | Event::CheckEvent(Some(message)) => match message { |
183 | cargo_metadata::Message::CompilerArtifact(msg) => { | 184 | CargoMessage::CompilerArtifact(msg) => { |
184 | self.progress(Progress::DidCheckCrate(msg.target.name)); | 185 | self.progress(Progress::DidCheckCrate(msg.target.name)); |
185 | } | 186 | } |
186 | 187 | ||
187 | cargo_metadata::Message::CompilerMessage(msg) => { | 188 | CargoMessage::Diagnostic(msg) => { |
188 | self.send(Message::AddDiagnostic { | 189 | self.send(Message::AddDiagnostic { |
189 | workspace_root: self.workspace_root.clone(), | 190 | workspace_root: self.workspace_root.clone(), |
190 | diagnostic: msg.message, | 191 | diagnostic: msg, |
191 | }); | 192 | }); |
192 | } | 193 | } |
193 | |||
194 | cargo_metadata::Message::BuildScriptExecuted(_) | ||
195 | | cargo_metadata::Message::BuildFinished(_) | ||
196 | | cargo_metadata::Message::TextLine(_) | ||
197 | | _ => {} | ||
198 | }, | 194 | }, |
199 | } | 195 | } |
200 | } | 196 | } |
@@ -261,7 +257,7 @@ struct CargoHandle { | |||
261 | child: JodChild, | 257 | child: JodChild, |
262 | #[allow(unused)] | 258 | #[allow(unused)] |
263 | thread: jod_thread::JoinHandle<io::Result<bool>>, | 259 | thread: jod_thread::JoinHandle<io::Result<bool>>, |
264 | receiver: Receiver<cargo_metadata::Message>, | 260 | receiver: Receiver<CargoMessage>, |
265 | } | 261 | } |
266 | 262 | ||
267 | impl CargoHandle { | 263 | impl CargoHandle { |
@@ -294,14 +290,11 @@ impl CargoHandle { | |||
294 | 290 | ||
295 | struct CargoActor { | 291 | struct CargoActor { |
296 | child_stdout: process::ChildStdout, | 292 | child_stdout: process::ChildStdout, |
297 | sender: Sender<cargo_metadata::Message>, | 293 | sender: Sender<CargoMessage>, |
298 | } | 294 | } |
299 | 295 | ||
300 | impl CargoActor { | 296 | impl CargoActor { |
301 | fn new( | 297 | fn new(child_stdout: process::ChildStdout, sender: Sender<CargoMessage>) -> CargoActor { |
302 | child_stdout: process::ChildStdout, | ||
303 | sender: Sender<cargo_metadata::Message>, | ||
304 | ) -> CargoActor { | ||
305 | CargoActor { child_stdout, sender } | 298 | CargoActor { child_stdout, sender } |
306 | } | 299 | } |
307 | fn run(self) -> io::Result<bool> { | 300 | fn run(self) -> io::Result<bool> { |
@@ -315,7 +308,7 @@ impl CargoActor { | |||
315 | // erroneous output. | 308 | // erroneous output. |
316 | let stdout = BufReader::new(self.child_stdout); | 309 | let stdout = BufReader::new(self.child_stdout); |
317 | let mut read_at_least_one_message = false; | 310 | let mut read_at_least_one_message = false; |
318 | for message in cargo_metadata::Message::parse_stream(stdout) { | 311 | for message in stdout.lines() { |
319 | let message = match message { | 312 | let message = match message { |
320 | Ok(message) => message, | 313 | Ok(message) => message, |
321 | Err(err) => { | 314 | Err(err) => { |
@@ -326,13 +319,44 @@ impl CargoActor { | |||
326 | 319 | ||
327 | read_at_least_one_message = true; | 320 | read_at_least_one_message = true; |
328 | 321 | ||
329 | // Skip certain kinds of messages to only spend time on what's useful | 322 | // Try to deserialize a message from Cargo or Rustc. |
330 | match &message { | 323 | let mut deserializer = serde_json::Deserializer::from_str(&message); |
331 | cargo_metadata::Message::CompilerArtifact(artifact) if artifact.fresh => (), | 324 | deserializer.disable_recursion_limit(); |
332 | cargo_metadata::Message::BuildScriptExecuted(_) => (), | 325 | if let Ok(message) = JsonMessage::deserialize(&mut deserializer) { |
333 | _ => self.sender.send(message).unwrap(), | 326 | match message { |
327 | // Skip certain kinds of messages to only spend time on what's useful | ||
328 | JsonMessage::Cargo(message) => match message { | ||
329 | cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => { | ||
330 | self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap() | ||
331 | } | ||
332 | cargo_metadata::Message::CompilerMessage(msg) => { | ||
333 | self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap() | ||
334 | } | ||
335 | |||
336 | cargo_metadata::Message::CompilerArtifact(_) | ||
337 | | cargo_metadata::Message::BuildScriptExecuted(_) | ||
338 | | cargo_metadata::Message::BuildFinished(_) | ||
339 | | cargo_metadata::Message::TextLine(_) | ||
340 | | _ => (), | ||
341 | }, | ||
342 | JsonMessage::Rustc(message) => { | ||
343 | self.sender.send(CargoMessage::Diagnostic(message)).unwrap() | ||
344 | } | ||
345 | } | ||
334 | } | 346 | } |
335 | } | 347 | } |
336 | Ok(read_at_least_one_message) | 348 | Ok(read_at_least_one_message) |
337 | } | 349 | } |
338 | } | 350 | } |
351 | |||
352 | enum CargoMessage { | ||
353 | CompilerArtifact(cargo_metadata::Artifact), | ||
354 | Diagnostic(Diagnostic), | ||
355 | } | ||
356 | |||
357 | #[derive(Deserialize)] | ||
358 | #[serde(untagged)] | ||
359 | enum JsonMessage { | ||
360 | Cargo(cargo_metadata::Message), | ||
361 | Rustc(Diagnostic), | ||
362 | } | ||
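
For context on the hunk above: the new `JsonMessage` enum is deserialized with `#[serde(untagged)]`, so serde tries the variants in declaration order — a line that parses as a `cargo_metadata::Message` is handled as Cargo output, and anything else falls back to being treated as a bare rustc diagnostic. Below is a minimal, self-contained sketch of that trick; the `Fake*` types and the JSON lines are invented stand-ins, and only the `serde` (with `derive`) and `serde_json` crates are assumed.

use serde::Deserialize;

// Simplified stand-ins for `cargo_metadata::Message` and the rustc `Diagnostic`
// type that flycheck re-exports; the real types carry many more fields.
#[derive(Deserialize, Debug)]
struct FakeCargoMessage {
    reason: String, // cargo's JSON lines always carry a "reason" field
}

#[derive(Deserialize, Debug)]
struct FakeRustcDiagnostic {
    message: String,
    level: String,
}

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum JsonMessage {
    Cargo(FakeCargoMessage),    // tried first: lines from `cargo check --message-format=json`
    Rustc(FakeRustcDiagnostic), // fallback: bare diagnostics from `rustc --error-format=json`
}

fn main() {
    let from_cargo = r#"{"reason": "compiler-artifact", "fresh": false}"#;
    let from_rustc = r#"{"message": "unused variable: `x`", "level": "warning"}"#;

    // The first line matches the Cargo variant; the second has no "reason"
    // field and falls through to the Rustc variant.
    println!("{:?}", serde_json::from_str::<JsonMessage>(from_cargo).unwrap());
    println!("{:?}", serde_json::from_str::<JsonMessage>(from_rustc).unwrap());
}
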
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index 16a94a058..eaeca01bd 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -196,13 +196,6 @@ impl ItemTree { | |||
196 | self.raw_attrs(of).clone().filter(db, krate) | 196 | self.raw_attrs(of).clone().filter(db, krate) |
197 | } | 197 | } |
198 | 198 | ||
199 | pub fn all_inner_items(&self) -> impl Iterator<Item = ModItem> + '_ { | ||
200 | match &self.data { | ||
201 | Some(data) => Some(data.inner_items.values().flatten().copied()).into_iter().flatten(), | ||
202 | None => None.into_iter().flatten(), | ||
203 | } | ||
204 | } | ||
205 | |||
206 | pub fn inner_items_of_block(&self, block: FileAstId<ast::BlockExpr>) -> &[ModItem] { | 199 | pub fn inner_items_of_block(&self, block: FileAstId<ast::BlockExpr>) -> &[ModItem] { |
207 | match &self.data { | 200 | match &self.data { |
208 | Some(data) => data.inner_items.get(&block).map(|it| &**it).unwrap_or(&[]), | 201 | Some(data) => data.inner_items.get(&block).map(|it| &**it).unwrap_or(&[]), |
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml
index abc0e7532..66b3418f2 100644
--- a/crates/hir_ty/Cargo.toml
+++ b/crates/hir_ty/Cargo.toml
@@ -18,9 +18,9 @@ ena = "0.14.0" | |||
18 | log = "0.4.8" | 18 | log = "0.4.8" |
19 | rustc-hash = "1.1.0" | 19 | rustc-hash = "1.1.0" |
20 | scoped-tls = "1" | 20 | scoped-tls = "1" |
21 | chalk-solve = { version = "0.60", default-features = false } | 21 | chalk-solve = { version = "0.64", default-features = false } |
22 | chalk-ir = "0.60" | 22 | chalk-ir = "0.64" |
23 | chalk-recursive = "0.60" | 23 | chalk-recursive = "0.64" |
24 | la-arena = { version = "0.2.0", path = "../../lib/arena" } | 24 | la-arena = { version = "0.2.0", path = "../../lib/arena" } |
25 | 25 | ||
26 | stdx = { path = "../stdx", version = "0.0.0" } | 26 | stdx = { path = "../stdx", version = "0.0.0" } |
diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs
index 78a57fbdc..5f80a40c8 100644
--- a/crates/ide_assists/src/handlers/extract_function.rs
+++ b/crates/ide_assists/src/handlers/extract_function.rs
@@ -1227,9 +1227,19 @@ fn make_body( | |||
1227 | FunctionBody::Expr(expr) => { | 1227 | FunctionBody::Expr(expr) => { |
1228 | let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax()); | 1228 | let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax()); |
1229 | let expr = ast::Expr::cast(expr).unwrap(); | 1229 | let expr = ast::Expr::cast(expr).unwrap(); |
1230 | let expr = expr.dedent(old_indent).indent(IndentLevel(1)); | 1230 | match expr { |
1231 | ast::Expr::BlockExpr(block) => { | ||
1232 | // If the extracted expression is itself a block, there is no need to wrap it inside another block. | ||
1233 | let block = block.dedent(old_indent); | ||
1234 | // Recreate the block for formatting consistency with other extracted functions. | ||
1235 | make::block_expr(block.statements(), block.tail_expr()) | ||
1236 | } | ||
1237 | _ => { | ||
1238 | let expr = expr.dedent(old_indent).indent(IndentLevel(1)); | ||
1231 | 1239 | ||
1232 | make::block_expr(Vec::new(), Some(expr)) | 1240 | make::block_expr(Vec::new(), Some(expr)) |
1241 | } | ||
1242 | } | ||
1233 | } | 1243 | } |
1234 | FunctionBody::Span { parent, text_range } => { | 1244 | FunctionBody::Span { parent, text_range } => { |
1235 | let mut elements: Vec<_> = parent | 1245 | let mut elements: Vec<_> = parent |
@@ -1544,7 +1554,7 @@ fn foo() { | |||
1544 | } | 1554 | } |
1545 | 1555 | ||
1546 | fn $0fun_name() -> i32 { | 1556 | fn $0fun_name() -> i32 { |
1547 | { 1 + 1 } | 1557 | 1 + 1 |
1548 | }"#, | 1558 | }"#, |
1549 | ); | 1559 | ); |
1550 | } | 1560 | } |
@@ -2526,17 +2536,15 @@ fn foo() { | |||
2526 | } | 2536 | } |
2527 | 2537 | ||
2528 | fn $0fun_name(n: &mut i32) { | 2538 | fn $0fun_name(n: &mut i32) { |
2529 | { | 2539 | *n += *n; |
2530 | *n += *n; | 2540 | bar(*n); |
2531 | bar(*n); | 2541 | bar(*n+1); |
2532 | bar(*n+1); | 2542 | bar(*n**n); |
2533 | bar(*n**n); | 2543 | bar(&*n); |
2534 | bar(&*n); | 2544 | n.inc(); |
2535 | n.inc(); | 2545 | let v = n; |
2536 | let v = n; | 2546 | *v = v.succ(); |
2537 | *v = v.succ(); | 2547 | n.succ(); |
2538 | n.succ(); | ||
2539 | } | ||
2540 | }", | 2548 | }", |
2541 | ); | 2549 | ); |
2542 | } | 2550 | } |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 7ddea22c8..1109d2daf 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -17,7 +17,7 @@ use ide_db::helpers::{ | |||
17 | }; | 17 | }; |
18 | use lsp_types::{ClientCapabilities, MarkupKind}; | 18 | use lsp_types::{ClientCapabilities, MarkupKind}; |
19 | use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource}; | 19 | use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource}; |
20 | use rustc_hash::FxHashSet; | 20 | use rustc_hash::{FxHashMap, FxHashSet}; |
21 | use serde::{de::DeserializeOwned, Deserialize}; | 21 | use serde::{de::DeserializeOwned, Deserialize}; |
22 | use vfs::AbsPathBuf; | 22 | use vfs::AbsPathBuf; |
23 | 23 | ||
@@ -99,6 +99,9 @@ config_data! { | |||
99 | diagnostics_enableExperimental: bool = "true", | 99 | diagnostics_enableExperimental: bool = "true", |
100 | /// List of rust-analyzer diagnostics to disable. | 100 | /// List of rust-analyzer diagnostics to disable. |
101 | diagnostics_disabled: FxHashSet<String> = "[]", | 101 | diagnostics_disabled: FxHashSet<String> = "[]", |
102 | /// Map of prefixes to be substituted when parsing diagnostic file paths. | ||
103 | /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. | ||
104 | diagnostics_remapPrefix: FxHashMap<String, String> = "{}", | ||
102 | /// List of warnings that should be displayed with info severity. | 105 | /// List of warnings that should be displayed with info severity. |
103 | /// | 106 | /// |
104 | /// The warnings will be indicated by a blue squiggly underline in code | 107 | /// The warnings will be indicated by a blue squiggly underline in code |
@@ -474,6 +477,7 @@ impl Config { | |||
474 | } | 477 | } |
475 | pub fn diagnostics_map(&self) -> DiagnosticsMapConfig { | 478 | pub fn diagnostics_map(&self) -> DiagnosticsMapConfig { |
476 | DiagnosticsMapConfig { | 479 | DiagnosticsMapConfig { |
480 | remap_prefix: self.data.diagnostics_remapPrefix.clone(), | ||
477 | warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(), | 481 | warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(), |
478 | warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(), | 482 | warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(), |
479 | } | 483 | } |
@@ -835,6 +839,9 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json | |||
835 | "items": { "type": "string" }, | 839 | "items": { "type": "string" }, |
836 | "uniqueItems": true, | 840 | "uniqueItems": true, |
837 | }, | 841 | }, |
842 | "FxHashMap<String, String>" => set! { | ||
843 | "type": "object", | ||
844 | }, | ||
838 | "Option<usize>" => set! { | 845 | "Option<usize>" => set! { |
839 | "type": ["null", "integer"], | 846 | "type": ["null", "integer"], |
840 | "minimum": 0, | 847 | "minimum": 0, |
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index f01548c50..d4b9db362 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -12,6 +12,7 @@ pub(crate) type CheckFixes = Arc<FxHashMap<FileId, Vec<Fix>>>; | |||
12 | 12 | ||
13 | #[derive(Debug, Default, Clone)] | 13 | #[derive(Debug, Default, Clone)] |
14 | pub struct DiagnosticsMapConfig { | 14 | pub struct DiagnosticsMapConfig { |
15 | pub remap_prefix: FxHashMap<String, String>, | ||
15 | pub warnings_as_info: Vec<String>, | 16 | pub warnings_as_info: Vec<String>, |
16 | pub warnings_as_hint: Vec<String>, | 17 | pub warnings_as_hint: Vec<String>, |
17 | } | 18 | } |
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs
index ca18997e4..82dd0da9a 100644
--- a/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -1,6 +1,9 @@ | |||
1 | //! This module provides the functionality needed to convert diagnostics from | 1 | //! This module provides the functionality needed to convert diagnostics from |
2 | //! `cargo check` json format to the LSP diagnostic format. | 2 | //! `cargo check` json format to the LSP diagnostic format. |
3 | use std::{collections::HashMap, path::Path}; | 3 | use std::{ |
4 | collections::HashMap, | ||
5 | path::{Path, PathBuf}, | ||
6 | }; | ||
4 | 7 | ||
5 | use flycheck::{DiagnosticLevel, DiagnosticSpan}; | 8 | use flycheck::{DiagnosticLevel, DiagnosticSpan}; |
6 | use stdx::format_to; | 9 | use stdx::format_to; |
@@ -41,8 +44,12 @@ fn is_dummy_macro_file(file_name: &str) -> bool { | |||
41 | } | 44 | } |
42 | 45 | ||
43 | /// Converts a Rust span to a LSP location | 46 | /// Converts a Rust span to a LSP location |
44 | fn location(workspace_root: &Path, span: &DiagnosticSpan) -> lsp_types::Location { | 47 | fn location( |
45 | let file_name = workspace_root.join(&span.file_name); | 48 | config: &DiagnosticsMapConfig, |
49 | workspace_root: &Path, | ||
50 | span: &DiagnosticSpan, | ||
51 | ) -> lsp_types::Location { | ||
52 | let file_name = resolve_path(config, workspace_root, &span.file_name); | ||
46 | let uri = url_from_abs_path(&file_name); | 53 | let uri = url_from_abs_path(&file_name); |
47 | 54 | ||
48 | // FIXME: this doesn't handle UTF16 offsets correctly | 55 | // FIXME: this doesn't handle UTF16 offsets correctly |
@@ -58,32 +65,50 @@ fn location(workspace_root: &Path, span: &DiagnosticSpan) -> lsp_types::Location | |||
58 | /// | 65 | /// |
59 | /// This takes locations pointing into the standard library, or generally outside the current | 66 | /// This takes locations pointing into the standard library, or generally outside the current |
60 | /// workspace into account and tries to avoid those, in case macros are involved. | 67 | /// workspace into account and tries to avoid those, in case macros are involved. |
61 | fn primary_location(workspace_root: &Path, span: &DiagnosticSpan) -> lsp_types::Location { | 68 | fn primary_location( |
69 | config: &DiagnosticsMapConfig, | ||
70 | workspace_root: &Path, | ||
71 | span: &DiagnosticSpan, | ||
72 | ) -> lsp_types::Location { | ||
62 | let span_stack = std::iter::successors(Some(span), |span| Some(&span.expansion.as_ref()?.span)); | 73 | let span_stack = std::iter::successors(Some(span), |span| Some(&span.expansion.as_ref()?.span)); |
63 | for span in span_stack.clone() { | 74 | for span in span_stack.clone() { |
64 | let abs_path = workspace_root.join(&span.file_name); | 75 | let abs_path = resolve_path(config, workspace_root, &span.file_name); |
65 | if !is_dummy_macro_file(&span.file_name) && abs_path.starts_with(workspace_root) { | 76 | if !is_dummy_macro_file(&span.file_name) && abs_path.starts_with(workspace_root) { |
66 | return location(workspace_root, span); | 77 | return location(config, workspace_root, span); |
67 | } | 78 | } |
68 | } | 79 | } |
69 | 80 | ||
70 | // Fall back to the outermost macro invocation if no suitable span comes up. | 81 | // Fall back to the outermost macro invocation if no suitable span comes up. |
71 | let last_span = span_stack.last().unwrap(); | 82 | let last_span = span_stack.last().unwrap(); |
72 | location(workspace_root, last_span) | 83 | location(config, workspace_root, last_span) |
73 | } | 84 | } |
74 | 85 | ||
75 | /// Converts a secondary Rust span to a LSP related information | 86 | /// Converts a secondary Rust span to a LSP related information |
76 | /// | 87 | /// |
77 | /// If the span is unlabelled this will return `None`. | 88 | /// If the span is unlabelled this will return `None`. |
78 | fn diagnostic_related_information( | 89 | fn diagnostic_related_information( |
90 | config: &DiagnosticsMapConfig, | ||
79 | workspace_root: &Path, | 91 | workspace_root: &Path, |
80 | span: &DiagnosticSpan, | 92 | span: &DiagnosticSpan, |
81 | ) -> Option<lsp_types::DiagnosticRelatedInformation> { | 93 | ) -> Option<lsp_types::DiagnosticRelatedInformation> { |
82 | let message = span.label.clone()?; | 94 | let message = span.label.clone()?; |
83 | let location = location(workspace_root, span); | 95 | let location = location(config, workspace_root, span); |
84 | Some(lsp_types::DiagnosticRelatedInformation { location, message }) | 96 | Some(lsp_types::DiagnosticRelatedInformation { location, message }) |
85 | } | 97 | } |
86 | 98 | ||
99 | /// Resolves paths by applying any matching path prefix remappings, and then ||
100 | /// joining the path to the workspace root. | ||
101 | fn resolve_path(config: &DiagnosticsMapConfig, workspace_root: &Path, file_name: &str) -> PathBuf { | ||
102 | match config | ||
103 | .remap_prefix | ||
104 | .iter() | ||
105 | .find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name))) | ||
106 | { | ||
107 | Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)), | ||
108 | None => workspace_root.join(file_name), | ||
109 | } | ||
110 | } | ||
111 | |||
87 | struct SubDiagnostic { | 112 | struct SubDiagnostic { |
88 | related: lsp_types::DiagnosticRelatedInformation, | 113 | related: lsp_types::DiagnosticRelatedInformation, |
89 | suggested_fix: Option<lsp_ext::CodeAction>, | 114 | suggested_fix: Option<lsp_ext::CodeAction>, |
@@ -95,6 +120,7 @@ enum MappedRustChildDiagnostic { | |||
95 | } | 120 | } |
96 | 121 | ||
97 | fn map_rust_child_diagnostic( | 122 | fn map_rust_child_diagnostic( |
123 | config: &DiagnosticsMapConfig, | ||
98 | workspace_root: &Path, | 124 | workspace_root: &Path, |
99 | rd: &flycheck::Diagnostic, | 125 | rd: &flycheck::Diagnostic, |
100 | ) -> MappedRustChildDiagnostic { | 126 | ) -> MappedRustChildDiagnostic { |
@@ -108,7 +134,7 @@ fn map_rust_child_diagnostic( | |||
108 | let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new(); | 134 | let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new(); |
109 | for &span in &spans { | 135 | for &span in &spans { |
110 | if let Some(suggested_replacement) = &span.suggested_replacement { | 136 | if let Some(suggested_replacement) = &span.suggested_replacement { |
111 | let location = location(workspace_root, span); | 137 | let location = location(config, workspace_root, span); |
112 | let edit = lsp_types::TextEdit::new(location.range, suggested_replacement.clone()); | 138 | let edit = lsp_types::TextEdit::new(location.range, suggested_replacement.clone()); |
113 | edit_map.entry(location.uri).or_default().push(edit); | 139 | edit_map.entry(location.uri).or_default().push(edit); |
114 | } | 140 | } |
@@ -117,7 +143,7 @@ fn map_rust_child_diagnostic( | |||
117 | if edit_map.is_empty() { | 143 | if edit_map.is_empty() { |
118 | MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic { | 144 | MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic { |
119 | related: lsp_types::DiagnosticRelatedInformation { | 145 | related: lsp_types::DiagnosticRelatedInformation { |
120 | location: location(workspace_root, spans[0]), | 146 | location: location(config, workspace_root, spans[0]), |
121 | message: rd.message.clone(), | 147 | message: rd.message.clone(), |
122 | }, | 148 | }, |
123 | suggested_fix: None, | 149 | suggested_fix: None, |
@@ -125,7 +151,7 @@ fn map_rust_child_diagnostic( | |||
125 | } else { | 151 | } else { |
126 | MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic { | 152 | MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic { |
127 | related: lsp_types::DiagnosticRelatedInformation { | 153 | related: lsp_types::DiagnosticRelatedInformation { |
128 | location: location(workspace_root, spans[0]), | 154 | location: location(config, workspace_root, spans[0]), |
129 | message: rd.message.clone(), | 155 | message: rd.message.clone(), |
130 | }, | 156 | }, |
131 | suggested_fix: Some(lsp_ext::CodeAction { | 157 | suggested_fix: Some(lsp_ext::CodeAction { |
@@ -190,7 +216,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
190 | let mut tags = Vec::new(); | 216 | let mut tags = Vec::new(); |
191 | 217 | ||
192 | for secondary_span in rd.spans.iter().filter(|s| !s.is_primary) { | 218 | for secondary_span in rd.spans.iter().filter(|s| !s.is_primary) { |
193 | let related = diagnostic_related_information(workspace_root, secondary_span); | 219 | let related = diagnostic_related_information(config, workspace_root, secondary_span); |
194 | if let Some(related) = related { | 220 | if let Some(related) = related { |
195 | subdiagnostics.push(SubDiagnostic { related, suggested_fix: None }); | 221 | subdiagnostics.push(SubDiagnostic { related, suggested_fix: None }); |
196 | } | 222 | } |
@@ -198,7 +224,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
198 | 224 | ||
199 | let mut message = rd.message.clone(); | 225 | let mut message = rd.message.clone(); |
200 | for child in &rd.children { | 226 | for child in &rd.children { |
201 | let child = map_rust_child_diagnostic(workspace_root, &child); | 227 | let child = map_rust_child_diagnostic(config, workspace_root, &child); |
202 | match child { | 228 | match child { |
203 | MappedRustChildDiagnostic::SubDiagnostic(sub) => { | 229 | MappedRustChildDiagnostic::SubDiagnostic(sub) => { |
204 | subdiagnostics.push(sub); | 230 | subdiagnostics.push(sub); |
@@ -242,7 +268,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
242 | primary_spans | 268 | primary_spans |
243 | .iter() | 269 | .iter() |
244 | .flat_map(|primary_span| { | 270 | .flat_map(|primary_span| { |
245 | let primary_location = primary_location(workspace_root, &primary_span); | 271 | let primary_location = primary_location(config, workspace_root, &primary_span); |
246 | 272 | ||
247 | let mut message = message.clone(); | 273 | let mut message = message.clone(); |
248 | if needs_primary_span_label { | 274 | if needs_primary_span_label { |
@@ -272,7 +298,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
272 | // generated that code. | 298 | // generated that code. |
273 | let is_in_macro_call = i != 0; | 299 | let is_in_macro_call = i != 0; |
274 | 300 | ||
275 | let secondary_location = location(workspace_root, &span); | 301 | let secondary_location = location(config, workspace_root, &span); |
276 | if secondary_location == primary_location { | 302 | if secondary_location == primary_location { |
277 | continue; | 303 | continue; |
278 | } | 304 | } |
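
For illustration of the new `resolve_path` helper above: it strips a configured prefix from the diagnostic's file name, substitutes the mapped prefix, and joins the result to the workspace root. The sketch below repeats that logic with a plain `HashMap` instead of `DiagnosticsMapConfig`; the `/remapped` and `/home/alice/project` paths are made up, and the assertions assume Unix-style absolute paths.

use std::collections::HashMap;
use std::path::{Path, PathBuf};

fn resolve_path(
    remap_prefix: &HashMap<String, String>,
    workspace_root: &Path,
    file_name: &str,
) -> PathBuf {
    // Same shape as the helper added to to_proto.rs, minus the config struct.
    match remap_prefix
        .iter()
        .find_map(|(from, to)| file_name.strip_prefix(from).map(|rest| (to, rest)))
    {
        Some((to, rest)) => workspace_root.join(format!("{}{}", to, rest)),
        None => workspace_root.join(file_name),
    }
}

fn main() {
    // Hypothetical reverse of compiling with `--remap-path-prefix=/home/alice/project=/remapped`,
    // i.e. what `rust-analyzer.diagnostics.remapPrefix` would be set to.
    let mut remap = HashMap::new();
    remap.insert("/remapped".to_string(), "/home/alice/project".to_string());
    let root = Path::new("/home/alice/project");

    // A remapped absolute path from a diagnostic is mapped back to a real file...
    assert_eq!(
        resolve_path(&remap, root, "/remapped/src/lib.rs"),
        PathBuf::from("/home/alice/project/src/lib.rs")
    );
    // ...while paths without a matching prefix are joined to the workspace root as before.
    assert_eq!(
        resolve_path(&remap, root, "src/main.rs"),
        PathBuf::from("/home/alice/project/src/main.rs")
    );
}
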
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index a8c1a8075..556f80882 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false | |||
14 | cov-mark = { version = "1.1", features = ["thread-local"] } | 14 | cov-mark = { version = "1.1", features = ["thread-local"] } |
15 | itertools = "0.10.0" | 15 | itertools = "0.10.0" |
16 | rowan = "=0.13.0-pre.3" | 16 | rowan = "=0.13.0-pre.3" |
17 | rustc_lexer = { version = "714.0.0", package = "rustc-ap-rustc_lexer" } | 17 | rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" } |
18 | rustc-hash = "1.1.0" | 18 | rustc-hash = "1.1.0" |
19 | arrayvec = "0.7" | 19 | arrayvec = "0.7" |
20 | once_cell = "1.3.1" | 20 | once_cell = "1.3.1" |
diff --git a/docs/dev/README.md b/docs/dev/README.md
index 7e4488a41..16b23adc6 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -208,20 +208,26 @@ Release process is handled by `release`, `dist` and `promote` xtasks, `release` | |||
208 | 208 | ||
209 | Additionally, it assumes that the remote for `rust-analyzer` is called `upstream` (I use `origin` to point to my fork). | 209 | Additionally, it assumes that the remote for `rust-analyzer` is called `upstream` (I use `origin` to point to my fork).
210 | 210 | ||
211 | `release` calls the GitHub API to scrape pull request comments and categorize them in the changelog. ||
212 | This step uses the `curl` and `jq` applications, which need to be available in `PATH`. | ||
213 | Finally, you need to obtain a GitHub personal access token and set the `GITHUB_TOKEN` environment variable. | ||
214 | |||
211 | Release steps: | 215 | Release steps: |
212 | 216 | ||
213 | 1. Inside rust-analyzer, run `cargo xtask release`. This will: | 217 | 1. Set the `GITHUB_TOKEN` environment variable. |
218 | 2. Inside rust-analyzer, run `cargo xtask release`. This will: | ||
214 | * checkout the `release` branch | 219 | * checkout the `release` branch |
215 | * reset it to `upstream/nightly` | 220 | * reset it to `upstream/nightly` |
216 | * push it to `upstream`. This triggers GitHub Actions which: | 221 | * push it to `upstream`. This triggers GitHub Actions which: |
217 | * runs `cargo xtask dist` to package binaries and VS Code extension | 222 | * runs `cargo xtask dist` to package binaries and VS Code extension |
218 | * makes a GitHub release | 223 | * makes a GitHub release |
219 | * pushes VS Code extension to the marketplace | 224 | * pushes VS Code extension to the marketplace |
220 | * create new changelog in `rust-analyzer.github.io` | 225 | * call the GitHub API for PR details |
221 | 2. While the release is in progress, fill in the changelog | 226 | * create a new changelog in `rust-analyzer.github.io` |
222 | 3. Commit & push the changelog | 227 | 3. While the release is in progress, fill in the changelog |
223 | 4. Tweet | 228 | 4. Commit & push the changelog |
224 | 5. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's submodule. | 229 | 5. Tweet |
230 | 6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's submodule. | ||
225 | Self-approve the PR. | 231 | Self-approve the PR. |
226 | 232 | ||
227 | If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console. | 233 | If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console. |
@@ -229,7 +235,11 @@ If it fails because of something that needs to be fixed, remove the release tag | |||
229 | Make sure to remove the new changelog post created when running `cargo xtask release` a second time. | 235 | Make sure to remove the new changelog post created when running `cargo xtask release` a second time. |
230 | 236 | ||
231 | We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week. | 237 | We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week. |
238 | |||
232 | We don't do "patch" releases, unless something truly egregious comes up. | 239 | We don't do "patch" releases, unless something truly egregious comes up. |
240 | To do a patch release, cherry-pick the fix on top of the current `release` branch and push the branch. | ||
241 | There's no need to write a changelog for a patch release; it's OK to include the notes about the fix in the next weekly one. ||
242 | Note: we tag releases by date, so releasing a patch release on the same day should work (by overwriting the tag), but I am not 100% sure. ||
233 | 243 | ||
234 | ## Permissions | 244 | ## Permissions |
235 | 245 | ||
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index e0ee35b4e..e28423e99 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -147,6 +147,12 @@ have more false positives than usual. | |||
147 | -- | 147 | -- |
148 | List of rust-analyzer diagnostics to disable. | 148 | List of rust-analyzer diagnostics to disable. |
149 | -- | 149 | -- |
150 | [[rust-analyzer.diagnostics.remapPrefix]]rust-analyzer.diagnostics.remapPrefix (default: `{}`):: | ||
151 | + | ||
152 | -- | ||
153 | Map of prefixes to be substituted when parsing diagnostic file paths. | ||
154 | This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. | ||
155 | -- | ||
150 | [[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: | 156 | [[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: |
151 | + | 157 | + |
152 | -- | 158 | -- |
diff --git a/editors/code/package.json b/editors/code/package.json
index 06ed62d8d..fa5632f90 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -565,6 +565,11 @@ | |||
565 | }, | 565 | }, |
566 | "uniqueItems": true | 566 | "uniqueItems": true |
567 | }, | 567 | }, |
568 | "rust-analyzer.diagnostics.remapPrefix": { | ||
569 | "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.", | ||
570 | "default": {}, | ||
571 | "type": "object" | ||
572 | }, | ||
568 | "rust-analyzer.diagnostics.warningsAsHint": { | 573 | "rust-analyzer.diagnostics.warningsAsHint": { |
569 | "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.", | 574 | "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.", |
570 | "default": [], | 575 | "default": [], |
@@ -1195,4 +1200,4 @@ | |||
1195 | ] | 1200 | ] |
1196 | } | 1201 | } |
1197 | } | 1202 | } |
1198 | } \ No newline at end of file | 1203 | } |
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index dde5d14ee..22bb50467 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -1,4 +1,4 @@ | |||
1 | use std::fmt::Write; | 1 | mod changelog; |
2 | 2 | ||
3 | use xshell::{cmd, cp, pushd, read_dir, write_file}; | 3 | use xshell::{cmd, cp, pushd, read_dir, write_file}; |
4 | 4 | ||
@@ -38,42 +38,7 @@ impl flags::Release { | |||
38 | let tags = cmd!("git tag --list").read()?; | 38 | let tags = cmd!("git tag --list").read()?; |
39 | let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap(); | 39 | let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap(); |
40 | 40 | ||
41 | let git_log = cmd!("git log {prev_tag}..HEAD --merges --reverse").read()?; | 41 | let contents = changelog::get_changelog(changelog_n, &commit, prev_tag, &today)?; |
42 | let mut git_log_summary = String::new(); | ||
43 | for line in git_log.lines() { | ||
44 | let line = line.trim_start(); | ||
45 | if let Some(p) = line.find(':') { | ||
46 | if let Ok(pr) = line[..p].parse::<u32>() { | ||
47 | writeln!(git_log_summary, "* pr:{}[]{}", pr, &line[p + 1..]).unwrap(); | ||
48 | } | ||
49 | } | ||
50 | } | ||
51 | |||
52 | let contents = format!( | ||
53 | "\ | ||
54 | = Changelog #{} | ||
55 | :sectanchors: | ||
56 | :page-layout: post | ||
57 | |||
58 | Commit: commit:{}[] + | ||
59 | Release: release:{}[] | ||
60 | |||
61 | == Sponsors | ||
62 | |||
63 | **Become a sponsor:** On https://opencollective.com/rust-analyzer/[OpenCollective] or | ||
64 | https://github.com/sponsors/rust-analyzer[GitHub Sponsors]. | ||
65 | |||
66 | == New Features | ||
67 | |||
68 | {} | ||
69 | |||
70 | == Fixes | ||
71 | |||
72 | == Internal Improvements | ||
73 | ", | ||
74 | changelog_n, commit, today, git_log_summary | ||
75 | ); | ||
76 | |||
77 | let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n)); | 42 | let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n)); |
78 | write_file(&path, &contents)?; | 43 | write_file(&path, &contents)?; |
79 | 44 | ||
diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs
new file mode 100644
index 000000000..ffcae2cf7
--- /dev/null
+++ b/xtask/src/release/changelog.rs
@@ -0,0 +1,159 @@ | |||
1 | use std::fmt::Write; | ||
2 | use std::{env, iter}; | ||
3 | |||
4 | use anyhow::{bail, Result}; | ||
5 | use xshell::cmd; | ||
6 | |||
7 | pub(crate) fn get_changelog( | ||
8 | changelog_n: usize, | ||
9 | commit: &str, | ||
10 | prev_tag: &str, | ||
11 | today: &str, | ||
12 | ) -> Result<String> { | ||
13 | let git_log = cmd!("git log {prev_tag}..HEAD --merges --reverse").read()?; | ||
14 | let mut features = String::new(); | ||
15 | let mut fixes = String::new(); | ||
16 | let mut internal = String::new(); | ||
17 | let mut others = String::new(); | ||
18 | for line in git_log.lines() { | ||
19 | let line = line.trim_start(); | ||
20 | if let Some(p) = line.find(':') { | ||
21 | let pr = &line[..p]; | ||
22 | if let Ok(pr_num) = pr.parse::<u32>() { | ||
23 | let accept = "Accept: application/vnd.github.v3+json"; | ||
24 | let token = match env::var("GITHUB_TOKEN") { | ||
25 | Ok(token) => token, | ||
26 | Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."), | ||
27 | }; | ||
28 | let authorization = format!("Authorization: token {}", token); | ||
29 | let pr_url = "https://api.github.com/repos/rust-analyzer/rust-analyzer/issues"; | ||
30 | |||
31 | // we don't use an HTTPS client or JSON parser to keep the build times low | ||
32 | let pr_json = | ||
33 | cmd!("curl -s -H {accept} -H {authorization} {pr_url}/{pr}").read()?; | ||
34 | let pr_title = cmd!("jq .title").stdin(&pr_json).read()?; | ||
35 | let pr_title = unescape(&pr_title[1..pr_title.len() - 1]); | ||
36 | let pr_comment = cmd!("jq .body").stdin(pr_json).read()?; | ||
37 | |||
38 | let comments_json = | ||
39 | cmd!("curl -s -H {accept} -H {authorization} {pr_url}/{pr}/comments").read()?; | ||
40 | let pr_comments = cmd!("jq .[].body").stdin(comments_json).read()?; | ||
41 | |||
42 | let l = iter::once(pr_comment.as_str()) | ||
43 | .chain(pr_comments.lines()) | ||
44 | .rev() | ||
45 | .find_map(|it| { | ||
46 | let it = unescape(&it[1..it.len() - 1]); | ||
47 | it.lines().find_map(parse_changelog_line) | ||
48 | }) | ||
49 | .into_iter() | ||
50 | .next() | ||
51 | .unwrap_or_else(|| parse_title_line(&pr_title)); | ||
52 | let s = match l.kind { | ||
53 | PrKind::Feature => &mut features, | ||
54 | PrKind::Fix => &mut fixes, | ||
55 | PrKind::Internal => &mut internal, | ||
56 | PrKind::Other => &mut others, | ||
57 | PrKind::Skip => continue, | ||
58 | }; | ||
59 | writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)) | ||
60 | .unwrap(); | ||
61 | } | ||
62 | } | ||
63 | } | ||
64 | |||
65 | let contents = format!( | ||
66 | "\ | ||
67 | = Changelog #{} | ||
68 | :sectanchors: | ||
69 | :page-layout: post | ||
70 | |||
71 | Commit: commit:{}[] + | ||
72 | Release: release:{}[] | ||
73 | |||
74 | == Sponsors | ||
75 | |||
76 | **Become a sponsor:** On https://opencollective.com/rust-analyzer/[OpenCollective] or | ||
77 | https://github.com/sponsors/rust-analyzer[GitHub Sponsors]. | ||
78 | |||
79 | == New Features | ||
80 | |||
81 | {} | ||
82 | |||
83 | == Fixes | ||
84 | |||
85 | {} | ||
86 | |||
87 | == Internal Improvements | ||
88 | |||
89 | {} | ||
90 | |||
91 | == Others | ||
92 | |||
93 | {} | ||
94 | ", | ||
95 | changelog_n, commit, today, features, fixes, internal, others | ||
96 | ); | ||
97 | Ok(contents) | ||
98 | } | ||
99 | |||
100 | #[derive(Clone, Copy)] | ||
101 | enum PrKind { | ||
102 | Feature, | ||
103 | Fix, | ||
104 | Internal, | ||
105 | Other, | ||
106 | Skip, | ||
107 | } | ||
108 | |||
109 | struct PrInfo { | ||
110 | message: Option<String>, | ||
111 | kind: PrKind, | ||
112 | } | ||
113 | |||
114 | fn unescape(s: &str) -> String { | ||
115 | s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "") | ||
116 | } | ||
117 | |||
118 | fn parse_changelog_line(s: &str) -> Option<PrInfo> { | ||
119 | let parts = s.splitn(3, ' ').collect::<Vec<_>>(); | ||
120 | if parts.len() < 2 || parts[0] != "changelog" { | ||
121 | return None; | ||
122 | } | ||
123 | let message = parts.get(2).map(|it| it.to_string()); | ||
124 | let kind = match parts[1].trim_end_matches(':') { | ||
125 | "feature" => PrKind::Feature, | ||
126 | "fix" => PrKind::Fix, | ||
127 | "internal" => PrKind::Internal, | ||
128 | "skip" => PrKind::Skip, | ||
129 | _ => { | ||
130 | let kind = PrKind::Other; | ||
131 | let message = format!("{} {}", parts[1], message.unwrap_or_default()); | ||
132 | return Some(PrInfo { kind, message: Some(message) }); | ||
133 | } | ||
134 | }; | ||
135 | let res = PrInfo { kind, message }; | ||
136 | Some(res) | ||
137 | } | ||
138 | |||
139 | fn parse_title_line(s: &str) -> PrInfo { | ||
140 | let lower = s.to_ascii_lowercase(); | ||
141 | const PREFIXES: [(&str, PrKind); 5] = [ | ||
142 | ("feat: ", PrKind::Feature), | ||
143 | ("feature: ", PrKind::Feature), | ||
144 | ("fix: ", PrKind::Fix), | ||
145 | ("internal: ", PrKind::Internal), | ||
146 | ("minor: ", PrKind::Skip), | ||
147 | ]; | ||
148 | |||
149 | for &(prefix, kind) in &PREFIXES { | ||
150 | if lower.starts_with(prefix) { | ||
151 | let message = match &kind { | ||
152 | PrKind::Skip => None, | ||
153 | _ => Some(s[prefix.len()..].to_string()), | ||
154 | }; | ||
155 | return PrInfo { kind, message }; | ||
156 | } | ||
157 | } | ||
158 | PrInfo { kind: PrKind::Other, message: Some(s.to_string()) } | ||
159 | } | ||
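
For illustration of the changelog categorization above: a PR is placed in a section according to an explicit `changelog <kind> <message>` line in its description or a later comment (handled by `parse_changelog_line`), and otherwise by its title prefix (handled by `parse_title_line`). The sketch below mirrors only the title-prefix convention; the `section_for` helper and the example titles are invented and are not part of the xtask code.

fn section_for(title: &str) -> &'static str {
    // Mirrors the PREFIXES table in `parse_title_line`.
    let lower = title.to_ascii_lowercase();
    if lower.starts_with("feat: ") || lower.starts_with("feature: ") {
        "New Features"
    } else if lower.starts_with("fix: ") {
        "Fixes"
    } else if lower.starts_with("internal: ") {
        "Internal Improvements"
    } else if lower.starts_with("minor: ") {
        "(skipped from the changelog)"
    } else {
        "Others"
    }
}

fn main() {
    assert_eq!(section_for("fix: don't duplicate blocks in extracted functions"), "Fixes");
    assert_eq!(section_for("internal: generate the changelog from xtask"), "Internal Improvements");
    assert_eq!(section_for("Add diagnostics.remapPrefix config"), "Others");
    println!("all example titles categorized as expected");
}
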