-rw-r--r-- | crates/ra_ide/src/snapshots/highlight_doctest.html | 5
-rw-r--r-- | crates/ra_ide/src/ssr.rs | 1
-rw-r--r-- | crates/ra_ide/src/syntax_highlighting/injection.rs | 12
-rw-r--r-- | crates/ra_ide/src/syntax_highlighting/tests.rs | 3
-rw-r--r-- | crates/ra_ssr/src/lib.rs | 18
-rw-r--r-- | crates/ra_ssr/src/matching.rs | 105
-rw-r--r-- | crates/ra_ssr/src/replacing.rs | 10
-rw-r--r-- | crates/ra_ssr/src/tests.rs | 53
-rw-r--r-- | docs/user/manual.adoc | 4
-rw-r--r-- | editors/code/package.json | 2
-rw-r--r-- | editors/code/src/main.ts | 25
-rw-r--r-- | editors/code/src/net.ts | 88
-rw-r--r-- | editors/code/src/util.ts | 1 |
13 files changed, 259 insertions, 68 deletions
diff --git a/crates/ra_ide/src/snapshots/highlight_doctest.html b/crates/ra_ide/src/snapshots/highlight_doctest.html
index 63199cdbe..ac546806e 100644
--- a/crates/ra_ide/src/snapshots/highlight_doctest.html
+++ b/crates/ra_ide/src/snapshots/highlight_doctest.html
@@ -32,7 +32,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
33 | .control { font-style: italic; } | 33 | .control { font-style: italic; } |
34 | </style> | 34 | </style> |
35 | <pre><code><span class="keyword">struct</span> <span class="struct declaration">Foo</span> { | 35 | <pre><code><span class="comment documentation">/// ```</span> |
36 | <span class="comment documentation">/// </span><span class="keyword">let</span> _ = <span class="string_literal">"early doctests should not go boom"</span>; | ||
37 | <span class="comment documentation">/// ```</span> | ||
38 | <span class="keyword">struct</span> <span class="struct declaration">Foo</span> { | ||
36 | <span class="field declaration">bar</span>: <span class="builtin_type">bool</span>, | 39 | <span class="field declaration">bar</span>: <span class="builtin_type">bool</span>, |
37 | } | 40 | } |
38 | 41 | ||
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
index 59c230f6c..03f18c617 100644
--- a/crates/ra_ide/src/ssr.rs
+++ b/crates/ra_ide/src/ssr.rs
@@ -9,6 +9,7 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
9 | // Search and replace with named wildcards that will match any expression, type, path, pattern or item. | 9 | // Search and replace with named wildcards that will match any expression, type, path, pattern or item. |
10 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. | 10 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. |
11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. | 11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. |
12 | // Within a macro call, a placeholder will match up until whatever token follows the placeholder. | ||
12 | // Available via the command `rust-analyzer.ssr`. | 13 | // Available via the command `rust-analyzer.ssr`. |
13 | // | 14 | // |
14 | // ```rust | 15 | // ```rust |
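To make the rule syntax described in the doc comment above concrete, here is a small illustration (not part of the diff; `foo`, `bar` and the rule itself are made up):

```rust
fn foo(a: i32, b: i32) -> i32 { a - b }
fn bar(b: i32, a: i32) -> i32 { a - b }

// Applying the SSR rule `foo($a, $b) ==>> bar($b, $a)` to this body...
fn before() -> i32 { foo(1, 2) }

// ...rewrites the call site with `$a` bound to `1` and `$b` bound to `2`:
fn after() -> i32 { bar(2, 1) }
```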
diff --git a/crates/ra_ide/src/syntax_highlighting/injection.rs b/crates/ra_ide/src/syntax_highlighting/injection.rs
index 415f24a6d..9d82b4009 100644
--- a/crates/ra_ide/src/syntax_highlighting/injection.rs
+++ b/crates/ra_ide/src/syntax_highlighting/injection.rs
@@ -155,17 +155,21 @@ pub(super) fn highlight_doc_comment(
155 | let mut start_offset = None; | 155 | let mut start_offset = None; |
156 | let mut end_offset = None; | 156 | let mut end_offset = None; |
157 | for (line_start, orig_line_start) in range_mapping.range(..h.range.end()).rev() { | 157 | for (line_start, orig_line_start) in range_mapping.range(..h.range.end()).rev() { |
158 | // It's possible for orig_line_start - line_start to be negative. Add h.range.start() | ||
159 | // here and remove it from the end range after the loop below so that the values are | ||
160 | // always non-negative. | ||
161 | let offset = h.range.start() + orig_line_start - line_start; | ||
158 | if line_start <= &h.range.start() { | 162 | if line_start <= &h.range.start() { |
159 | start_offset.get_or_insert(orig_line_start - line_start); | 163 | start_offset.get_or_insert(offset); |
160 | break; | 164 | break; |
161 | } else { | 165 | } else { |
162 | end_offset.get_or_insert(orig_line_start - line_start); | 166 | end_offset.get_or_insert(offset); |
163 | } | 167 | } |
164 | } | 168 | } |
165 | if let Some(start_offset) = start_offset { | 169 | if let Some(start_offset) = start_offset { |
166 | h.range = TextRange::new( | 170 | h.range = TextRange::new( |
167 | h.range.start() + start_offset, | 171 | start_offset, |
168 | h.range.end() + end_offset.unwrap_or(start_offset), | 172 | h.range.end() + end_offset.unwrap_or(start_offset) - h.range.start(), |
169 | ); | 173 | ); |
170 | 174 | ||
171 | stack.add(h); | 175 | stack.add(h); |
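The reordering in this hunk matters because these offsets are unsigned text sizes: `orig_line_start - line_start` on its own can underflow. A minimal sketch of the arithmetic, assuming the `text_size` crate that `ra_syntax` builds on (the values are made up):

```rust
use text_size::{TextRange, TextSize};

fn main() {
    // Inside a doc comment the original line can start *before* the injected
    // line start, so the direct difference would underflow an unsigned TextSize.
    let range_start = TextSize::from(10); // stands in for h.range.start()
    let orig_line_start = TextSize::from(5);
    let line_start = TextSize::from(8);

    // Adding `range_start` first keeps every intermediate value non-negative.
    let offset = range_start + orig_line_start - line_start; // 10 + 5 - 8 = 7
    let range = TextRange::new(offset, offset + TextSize::from(3));
    assert_eq!(u32::from(range.start()), 7);
}
```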
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs
index 93a276ffe..b1f48f03b 100644
--- a/crates/ra_ide/src/syntax_highlighting/tests.rs
+++ b/crates/ra_ide/src/syntax_highlighting/tests.rs
@@ -291,6 +291,9 @@ fn main() {
291 | fn test_highlight_doctest() { | 291 | fn test_highlight_doctest() { |
292 | check_highlighting( | 292 | check_highlighting( |
293 | r#" | 293 | r#" |
294 | /// ``` | ||
295 | /// let _ = "early doctests should not go boom"; | ||
296 | /// ``` | ||
294 | struct Foo { | 297 | struct Foo { |
295 | bar: bool, | 298 | bar: bool, |
296 | } | 299 | } |
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index fc716ae82..da26ee669 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -91,14 +91,16 @@ impl<'db> MatchFinder<'db> {
91 | if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) { | 91 | if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) { |
92 | // Continue searching in each of our placeholders. | 92 | // Continue searching in each of our placeholders. |
93 | for placeholder_value in m.placeholder_values.values_mut() { | 93 | for placeholder_value in m.placeholder_values.values_mut() { |
94 | // Don't search our placeholder if it's the entire matched node, otherwise we'd | 94 | if let Some(placeholder_node) = &placeholder_value.node { |
95 | // find the same match over and over until we got a stack overflow. | 95 | // Don't search our placeholder if it's the entire matched node, otherwise we'd |
96 | if placeholder_value.node != *code { | 96 | // find the same match over and over until we got a stack overflow. |
97 | self.find_matches( | 97 | if placeholder_node != code { |
98 | &placeholder_value.node, | 98 | self.find_matches( |
99 | restrict_range, | 99 | placeholder_node, |
100 | &mut placeholder_value.inner_matches, | 100 | restrict_range, |
101 | ); | 101 | &mut placeholder_value.inner_matches, |
102 | ); | ||
103 | } | ||
102 | } | 104 | } |
103 | } | 105 | } |
104 | matches_out.matches.push(m); | 106 | matches_out.matches.push(m); |
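The `placeholder_node != code` guard above is what keeps the recursion finite. A toy sketch of the idea, using plain strings instead of the crate's real syntax nodes and `MatchFinder`:

```rust
// Toy matcher: `$a` alone captures the whole input, `foo($a)` captures the text
// between the parentheses. Not the crate's real matching, just an illustration.
fn try_match(pattern: &str, code: &str) -> Option<String> {
    if pattern == "$a" {
        Some(code.to_string())
    } else if pattern == "foo($a)" && code.starts_with("foo(") && code.ends_with(')') {
        Some(code["foo(".len()..code.len() - 1].to_string())
    } else {
        None
    }
}

fn find_matches(pattern: &str, code: &str, out: &mut Vec<String>) {
    if let Some(captured) = try_match(pattern, code) {
        out.push(code.to_string());
        // Mirror of the hunk's check: only recurse into a strictly smaller capture,
        // otherwise a pattern like `$a` would rediscover the same match forever.
        if captured != code {
            find_matches(pattern, &captured, out);
        }
    }
}

fn main() {
    let mut matches = Vec::new();
    find_matches("foo($a)", "foo(foo(1 + 2))", &mut matches);
    assert_eq!(matches, ["foo(foo(1 + 2))", "foo(1 + 2)"]);
}
```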
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index 265b6d793..bdaba9f1b 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -61,8 +61,9 @@ pub(crate) struct Var(pub String);
61 | /// Information about a placeholder bound in a match. | 61 | /// Information about a placeholder bound in a match. |
62 | #[derive(Debug)] | 62 | #[derive(Debug)] |
63 | pub(crate) struct PlaceholderMatch { | 63 | pub(crate) struct PlaceholderMatch { |
64 | /// The node that the placeholder matched to. | 64 | /// The node that the placeholder matched to. If set, then we'll search for further matches |
65 | pub(crate) node: SyntaxNode, | 65 | /// within this node. It isn't set when we match tokens within a macro call's token tree. |
66 | pub(crate) node: Option<SyntaxNode>, | ||
66 | pub(crate) range: FileRange, | 67 | pub(crate) range: FileRange, |
67 | /// More matches, found within `node`. | 68 | /// More matches, found within `node`. |
68 | pub(crate) inner_matches: SsrMatches, | 69 | pub(crate) inner_matches: SsrMatches, |
@@ -195,6 +196,7 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
195 | SyntaxKind::RECORD_FIELD_LIST => { | 196 | SyntaxKind::RECORD_FIELD_LIST => { |
196 | self.attempt_match_record_field_list(match_inputs, pattern, code) | 197 | self.attempt_match_record_field_list(match_inputs, pattern, code) |
197 | } | 198 | } |
199 | SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(match_inputs, pattern, code), | ||
198 | _ => self.attempt_match_node_children(match_inputs, pattern, code), | 200 | _ => self.attempt_match_node_children(match_inputs, pattern, code), |
199 | } | 201 | } |
200 | } | 202 | } |
@@ -340,6 +342,90 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
340 | Ok(()) | 342 | Ok(()) |
341 | } | 343 | } |
342 | 344 | ||
345 | /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token | ||
346 | /// tree it can match a sequence of tokens. | ||
347 | fn attempt_match_token_tree( | ||
348 | &mut self, | ||
349 | match_inputs: &MatchInputs, | ||
350 | pattern: &SyntaxNode, | ||
351 | code: &ra_syntax::SyntaxNode, | ||
352 | ) -> Result<(), MatchFailed> { | ||
353 | let mut pattern = PatternIterator::new(pattern).peekable(); | ||
354 | let mut children = code.children_with_tokens(); | ||
355 | while let Some(child) = children.next() { | ||
356 | if let Some(placeholder) = pattern.peek().and_then(|p| match_inputs.get_placeholder(p)) | ||
357 | { | ||
358 | pattern.next(); | ||
359 | let next_pattern_token = pattern | ||
360 | .peek() | ||
361 | .and_then(|p| match p { | ||
362 | SyntaxElement::Token(t) => Some(t.clone()), | ||
363 | SyntaxElement::Node(n) => n.first_token(), | ||
364 | }) | ||
365 | .map(|p| p.text().to_string()); | ||
366 | let first_matched_token = child.clone(); | ||
367 | let mut last_matched_token = child; | ||
368 | // Read code tokens until we reach one equal to the next token from our pattern | ||
369 | // or we reach the end of the token tree. | ||
370 | while let Some(next) = children.next() { | ||
371 | match &next { | ||
372 | SyntaxElement::Token(t) => { | ||
373 | if Some(t.to_string()) == next_pattern_token { | ||
374 | pattern.next(); | ||
375 | break; | ||
376 | } | ||
377 | } | ||
378 | SyntaxElement::Node(n) => { | ||
379 | if let Some(first_token) = n.first_token() { | ||
380 | if Some(first_token.to_string()) == next_pattern_token { | ||
381 | if let Some(SyntaxElement::Node(p)) = pattern.next() { | ||
382 | // We have a subtree that starts with the next token in our pattern. | ||
383 | self.attempt_match_token_tree(match_inputs, &p, &n)?; | ||
384 | break; | ||
385 | } | ||
386 | } | ||
387 | } | ||
388 | } | ||
389 | }; | ||
390 | last_matched_token = next; | ||
391 | } | ||
392 | if let Some(match_out) = &mut self.match_out { | ||
393 | match_out.placeholder_values.insert( | ||
394 | Var(placeholder.ident.to_string()), | ||
395 | PlaceholderMatch::from_range(FileRange { | ||
396 | file_id: self.sema.original_range(code).file_id, | ||
397 | range: first_matched_token | ||
398 | .text_range() | ||
399 | .cover(last_matched_token.text_range()), | ||
400 | }), | ||
401 | ); | ||
402 | } | ||
403 | continue; | ||
404 | } | ||
405 | // Match literal (non-placeholder) tokens. | ||
406 | match child { | ||
407 | SyntaxElement::Token(token) => { | ||
408 | self.attempt_match_token(&mut pattern, &token)?; | ||
409 | } | ||
410 | SyntaxElement::Node(node) => match pattern.next() { | ||
411 | Some(SyntaxElement::Node(p)) => { | ||
412 | self.attempt_match_token_tree(match_inputs, &p, &node)?; | ||
413 | } | ||
414 | Some(SyntaxElement::Token(p)) => fail_match!( | ||
415 | "Pattern has token '{}', code has subtree '{}'", | ||
416 | p.text(), | ||
417 | node.text() | ||
418 | ), | ||
419 | None => fail_match!("Pattern has nothing, code has '{}'", node.text()), | ||
420 | }, | ||
421 | } | ||
422 | } | ||
423 | if let Some(p) = pattern.next() { | ||
424 | fail_match!("Reached end of token tree in code, but pattern still has {:?}", p); | ||
425 | } | ||
426 | Ok(()) | ||
427 | } | ||
428 | |||
343 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { | 429 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { |
344 | loop { | 430 | loop { |
345 | let c = code_it.next(); | 431 | let c = code_it.next(); |
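To make the doc comment at the top of `attempt_match_token_tree` concrete: inside a macro call the pattern and code are plain token trees, so a placeholder has to absorb a run of tokens until the next literal pattern token shows up. A hypothetical rule and input, mirroring the tests added later in this diff:

```rust
// Pattern: foo!(41, $a, 43)
// Code:    foo!(41, 1 + 2, 43)
//
// `$a` cannot bind a single AST node here (the macro's arguments are just tokens),
// so the loop reads `1`, `+`, `2` into the placeholder until it reaches the next
// pattern token `,`, then matches the literal `43`. A token-taking macro is enough
// to exercise the shape of the input:
macro_rules! foo {
    ($($tt:tt)*) => { () };
}

fn demo() {
    foo!(41, 1 + 2, 43);
}
```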
@@ -399,7 +485,11 @@ fn recording_match_fail_reasons() -> bool {
399 | 485 | ||
400 | impl PlaceholderMatch { | 486 | impl PlaceholderMatch { |
401 | fn new(node: &SyntaxNode, range: FileRange) -> Self { | 487 | fn new(node: &SyntaxNode, range: FileRange) -> Self { |
402 | Self { node: node.clone(), range, inner_matches: SsrMatches::default() } | 488 | Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() } |
489 | } | ||
490 | |||
491 | fn from_range(range: FileRange) -> Self { | ||
492 | Self { node: None, range, inner_matches: SsrMatches::default() } | ||
403 | } | 493 | } |
404 | } | 494 | } |
405 | 495 | ||
@@ -484,7 +574,14 @@ mod tests {
484 | assert_eq!(matches.matches.len(), 1); | 574 | assert_eq!(matches.matches.len(), 1); |
485 | assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); | 575 | assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)"); |
486 | assert_eq!(matches.matches[0].placeholder_values.len(), 1); | 576 | assert_eq!(matches.matches[0].placeholder_values.len(), 1); |
487 | assert_eq!(matches.matches[0].placeholder_values[&Var("x".to_string())].node.text(), "1+2"); | 577 | assert_eq!( |
578 | matches.matches[0].placeholder_values[&Var("x".to_string())] | ||
579 | .node | ||
580 | .as_ref() | ||
581 | .unwrap() | ||
582 | .text(), | ||
583 | "1+2" | ||
584 | ); | ||
488 | 585 | ||
489 | let edit = crate::replacing::matches_to_edit(&matches); | 586 | let edit = crate::replacing::matches_to_edit(&matches); |
490 | let mut after = input.to_string(); | 587 | let mut after = input.to_string(); |
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
index 81a5e06a9..5dcde82a2 100644
--- a/crates/ra_ssr/src/replacing.rs
+++ b/crates/ra_ssr/src/replacing.rs
@@ -24,6 +24,7 @@ fn matches_to_edit_at_offset(matches: &SsrMatches, relative_start: TextSize) ->
24 | 24 | ||
25 | fn render_replace(match_info: &Match) -> String { | 25 | fn render_replace(match_info: &Match) -> String { |
26 | let mut out = String::new(); | 26 | let mut out = String::new(); |
27 | let match_start = match_info.matched_node.text_range().start(); | ||
27 | for r in &match_info.template.tokens { | 28 | for r in &match_info.template.tokens { |
28 | match r { | 29 | match r { |
29 | PatternElement::Token(t) => out.push_str(t.text.as_str()), | 30 | PatternElement::Token(t) => out.push_str(t.text.as_str()), |
@@ -32,7 +33,14 @@ fn render_replace(match_info: &Match) -> String {
32 | match_info.placeholder_values.get(&Var(p.ident.to_string())) | 33 | match_info.placeholder_values.get(&Var(p.ident.to_string())) |
33 | { | 34 | { |
34 | let range = &placeholder_value.range.range; | 35 | let range = &placeholder_value.range.range; |
35 | let mut matched_text = placeholder_value.node.text().to_string(); | 36 | let mut matched_text = if let Some(node) = &placeholder_value.node { |
37 | node.text().to_string() | ||
38 | } else { | ||
39 | let relative_range = range.checked_sub(match_start).unwrap(); | ||
40 | match_info.matched_node.text().to_string() | ||
41 | [usize::from(relative_range.start())..usize::from(relative_range.end())] | ||
42 | .to_string() | ||
43 | }; | ||
36 | let edit = | 44 | let edit = |
37 | matches_to_edit_at_offset(&placeholder_value.inner_matches, range.start()); | 45 | matches_to_edit_at_offset(&placeholder_value.inner_matches, range.start()); |
38 | edit.apply(&mut matched_text); | 46 | edit.apply(&mut matched_text); |
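When a placeholder has no `node` (the token-tree case), the replacement text is recovered by slicing the matched node's text with a range made relative to the match start. A self-contained sketch of that arithmetic with made-up offsets:

```rust
fn main() {
    // Hypothetical stand-ins for `match_info.matched_node` and the placeholder range:
    // the whole match `foo!(41, 1 + 2, 43)` starts at file offset 10, and the
    // placeholder's tokens `1 + 2` occupy file offsets 19..24.
    let matched_node_text = "foo!(41, 1 + 2, 43)";
    let match_start = 10usize;
    let (range_start, range_end) = (19usize, 24usize);

    // Same idea as `range.checked_sub(match_start)` followed by the string slice.
    let relative = range_start - match_start..range_end - match_start;
    let matched_text = &matched_node_text[relative];
    assert_eq!(matched_text, "1 + 2");
}
```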
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index 4b747fe18..3ee1e74e9 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -427,6 +427,45 @@ fn match_reordered_struct_instantiation() {
427 | } | 427 | } |
428 | 428 | ||
429 | #[test] | 429 | #[test] |
430 | fn match_macro_invocation() { | ||
431 | assert_matches("foo!($a)", "fn() {foo(foo!(foo()))}", &["foo!(foo())"]); | ||
432 | assert_matches("foo!(41, $a, 43)", "fn() {foo!(41, 42, 43)}", &["foo!(41, 42, 43)"]); | ||
433 | assert_no_match("foo!(50, $a, 43)", "fn() {foo!(41, 42, 43)}"); | ||
434 | assert_no_match("foo!(41, $a, 50)", "fn() {foo!(41, 42, 43)}"); | ||
435 | assert_matches("foo!($a())", "fn() {foo!(bar())}", &["foo!(bar())"]); | ||
436 | } | ||
437 | |||
438 | // When matching within a macro expansion, we only allow matches of nodes that originated from | ||
439 | // the macro call, not from the macro definition. | ||
440 | #[test] | ||
441 | fn no_match_expression_from_macro() { | ||
442 | assert_no_match( | ||
443 | "$a.clone()", | ||
444 | r#" | ||
445 | macro_rules! m1 { | ||
446 | () => {42.clone()} | ||
447 | } | ||
448 | fn f1() {m1!()} | ||
449 | "#, | ||
450 | ); | ||
451 | } | ||
452 | |||
453 | // We definitely don't want to allow matching of an expression where part originates from the | ||
454 | // macro call (`42`) and part from the macro definition (`.clone()`). | ||
455 | #[test] | ||
456 | fn no_match_split_expression() { | ||
457 | assert_no_match( | ||
458 | "$a.clone()", | ||
459 | r#" | ||
460 | macro_rules! m1 { | ||
461 | ($x:expr) => {$x.clone()} | ||
462 | } | ||
463 | fn f1() {m1!(42)} | ||
464 | "#, | ||
465 | ); | ||
466 | } | ||
467 | |||
468 | #[test] | ||
430 | fn replace_function_call() { | 469 | fn replace_function_call() { |
431 | assert_ssr_transform("foo() ==>> bar()", "fn f1() {foo(); foo();}", "fn f1() {bar(); bar();}"); | 470 | assert_ssr_transform("foo() ==>> bar()", "fn f1() {foo(); foo();}", "fn f1() {bar(); bar();}"); |
432 | } | 471 | } |
@@ -468,6 +507,20 @@ fn replace_struct_init() {
468 | } | 507 | } |
469 | 508 | ||
470 | #[test] | 509 | #[test] |
510 | fn replace_macro_invocations() { | ||
511 | assert_ssr_transform( | ||
512 | "try!($a) ==>> $a?", | ||
513 | "fn f1() -> Result<(), E> {bar(try!(foo()));}", | ||
514 | "fn f1() -> Result<(), E> {bar(foo()?);}", | ||
515 | ); | ||
516 | assert_ssr_transform( | ||
517 | "foo!($a($b)) ==>> foo($b, $a)", | ||
518 | "fn f1() {foo!(abc(def() + 2));}", | ||
519 | "fn f1() {foo(def() + 2, abc);}", | ||
520 | ); | ||
521 | } | ||
522 | |||
523 | #[test] | ||
471 | fn replace_binary_op() { | 524 | fn replace_binary_op() { |
472 | assert_ssr_transform( | 525 | assert_ssr_transform( |
473 | "$a + $b ==>> $b + $a", | 526 | "$a + $b ==>> $b + $a", |
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index ea714f49a..f1b7ed7fc 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -269,6 +269,10 @@ Gnome Builder currently has support for RLS, and there's no way to configure the
269 | 1. Rename, symlink or copy the `rust-analyzer` binary to `rls` and place it somewhere Builder can find (in `PATH`, or under `~/.cargo/bin`). | 269 | 1. Rename, symlink or copy the `rust-analyzer` binary to `rls` and place it somewhere Builder can find (in `PATH`, or under `~/.cargo/bin`). |
270 | 2. Enable the Rust Builder plugin. | 270 | 2. Enable the Rust Builder plugin. |
271 | 271 | ||
272 | ==== GNOME Builder (Nightly) | ||
273 | |||
274 | https://nightly.gnome.org/repo/appstream/org.gnome.Builder.flatpakref[GNOME Builder (Nightly)] now has native support for `rust-analyzer` out of the box. If the `rust-analyzer` binary is not available, GNOME Builder can install it when opening a Rust source file. | ||
275 | |||
272 | == Non-Cargo Based Projects | 276 | == Non-Cargo Based Projects |
273 | 277 | ||
274 | rust-analyzer does not require Cargo. | 278 | rust-analyzer does not require Cargo. |
diff --git a/editors/code/package.json b/editors/code/package.json
index e6ceb235f..68484a370 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -426,7 +426,7 @@
426 | "Full log" | 426 | "Full log" |
427 | ], | 427 | ], |
428 | "default": "off", | 428 | "default": "off", |
429 | "description": "Trace requests to the rust-analyzer" | 429 | "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)" |
430 | }, | 430 | }, |
431 | "rust-analyzer.trace.extension": { | 431 | "rust-analyzer.trace.extension": { |
432 | "description": "Enable logging of VS Code extensions itself", | 432 | "description": "Enable logging of VS Code extensions itself", |
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 670f2ebfd..12b4d0510 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -43,12 +43,16 @@ export async function activate(context: vscode.ExtensionContext) {
43 | const config = new Config(context); | 43 | const config = new Config(context); |
44 | const state = new PersistentState(context.globalState); | 44 | const state = new PersistentState(context.globalState); |
45 | const serverPath = await bootstrap(config, state).catch(err => { | 45 | const serverPath = await bootstrap(config, state).catch(err => { |
46 | let message = "Failed to bootstrap rust-analyzer."; | 46 | let message = "bootstrap error. "; |
47 | |||
47 | if (err.code === "EBUSY" || err.code === "ETXTBSY") { | 48 | if (err.code === "EBUSY" || err.code === "ETXTBSY") { |
48 | message += " Other vscode windows might be using rust-analyzer, " + | 49 | message += "Other vscode windows might be using rust-analyzer, "; |
49 | "you should close them and reload this window to retry."; | 50 | message += "you should close them and reload this window to retry. "; |
50 | } | 51 | } |
51 | message += " Open \"Help > Toggle Developer Tools > Console\" to see the logs"; | 52 | |
53 | message += 'Open "Help > Toggle Developer Tools > Console" to see the logs '; | ||
54 | message += '(enable verbose logs with "rust-analyzer.trace.extension")'; | ||
55 | |||
52 | log.error("Bootstrap error", err); | 56 | log.error("Bootstrap error", err); |
53 | throw new Error(message); | 57 | throw new Error(message); |
54 | }); | 58 | }); |
@@ -178,7 +182,11 @@ async function bootstrapExtension(config: Config, state: PersistentState): Promi
178 | assert(!!artifact, `Bad release: ${JSON.stringify(release)}`); | 182 | assert(!!artifact, `Bad release: ${JSON.stringify(release)}`); |
179 | 183 | ||
180 | const dest = path.join(config.globalStoragePath, "rust-analyzer.vsix"); | 184 | const dest = path.join(config.globalStoragePath, "rust-analyzer.vsix"); |
181 | await download(artifact.browser_download_url, dest, "Downloading rust-analyzer extension"); | 185 | await download({ |
186 | url: artifact.browser_download_url, | ||
187 | dest, | ||
188 | progressTitle: "Downloading rust-analyzer extension", | ||
189 | }); | ||
182 | 190 | ||
183 | await vscode.commands.executeCommand("workbench.extensions.installExtension", vscode.Uri.file(dest)); | 191 | await vscode.commands.executeCommand("workbench.extensions.installExtension", vscode.Uri.file(dest)); |
184 | await fs.unlink(dest); | 192 | await fs.unlink(dest); |
@@ -299,7 +307,12 @@ async function getServer(config: Config, state: PersistentState): Promise<string
299 | if (err.code !== "ENOENT") throw err; | 307 | if (err.code !== "ENOENT") throw err; |
300 | }); | 308 | }); |
301 | 309 | ||
302 | await download(artifact.browser_download_url, dest, "Downloading rust-analyzer server", { mode: 0o755 }); | 310 | await download({ |
311 | url: artifact.browser_download_url, | ||
312 | dest, | ||
313 | progressTitle: "Downloading rust-analyzer server", | ||
314 | mode: 0o755 | ||
315 | }); | ||
303 | 316 | ||
304 | // Patching executable if that's NixOS. | 317 | // Patching executable if that's NixOS. |
305 | if (await fs.stat("/etc/nixos").then(_ => true).catch(_ => false)) { | 318 | if (await fs.stat("/etc/nixos").then(_ => true).catch(_ => false)) { |
diff --git a/editors/code/src/net.ts b/editors/code/src/net.ts
index 9debdc57b..e02fd6d4f 100644
--- a/editors/code/src/net.ts
+++ b/editors/code/src/net.ts
@@ -60,32 +60,40 @@ export interface GithubRelease {
60 | }>; | 60 | }>; |
61 | } | 61 | } |
62 | 62 | ||
63 | interface DownloadOpts { | ||
64 | progressTitle: string; | ||
65 | url: string; | ||
66 | dest: string; | ||
67 | mode?: number; | ||
68 | } | ||
63 | 69 | ||
64 | export async function download( | 70 | export async function download(opts: DownloadOpts) { |
65 | downloadUrl: string, | 71 | // Put the artifact into a temporary folder to prevent partially downloaded files when user kills vscode |
66 | destinationPath: string, | 72 | await withTempDir(async tempDir => { |
67 | progressTitle: string, | 73 | const tempFile = path.join(tempDir, path.basename(opts.dest)); |
68 | { mode }: { mode?: number } = {}, | 74 | |
69 | ) { | 75 | await vscode.window.withProgress( |
70 | await vscode.window.withProgress( | 76 | { |
71 | { | 77 | location: vscode.ProgressLocation.Notification, |
72 | location: vscode.ProgressLocation.Notification, | 78 | cancellable: false, |
73 | cancellable: false, | 79 | title: opts.progressTitle |
74 | title: progressTitle | 80 | }, |
75 | }, | 81 | async (progress, _cancellationToken) => { |
76 | async (progress, _cancellationToken) => { | 82 | let lastPercentage = 0; |
77 | let lastPercentage = 0; | 83 | await downloadFile(opts.url, tempFile, opts.mode, (readBytes, totalBytes) => { |
78 | await downloadFile(downloadUrl, destinationPath, mode, (readBytes, totalBytes) => { | 84 | const newPercentage = (readBytes / totalBytes) * 100; |
79 | const newPercentage = (readBytes / totalBytes) * 100; | 85 | progress.report({ |
80 | progress.report({ | 86 | message: newPercentage.toFixed(0) + "%", |
81 | message: newPercentage.toFixed(0) + "%", | 87 | increment: newPercentage - lastPercentage |
82 | increment: newPercentage - lastPercentage | 88 | }); |
89 | |||
90 | lastPercentage = newPercentage; | ||
83 | }); | 91 | }); |
92 | } | ||
93 | ); | ||
84 | 94 | ||
85 | lastPercentage = newPercentage; | 95 | await moveFile(tempFile, opts.dest); |
86 | }); | 96 | }); |
87 | } | ||
88 | ); | ||
89 | } | 97 | } |
90 | 98 | ||
91 | /** | 99 | /** |
@@ -114,28 +122,23 @@ async function downloadFile(
114 | 122 | ||
115 | log.debug("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath); | 123 | log.debug("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath); |
116 | 124 | ||
117 | // Put the artifact into a temporary folder to prevent partially downloaded files when user kills vscode | 125 | let readBytes = 0; |
118 | await withTempFile(async tempFilePath => { | 126 | res.body.on("data", (chunk: Buffer) => { |
119 | const destFileStream = fs.createWriteStream(tempFilePath, { mode }); | 127 | readBytes += chunk.length; |
120 | 128 | onProgress(readBytes, totalBytes); | |
121 | let readBytes = 0; | 129 | }); |
122 | res.body.on("data", (chunk: Buffer) => { | ||
123 | readBytes += chunk.length; | ||
124 | onProgress(readBytes, totalBytes); | ||
125 | }); | ||
126 | 130 | ||
127 | await pipeline(res.body, destFileStream); | 131 | const destFileStream = fs.createWriteStream(destFilePath, { mode }); |
128 | await new Promise<void>(resolve => { | 132 | await pipeline(res.body, destFileStream); |
129 | destFileStream.on("close", resolve); | 133 | await new Promise<void>(resolve => { |
130 | destFileStream.destroy(); | 134 | destFileStream.on("close", resolve); |
131 | // This workaround is awaiting to be removed when vscode moves to newer nodejs version: | 135 | destFileStream.destroy(); |
132 | // https://github.com/rust-analyzer/rust-analyzer/issues/3167 | 136 | // This workaround is awaiting to be removed when vscode moves to newer nodejs version: |
133 | }); | 137 | // https://github.com/rust-analyzer/rust-analyzer/issues/3167 |
134 | await moveFile(tempFilePath, destFilePath); | ||
135 | }); | 138 | }); |
136 | } | 139 | } |
137 | 140 | ||
138 | async function withTempFile(scope: (tempFilePath: string) => Promise<void>) { | 141 | async function withTempDir(scope: (tempDirPath: string) => Promise<void>) { |
139 | // Based on the great article: https://advancedweb.hu/secure-tempfiles-in-nodejs-without-dependencies/ | 142 | // Based on the great article: https://advancedweb.hu/secure-tempfiles-in-nodejs-without-dependencies/ |
140 | 143 | ||
141 | // `.realpath()` should handle the cases where os.tmpdir() contains symlinks | 144 | // `.realpath()` should handle the cases where os.tmpdir() contains symlinks |
@@ -144,7 +147,7 @@ async function withTempFile(scope: (tempFilePath: string) => Promise<void>) {
144 | const tempDir = await fs.promises.mkdtemp(path.join(osTempDir, "rust-analyzer")); | 147 | const tempDir = await fs.promises.mkdtemp(path.join(osTempDir, "rust-analyzer")); |
145 | 148 | ||
146 | try { | 149 | try { |
147 | return await scope(path.join(tempDir, "file")); | 150 | return await scope(tempDir); |
148 | } finally { | 151 | } finally { |
149 | // We are good citizens :D | 152 | // We are good citizens :D |
150 | void fs.promises.rmdir(tempDir, { recursive: true }).catch(log.error); | 153 | void fs.promises.rmdir(tempDir, { recursive: true }).catch(log.error); |
@@ -161,6 +164,7 @@ async function moveFile(src: fs.PathLike, dest: fs.PathLike) {
161 | await fs.promises.unlink(src); | 164 | await fs.promises.unlink(src); |
162 | } else { | 165 | } else { |
163 | log.error(`Failed to rename the file ${src} -> ${dest}`, err); | 166 | log.error(`Failed to rename the file ${src} -> ${dest}`, err); |
167 | throw err; | ||
164 | } | 168 | } |
165 | } | 169 | } |
166 | } | 170 | } |
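The change in `net.ts` implements download-to-temp-then-move so an interrupted download can never leave a partial file at the destination, and `moveFile` now propagates a failed fallback. A rough sketch of the same pattern in Rust, with hypothetical paths and a stubbed-out download:

```rust
use std::{env, fs, io, path::Path};

fn download_to(dest: &Path, contents: &[u8]) -> io::Result<()> {
    // Write into a fresh temporary directory first, like `withTempDir` above.
    let temp_dir = env::temp_dir().join("rust-analyzer-download");
    fs::create_dir_all(&temp_dir)?;
    let temp_file = temp_dir.join(dest.file_name().expect("dest needs a file name"));
    fs::write(&temp_file, contents)?; // stand-in for the streamed HTTP download

    // Like `moveFile` in the diff: try a cheap rename, fall back to copy + unlink
    // when the temp directory and the destination are on different filesystems.
    if fs::rename(&temp_file, dest).is_err() {
        fs::copy(&temp_file, dest)?;
        fs::remove_file(&temp_file)?;
    }
    fs::remove_dir_all(&temp_dir)?;
    Ok(())
}

fn main() -> io::Result<()> {
    download_to(Path::new("rust-analyzer.vsix"), b"not a real artifact")
}
```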
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index fe3fb71cd..fec4c3295 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -26,7 +26,6 @@ export const log = new class {
26 | } | 26 | } |
27 | 27 | ||
28 | error(message?: any, ...optionalParams: any[]): void { | 28 | error(message?: any, ...optionalParams: any[]): void { |
29 | if (!log.enabled) return; | ||
30 | debugger; | 29 | debugger; |
31 | // eslint-disable-next-line no-console | 30 | // eslint-disable-next-line no-console |
32 | console.error(message, ...optionalParams); | 31 | console.error(message, ...optionalParams); |