-rw-r--r--  Cargo.lock                                      |   4
-rw-r--r--  crates/hir/src/code_model.rs                    |  36
-rw-r--r--  crates/ide/src/completion/complete_postfix.rs   |  12
-rw-r--r--  crates/ide/src/completion/completion_context.rs |  10
-rw-r--r--  crates/ide/src/completion/presentation.rs       | 123
-rw-r--r--  crates/rust-analyzer/Cargo.toml                 |   2
-rw-r--r--  crates/rust-analyzer/src/caps.rs                |  14
-rw-r--r--  editors/code/package.json                       |   9
-rw-r--r--  editors/code/src/main.ts                        |  98
-rw-r--r--  editors/code/src/net.ts                         |  18
-rw-r--r--  editors/code/src/persistent_state.ts            |  11
11 files changed, 300 insertions, 37 deletions
diff --git a/Cargo.lock b/Cargo.lock
index cf88c4e17..7fecee1b5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -775,9 +775,9 @@ dependencies = [
 
 [[package]]
 name = "lsp-types"
-version = "0.81.0"
+version = "0.82.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e02724627e9ef8ba91f461ebc01d48aebbd13a4b7c9dc547a0a2890f53e2171"
+checksum = "db895abb8527cf59e3de893ab2acf52cf904faeb65e60ea6f373e11fe86464e8"
 dependencies = [
  "base64",
  "bitflags",
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 849c8f6d0..a2a166e0a 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -709,11 +709,23 @@ impl Function {
     }
 
     pub fn params(self, db: &dyn HirDatabase) -> Vec<Param> {
+        let resolver = self.id.resolver(db.upcast());
+        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+        let environment = TraitEnvironment::lower(db, &resolver);
         db.function_data(self.id)
             .params
             .iter()
             .skip(if self.self_param(db).is_some() { 1 } else { 0 })
-            .map(|_| Param { _ty: () })
+            .map(|type_ref| {
+                let ty = Type {
+                    krate: self.id.lookup(db.upcast()).container.module(db.upcast()).krate,
+                    ty: InEnvironment {
+                        value: Ty::from_hir_ext(&ctx, type_ref).0,
+                        environment: environment.clone(),
+                    },
+                };
+                Param { ty }
+            })
             .collect()
     }
 
@@ -742,15 +754,21 @@ impl From<Mutability> for Access {
     }
 }
 
+pub struct Param {
+    ty: Type,
+}
+
+impl Param {
+    pub fn ty(&self) -> &Type {
+        &self.ty
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct SelfParam {
     func: FunctionId,
 }
 
-pub struct Param {
-    _ty: (),
-}
-
 impl SelfParam {
     pub fn access(self, db: &dyn HirDatabase) -> Access {
         let func_data = db.function_data(self.func);
@@ -1276,6 +1294,14 @@ impl Type {
         )
     }
 
+    pub fn remove_ref(&self) -> Option<Type> {
+        if let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_), .. }) = self.ty.value {
+            self.ty.value.substs().map(|substs| self.derived(substs[0].clone()))
+        } else {
+            None
+        }
+    }
+
     pub fn is_unknown(&self) -> bool {
         matches!(self.ty.value, Ty::Unknown)
     }
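
Aside (not part of the patch): a minimal sketch of how the new hir surface composes, assuming the `hir` crate's re-exports (`hir::Function`, `hir::Type`, and `HirDatabase` under `hir::db`). `Function::params` now yields `Param` values whose `ty()` is the lowered parameter type, and `Type::remove_ref` peels one `&`/`&mut` layer off a reference type.

    use hir::{db::HirDatabase, Function, Type};

    /// Collect the pointee type of every by-reference parameter of `func`.
    fn pointee_param_types(db: &dyn HirDatabase, func: Function) -> Vec<Type> {
        func.params(db)
            .iter()
            .filter_map(|param| param.ty().remove_ref()) // `None` for non-reference params
            .collect()
    }
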
diff --git a/crates/ide/src/completion/complete_postfix.rs b/crates/ide/src/completion/complete_postfix.rs
index 84c4e129d..29d38661b 100644
--- a/crates/ide/src/completion/complete_postfix.rs
+++ b/crates/ide/src/completion/complete_postfix.rs
@@ -189,6 +189,16 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         ctx,
         cap,
         &dot_receiver,
+        "dbgr",
+        "dbg!(&expr)",
+        &format!("dbg!(&{})", receiver_text),
+    )
+    .add_to(acc);
+
+    postfix_snippet(
+        ctx,
+        cap,
+        &dot_receiver,
         "call",
         "function(expr)",
         &format!("${{1}}({})", receiver_text),
@@ -263,6 +273,7 @@ fn main() {
                 sn box Box::new(expr)
                 sn call function(expr)
                 sn dbg dbg!(expr)
+                sn dbgr dbg!(&expr)
                 sn if if expr {}
                 sn match match expr {}
                 sn not !expr
@@ -286,6 +297,7 @@ fn main() {
                 sn box Box::new(expr)
                 sn call function(expr)
                 sn dbg dbg!(expr)
+                sn dbgr dbg!(&expr)
                 sn match match expr {}
                 sn ref &expr
                 sn refm &mut expr
diff --git a/crates/ide/src/completion/completion_context.rs b/crates/ide/src/completion/completion_context.rs
index 161f59c1e..671b13328 100644
--- a/crates/ide/src/completion/completion_context.rs
+++ b/crates/ide/src/completion/completion_context.rs
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
 use base_db::SourceDatabase;
-use hir::{Semantics, SemanticsScope, Type};
+use hir::{Local, ScopeDef, Semantics, SemanticsScope, Type};
 use ide_db::RootDatabase;
 use syntax::{
     algo::{find_covering_element, find_node_at_offset},
@@ -91,6 +91,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) impl_as_prev_sibling: bool,
     pub(super) is_match_arm: bool,
     pub(super) has_item_list_or_source_file_parent: bool,
+    pub(super) locals: Vec<(String, Local)>,
 }
 
 impl<'a> CompletionContext<'a> {
@@ -119,6 +120,12 @@ impl<'a> CompletionContext<'a> {
             original_file.syntax().token_at_offset(position.offset).left_biased()?;
         let token = sema.descend_into_macros(original_token.clone());
         let scope = sema.scope_at_offset(&token.parent(), position.offset);
+        let mut locals = vec![];
+        scope.process_all_names(&mut |name, scope| {
+            if let ScopeDef::Local(local) = scope {
+                locals.push((name.to_string(), local));
+            }
+        });
         let mut ctx = CompletionContext {
             sema,
             scope,
@@ -167,6 +174,7 @@ impl<'a> CompletionContext<'a> {
             if_is_prev: false,
             is_match_arm: false,
             has_item_list_or_source_file_parent: false,
+            locals,
         };
 
         let mut original_file = original_file.syntax().clone();
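
Aside (not part of the patch): the `(name, Local)` pairs collected above are what later lets completion rendering match a function parameter against a same-named local in scope. A hypothetical helper inside the completion module could look one up like this (the function name is illustrative, not from the codebase):

    fn find_local<'a>(ctx: &'a CompletionContext, name: &str) -> Option<&'a hir::Local> {
        // `locals` is `pub(super)`, so sibling modules under completion/ can read it.
        ctx.locals.iter().find(|(n, _)| n == name).map(|(_, local)| local)
    }
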
diff --git a/crates/ide/src/completion/presentation.rs b/crates/ide/src/completion/presentation.rs
index 24c507f9b..987cbfa7a 100644
--- a/crates/ide/src/completion/presentation.rs
+++ b/crates/ide/src/completion/presentation.rs
@@ -191,6 +191,17 @@ impl Completions {
         func: hir::Function,
         local_name: Option<String>,
     ) {
+        fn add_arg(arg: &str, ty: &Type, ctx: &CompletionContext) -> String {
+            if let Some(derefed_ty) = ty.remove_ref() {
+                for (name, local) in ctx.locals.iter() {
+                    if name == arg && local.ty(ctx.db) == derefed_ty {
+                        return (if ty.is_mutable_reference() { "&mut " } else { "&" }).to_string()
+                            + &arg.to_string();
+                    }
+                }
+            }
+            arg.to_string()
+        };
         let name = local_name.unwrap_or_else(|| func.name(ctx.db).to_string());
         let ast_node = func.source(ctx.db).value;
 
@@ -205,12 +216,20 @@
             .set_deprecated(is_deprecated(func, ctx.db))
             .detail(function_declaration(&ast_node));
 
+        let params_ty = func.params(ctx.db);
         let params = ast_node
             .param_list()
             .into_iter()
             .flat_map(|it| it.params())
-            .flat_map(|it| it.pat())
-            .map(|pat| pat.to_string().trim_start_matches('_').into())
+            .zip(params_ty)
+            .flat_map(|(it, param_ty)| {
+                if let Some(pat) = it.pat() {
+                    let name = pat.to_string();
+                    let arg = name.trim_start_matches("mut ").trim_start_matches('_');
+                    return Some(add_arg(arg, param_ty.ty(), ctx));
+                }
+                None
+            })
             .collect();
 
         builder = builder.add_call_parens(ctx, name, Params::Named(params));
@@ -864,6 +883,106 @@ fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 }
     }
 
     #[test]
+    fn insert_ref_when_matching_local_in_scope() {
+        check_edit(
+            "ref_arg",
+            r#"
+struct Foo {}
+fn ref_arg(x: &Foo) {}
+fn main() {
+    let x = Foo {};
+    ref_ar<|>
+}
+"#,
+            r#"
+struct Foo {}
+fn ref_arg(x: &Foo) {}
+fn main() {
+    let x = Foo {};
+    ref_arg(${1:&x})$0
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn insert_mut_ref_when_matching_local_in_scope() {
+        check_edit(
+            "ref_arg",
+            r#"
+struct Foo {}
+fn ref_arg(x: &mut Foo) {}
+fn main() {
+    let x = Foo {};
+    ref_ar<|>
+}
+"#,
+            r#"
+struct Foo {}
+fn ref_arg(x: &mut Foo) {}
+fn main() {
+    let x = Foo {};
+    ref_arg(${1:&mut x})$0
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn insert_ref_when_matching_local_in_scope_for_method() {
+        check_edit(
+            "apply_foo",
+            r#"
+struct Foo {}
+struct Bar {}
+impl Bar {
+    fn apply_foo(&self, x: &Foo) {}
+}
+
+fn main() {
+    let x = Foo {};
+    let y = Bar {};
+    y.<|>
+}
+"#,
+            r#"
+struct Foo {}
+struct Bar {}
+impl Bar {
+    fn apply_foo(&self, x: &Foo) {}
+}
+
+fn main() {
+    let x = Foo {};
+    let y = Bar {};
+    y.apply_foo(${1:&x})$0
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn trim_mut_keyword_in_func_completion() {
+        check_edit(
+            "take_mutably",
+            r#"
+fn take_mutably(mut x: &i32) {}
+
+fn main() {
+    take_m<|>
+}
+"#,
+            r#"
+fn take_mutably(mut x: &i32) {}
+
+fn main() {
+    take_mutably(${1:x})$0
+}
+"#,
+        );
+    }
+
+    #[test]
     fn inserts_parens_for_tuple_enums() {
         mark::check!(inserts_parens_for_tuple_enums);
         check_edit(
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 0fbb9cb0d..631ffc4a7 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -21,7 +21,7 @@ env_logger = { version = "0.7.1", default-features = false }
 itertools = "0.9.0"
 jod-thread = "0.1.0"
 log = "0.4.8"
-lsp-types = { version = "0.81.0", features = ["proposed"] }
+lsp-types = { version = "0.82.0", features = ["proposed"] }
 parking_lot = "0.11.0"
 pico-args = "0.3.1"
 oorandom = "11.1.2"
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index de4bc2813..c589afeaf 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -5,7 +5,7 @@ use lsp_types::{
     CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions,
     CodeActionProviderCapability, CodeLensOptions, CompletionOptions,
     DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability, HoverProviderCapability,
-    ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
+    ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensFullOptions, SemanticTokensLegend,
     SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
     TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
@@ -42,16 +42,16 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti
             work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
         }),
         declaration_provider: None,
-        definition_provider: Some(true),
+        definition_provider: Some(OneOf::Left(true)),
         type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)),
         implementation_provider: Some(ImplementationProviderCapability::Simple(true)),
-        references_provider: Some(true),
-        document_highlight_provider: Some(true),
-        document_symbol_provider: Some(true),
+        references_provider: Some(OneOf::Left(true)),
+        document_highlight_provider: Some(OneOf::Left(true)),
+        document_symbol_provider: Some(OneOf::Left(true)),
         workspace_symbol_provider: Some(true),
         code_action_provider: Some(code_action_provider),
         code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }),
-        document_formatting_provider: Some(true),
+        document_formatting_provider: Some(OneOf::Left(true)),
         document_range_formatting_provider: None,
         document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
            first_trigger_character: "=".to_string(),
@@ -60,7 +60,7 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti
         selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
         semantic_highlighting: None,
         folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
-        rename_provider: Some(RenameProviderCapability::Options(RenameOptions {
+        rename_provider: Some(OneOf::Right(RenameOptions {
            prepare_provider: Some(true),
            work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
         })),
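
Aside (not part of the patch): the churn above comes from lsp-types 0.82 modelling these server capabilities as `OneOf<bool, Options>` instead of plain booleans, so a bare `true` goes on the `Left` side and an options struct on the `Right`, as with `rename_provider`. A minimal sketch of the two shapes, using only types that appear in the diff:

    use lsp_types::{OneOf, RenameOptions, WorkDoneProgressOptions};

    fn simple_capability() -> OneOf<bool, RenameOptions> {
        OneOf::Left(true)
    }

    fn rename_capability() -> OneOf<bool, RenameOptions> {
        OneOf::Right(RenameOptions {
            prepare_provider: Some(true),
            work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
        })
    }
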
diff --git a/editors/code/package.json b/editors/code/package.json
index c57fbdda2..132664926 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -159,6 +159,11 @@
                 "category": "Rust Analyzer"
             },
             {
+                "command": "rust-analyzer.updateGithubToken",
+                "title": "Update Github API token",
+                "category": "Rust Analyzer"
+            },
+            {
                 "command": "rust-analyzer.onEnter",
                 "title": "Enhanced enter key",
                 "category": "Rust Analyzer"
@@ -985,6 +990,10 @@
                     "when": "inRustProject"
                 },
                 {
+                    "command": "rust-analyzer.updateGithubToken",
+                    "when": "inRustProject"
+                },
+                {
                     "command": "rust-analyzer.onEnter",
                     "when": "inRustProject"
                 },
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index bd99d696a..2896d90ac 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -95,6 +95,10 @@ async function tryActivate(context: vscode.ExtensionContext) {
         await activate(context).catch(log.error);
     });
 
+    ctx.registerCommand('updateGithubToken', ctx => async () => {
+        await queryForGithubToken(new PersistentState(ctx.globalState));
+    });
+
     ctx.registerCommand('analyzerStatus', commands.analyzerStatus);
     ctx.registerCommand('memoryUsage', commands.memoryUsage);
     ctx.registerCommand('reloadWorkspace', commands.reloadWorkspace);
@@ -173,7 +177,9 @@ async function bootstrapExtension(config: Config, state: PersistentState): Promi
         if (!shouldCheckForNewNightly) return;
     }
 
-    const release = await fetchRelease("nightly").catch((e) => {
+    const release = await downloadWithRetryDialog(state, async () => {
+        return await fetchRelease("nightly", state.githubToken);
+    }).catch((e) => {
         log.error(e);
         if (state.releaseId === undefined) { // Show error only for the initial download
             vscode.window.showErrorMessage(`Failed to download rust-analyzer nightly ${e}`);
@@ -192,10 +198,14 @@ async function bootstrapExtension(config: Config, state: PersistentState): Promi
     assert(!!artifact, `Bad release: ${JSON.stringify(release)}`);
 
     const dest = path.join(config.globalStoragePath, "rust-analyzer.vsix");
-    await download({
-        url: artifact.browser_download_url,
-        dest,
-        progressTitle: "Downloading rust-analyzer extension",
+
+    await downloadWithRetryDialog(state, async () => {
+        await download({
+            url: artifact.browser_download_url,
+            dest,
+            progressTitle: "Downloading rust-analyzer extension",
+            overwrite: true,
+        });
     });
 
     await vscode.commands.executeCommand("workbench.extensions.installExtension", vscode.Uri.file(dest));
@@ -308,21 +318,22 @@ async function getServer(config: Config, state: PersistentState): Promise<string
         if (userResponse !== "Download now") return dest;
     }
 
-    const release = await fetchRelease(config.package.releaseTag);
+    const releaseTag = config.package.releaseTag;
+    const release = await downloadWithRetryDialog(state, async () => {
+        return await fetchRelease(releaseTag, state.githubToken);
+    });
     const artifact = release.assets.find(artifact => artifact.name === `rust-analyzer-${platform}.gz`);
     assert(!!artifact, `Bad release: ${JSON.stringify(release)}`);
 
-    // Unlinking the exe file before moving new one on its place should prevent ETXTBSY error.
-    await fs.unlink(dest).catch(err => {
-        if (err.code !== "ENOENT") throw err;
-    });
-
-    await download({
-        url: artifact.browser_download_url,
-        dest,
-        progressTitle: "Downloading rust-analyzer server",
-        gunzip: true,
-        mode: 0o755
+    await downloadWithRetryDialog(state, async () => {
+        await download({
+            url: artifact.browser_download_url,
+            dest,
+            progressTitle: "Downloading rust-analyzer server",
+            gunzip: true,
+            mode: 0o755,
+            overwrite: true,
+        });
     });
 
     // Patching executable if that's NixOS.
@@ -333,3 +344,56 @@ async function getServer(config: Config, state: PersistentState): Promise<string
     await state.updateServerVersion(config.package.version);
     return dest;
 }
+
+async function downloadWithRetryDialog<T>(state: PersistentState, downloadFunc: () => Promise<T>): Promise<T> {
+    while (true) {
+        try {
+            return await downloadFunc();
+        } catch (e) {
+            const selected = await vscode.window.showErrorMessage("Failed to download: " + e.message, {}, {
+                title: "Update Github Auth Token",
+                updateToken: true,
+            }, {
+                title: "Retry download",
+                retry: true,
+            }, {
+                title: "Dismiss",
+            });
+
+            if (selected?.updateToken) {
+                await queryForGithubToken(state);
+                continue;
+            } else if (selected?.retry) {
+                continue;
+            }
+            throw e;
+        };
+    }
+}
+
+async function queryForGithubToken(state: PersistentState): Promise<void> {
+    const githubTokenOptions: vscode.InputBoxOptions = {
+        value: state.githubToken,
+        password: true,
+        prompt: `
+            This dialog allows to store a Github authorization token.
+            The usage of an authorization token will increase the rate
+            limit on the use of Github APIs and can thereby prevent getting
+            throttled.
+            Auth tokens can be created at https://github.com/settings/tokens`,
+    };
+
+    const newToken = await vscode.window.showInputBox(githubTokenOptions);
+    if (newToken === undefined) {
+        // The user aborted the dialog => Do not update the stored token
+        return;
+    }
+
+    if (newToken === "") {
+        log.info("Clearing github token");
+        await state.updateGithubToken(undefined);
+    } else {
+        log.info("Storing new github token");
+        await state.updateGithubToken(newToken);
+    }
+}
diff --git a/editors/code/src/net.ts b/editors/code/src/net.ts
index 5eba2728d..9ba17b7b5 100644
--- a/editors/code/src/net.ts
+++ b/editors/code/src/net.ts
@@ -18,7 +18,8 @@ const OWNER = "rust-analyzer";
 const REPO = "rust-analyzer";
 
 export async function fetchRelease(
-    releaseTag: string
+    releaseTag: string,
+    githubToken: string | null | undefined,
 ): Promise<GithubRelease> {
 
     const apiEndpointPath = `/repos/${OWNER}/${REPO}/releases/tags/${releaseTag}`;
@@ -27,7 +28,12 @@ export async function fetchRelease(
 
     log.debug("Issuing request for released artifacts metadata to", requestUrl);
 
-    const response = await fetch(requestUrl, { headers: { Accept: "application/vnd.github.v3+json" } });
+    const headers: Record<string, string> = { Accept: "application/vnd.github.v3+json" };
+    if (githubToken != null) {
+        headers.Authorization = "token " + githubToken;
+    }
+
+    const response = await fetch(requestUrl, { headers: headers });
 
     if (!response.ok) {
         log.error("Error fetching artifact release info", {
@@ -70,6 +76,7 @@ interface DownloadOpts {
     dest: string;
     mode?: number;
     gunzip?: boolean;
+    overwrite?: boolean;
 }
 
 export async function download(opts: DownloadOpts) {
@@ -79,6 +86,13 @@ export async function download(opts: DownloadOpts) {
     const randomHex = crypto.randomBytes(5).toString("hex");
     const tempFile = path.join(dest.dir, `${dest.name}${randomHex}`);
 
+    if (opts.overwrite) {
+        // Unlinking the exe file before moving new one on its place should prevent ETXTBSY error.
+        await fs.promises.unlink(opts.dest).catch(err => {
+            if (err.code !== "ENOENT") throw err;
+        });
+    }
+
     await vscode.window.withProgress(
         {
             location: vscode.ProgressLocation.Notification,
diff --git a/editors/code/src/persistent_state.ts b/editors/code/src/persistent_state.ts
index 5705eed81..afb652589 100644
--- a/editors/code/src/persistent_state.ts
+++ b/editors/code/src/persistent_state.ts
@@ -38,4 +38,15 @@ export class PersistentState {
     async updateServerVersion(value: string | undefined) {
         await this.globalState.update("serverVersion", value);
     }
+
+    /**
+     * Github authorization token.
+     * This is used for API requests against the Github API.
+     */
+    get githubToken(): string | undefined {
+        return this.globalState.get("githubToken");
+    }
+    async updateGithubToken(value: string | undefined) {
+        await this.globalState.update("githubToken", value);
+    }
 }