aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/release.yaml1
-rw-r--r--Cargo.lock23
-rw-r--r--crates/ra_hir_expand/src/db.rs99
-rw-r--r--crates/ra_hir_ty/src/tests/macros.rs8
-rw-r--r--crates/ra_ide/src/completion/complete_dot.rs50
-rw-r--r--crates/ra_ide/src/completion/complete_pattern.rs13
-rw-r--r--crates/ra_ide/src/completion/complete_scope.rs92
-rw-r--r--crates/ra_ide/src/completion/completion_context.rs2
-rw-r--r--crates/ra_ide/src/expand_macro.rs2
-rw-r--r--crates/ra_mbe/src/lib.rs32
-rw-r--r--crates/ra_mbe/src/mbe_expander.rs61
-rw-r--r--crates/ra_mbe/src/mbe_expander/matcher.rs212
-rw-r--r--crates/ra_mbe/src/mbe_expander/transcriber.rs69
-rw-r--r--crates/ra_mbe/src/tests.rs4
-rw-r--r--crates/ra_parser/src/grammar/expressions/atom.rs2
-rw-r--r--crates/ra_tt/src/lib.rs6
-rw-r--r--editors/code/src/commands/server_version.ts2
-rw-r--r--editors/code/src/config.ts41
-rw-r--r--editors/code/src/ctx.ts6
-rw-r--r--editors/code/src/installation/extension.ts16
-rw-r--r--editors/code/src/installation/server.ts21
-rw-r--r--editors/code/src/main.ts10
-rw-r--r--editors/code/src/persistent_state.ts49
-rw-r--r--xtask/tests/tidy-tests/cli.rs25
-rw-r--r--xtask/tests/tidy-tests/docs.rs106
-rw-r--r--xtask/tests/tidy-tests/main.rs145
26 files changed, 704 insertions, 393 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 0434b6128..21ac3a4bc 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -11,6 +11,7 @@ jobs:
11 dist: 11 dist:
12 name: dist 12 name: dist
13 runs-on: ${{ matrix.os }} 13 runs-on: ${{ matrix.os }}
14 if: github.repository == "rust-analyzer/rust-analyzer"
14 strategy: 15 strategy:
15 matrix: 16 matrix:
16 os: [ubuntu-latest, windows-latest, macos-latest] 17 os: [ubuntu-latest, windows-latest, macos-latest]
diff --git a/Cargo.lock b/Cargo.lock
index efe8dd189..f6df77206 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -419,9 +419,9 @@ dependencies = [
419 419
420[[package]] 420[[package]]
421name = "globset" 421name = "globset"
422version = "0.4.4" 422version = "0.4.5"
423source = "registry+https://github.com/rust-lang/crates.io-index" 423source = "registry+https://github.com/rust-lang/crates.io-index"
424checksum = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" 424checksum = "7ad1da430bd7281dde2576f44c84cc3f0f7b475e7202cd503042dff01a8c8120"
425dependencies = [ 425dependencies = [
426 "aho-corasick", 426 "aho-corasick",
427 "bstr", 427 "bstr",
@@ -668,11 +668,11 @@ checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
668 668
669[[package]] 669[[package]]
670name = "memoffset" 670name = "memoffset"
671version = "0.5.3" 671version = "0.5.4"
672source = "registry+https://github.com/rust-lang/crates.io-index" 672source = "registry+https://github.com/rust-lang/crates.io-index"
673checksum = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9" 673checksum = "b4fc2c02a7e374099d4ee95a193111f72d2110197fe200272371758f6c3643d8"
674dependencies = [ 674dependencies = [
675 "rustc_version", 675 "autocfg",
676] 676]
677 677
678[[package]] 678[[package]]
@@ -1133,9 +1133,9 @@ dependencies = [
1133 1133
1134[[package]] 1134[[package]]
1135name = "ra_vfs" 1135name = "ra_vfs"
1136version = "0.5.2" 1136version = "0.5.3"
1137source = "registry+https://github.com/rust-lang/crates.io-index" 1137source = "registry+https://github.com/rust-lang/crates.io-index"
1138checksum = "bc898f237e4b4498959ae0100c688793a23e77624d44ef710ba70094217f98e0" 1138checksum = "58a265769d5e5655345a9fcbd870a1a7c3658558c0d8efaed79e0669358f46b8"
1139dependencies = [ 1139dependencies = [
1140 "crossbeam-channel", 1140 "crossbeam-channel",
1141 "jod-thread", 1141 "jod-thread",
@@ -1332,15 +1332,6 @@ dependencies = [
1332] 1332]
1333 1333
1334[[package]] 1334[[package]]
1335name = "rustc_version"
1336version = "0.2.3"
1337source = "registry+https://github.com/rust-lang/crates.io-index"
1338checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
1339dependencies = [
1340 "semver",
1341]
1342
1343[[package]]
1344name = "ryu" 1335name = "ryu"
1345version = "1.0.3" 1336version = "1.0.3"
1346source = "registry+https://github.com/rust-lang/crates.io-index" 1337source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index c3e1c68b7..d171d2dfd 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -2,7 +2,7 @@
2 2
3use std::sync::Arc; 3use std::sync::Arc;
4 4
5use mbe::MacroRules; 5use mbe::{ExpandResult, MacroRules};
6use ra_db::{salsa, SourceDatabase}; 6use ra_db::{salsa, SourceDatabase};
7use ra_parser::FragmentKind; 7use ra_parser::FragmentKind;
8use ra_prof::profile; 8use ra_prof::profile;
@@ -27,11 +27,12 @@ impl TokenExpander {
27 db: &dyn AstDatabase, 27 db: &dyn AstDatabase,
28 id: LazyMacroId, 28 id: LazyMacroId,
29 tt: &tt::Subtree, 29 tt: &tt::Subtree,
30 ) -> Result<tt::Subtree, mbe::ExpandError> { 30 ) -> mbe::ExpandResult<tt::Subtree> {
31 match self { 31 match self {
32 TokenExpander::MacroRules(it) => it.expand(tt), 32 TokenExpander::MacroRules(it) => it.expand(tt),
33 TokenExpander::Builtin(it) => it.expand(db, id, tt), 33 // FIXME switch these to ExpandResult as well
34 TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt), 34 TokenExpander::Builtin(it) => it.expand(db, id, tt).into(),
35 TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
35 } 36 }
36 } 37 }
37 38
@@ -66,7 +67,7 @@ pub trait AstDatabase: SourceDatabase {
66 fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; 67 fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
67 fn parse_macro(&self, macro_file: MacroFile) 68 fn parse_macro(&self, macro_file: MacroFile)
68 -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>; 69 -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
69 fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>; 70 fn macro_expand(&self, macro_call: MacroCallId) -> (Option<Arc<tt::Subtree>>, Option<String>);
70 71
71 #[salsa::interned] 72 #[salsa::interned]
72 fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; 73 fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
@@ -153,7 +154,7 @@ pub(crate) fn macro_arg(
153pub(crate) fn macro_expand( 154pub(crate) fn macro_expand(
154 db: &dyn AstDatabase, 155 db: &dyn AstDatabase,
155 id: MacroCallId, 156 id: MacroCallId,
156) -> Result<Arc<tt::Subtree>, String> { 157) -> (Option<Arc<tt::Subtree>>, Option<String>) {
157 macro_expand_with_arg(db, id, None) 158 macro_expand_with_arg(db, id, None)
158} 159}
159 160
@@ -174,31 +175,38 @@ fn macro_expand_with_arg(
174 db: &dyn AstDatabase, 175 db: &dyn AstDatabase,
175 id: MacroCallId, 176 id: MacroCallId,
176 arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, 177 arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
177) -> Result<Arc<tt::Subtree>, String> { 178) -> (Option<Arc<tt::Subtree>>, Option<String>) {
178 let lazy_id = match id { 179 let lazy_id = match id {
179 MacroCallId::LazyMacro(id) => id, 180 MacroCallId::LazyMacro(id) => id,
180 MacroCallId::EagerMacro(id) => { 181 MacroCallId::EagerMacro(id) => {
181 if arg.is_some() { 182 if arg.is_some() {
182 return Err( 183 return (
183 "hypothetical macro expansion not implemented for eager macro".to_owned() 184 None,
185 Some("hypothetical macro expansion not implemented for eager macro".to_owned()),
184 ); 186 );
185 } else { 187 } else {
186 return Ok(db.lookup_intern_eager_expansion(id).subtree); 188 return (Some(db.lookup_intern_eager_expansion(id).subtree), None);
187 } 189 }
188 } 190 }
189 }; 191 };
190 192
191 let loc = db.lookup_intern_macro(lazy_id); 193 let loc = db.lookup_intern_macro(lazy_id);
192 let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?; 194 let macro_arg = match arg.or_else(|| db.macro_arg(id)) {
195 Some(it) => it,
196 None => return (None, Some("Fail to args in to tt::TokenTree".into())),
197 };
193 198
194 let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; 199 let macro_rules = match db.macro_def(loc.def) {
195 let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?; 200 Some(it) => it,
201 None => return (None, Some("Fail to find macro definition".into())),
202 };
203 let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, &macro_arg.0);
196 // Set a hard limit for the expanded tt 204 // Set a hard limit for the expanded tt
197 let count = tt.count(); 205 let count = tt.count();
198 if count > 65536 { 206 if count > 65536 {
199 return Err(format!("Total tokens count exceed limit : count = {}", count)); 207 return (None, Some(format!("Total tokens count exceed limit : count = {}", count)));
200 } 208 }
201 Ok(Arc::new(tt)) 209 (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e)))
202} 210}
203 211
204pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { 212pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@@ -225,42 +233,41 @@ pub fn parse_macro_with_arg(
225 let _p = profile("parse_macro_query"); 233 let _p = profile("parse_macro_query");
226 234
227 let macro_call_id = macro_file.macro_call_id; 235 let macro_call_id = macro_file.macro_call_id;
228 let expansion = if let Some(arg) = arg { 236 let (tt, err) = if let Some(arg) = arg {
229 macro_expand_with_arg(db, macro_call_id, Some(arg)) 237 macro_expand_with_arg(db, macro_call_id, Some(arg))
230 } else { 238 } else {
231 db.macro_expand(macro_call_id) 239 db.macro_expand(macro_call_id)
232 }; 240 };
233 let tt = expansion 241 if let Some(err) = err {
234 .map_err(|err| { 242 // Note:
235 // Note: 243 // The final goal we would like to make all parse_macro success,
236 // The final goal we would like to make all parse_macro success, 244 // such that the following log will not call anyway.
237 // such that the following log will not call anyway. 245 match macro_call_id {
238 match macro_call_id { 246 MacroCallId::LazyMacro(id) => {
239 MacroCallId::LazyMacro(id) => { 247 let loc: MacroCallLoc = db.lookup_intern_macro(id);
240 let loc: MacroCallLoc = db.lookup_intern_macro(id); 248 let node = loc.kind.node(db);
241 let node = loc.kind.node(db); 249
242 250 // collect parent information for warning log
243 // collect parent information for warning log 251 let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
244 let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { 252 it.file_id.call_node(db)
245 it.file_id.call_node(db) 253 })
246 }) 254 .map(|n| format!("{:#}", n.value))
247 .map(|n| format!("{:#}", n.value)) 255 .collect::<Vec<_>>()
248 .collect::<Vec<_>>() 256 .join("\n");
249 .join("\n"); 257
250 258 log::warn!(
251 log::warn!( 259 "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
252 "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", 260 err,
253 err, 261 node.value,
254 node.value, 262 parents
255 parents 263 );
256 ); 264 }
257 } 265 _ => {
258 _ => { 266 log::warn!("fail on macro_parse: (reason: {})", err);
259 log::warn!("fail on macro_parse: (reason: {})", err);
260 }
261 } 267 }
262 }) 268 }
263 .ok()?; 269 };
270 let tt = tt?;
264 271
265 let fragment_kind = to_fragment_kind(db, macro_call_id); 272 let fragment_kind = to_fragment_kind(db, macro_call_id);
266 273
diff --git a/crates/ra_hir_ty/src/tests/macros.rs b/crates/ra_hir_ty/src/tests/macros.rs
index 3b7022ad5..2e309a379 100644
--- a/crates/ra_hir_ty/src/tests/macros.rs
+++ b/crates/ra_hir_ty/src/tests/macros.rs
@@ -462,7 +462,7 @@ fn main() {
462fn infer_builtin_macros_include() { 462fn infer_builtin_macros_include() {
463 let (db, pos) = TestDB::with_position( 463 let (db, pos) = TestDB::with_position(
464 r#" 464 r#"
465//- /main.rs 465//- /main.rs
466#[rustc_builtin_macro] 466#[rustc_builtin_macro]
467macro_rules! include {() => {}} 467macro_rules! include {() => {}}
468 468
@@ -483,7 +483,7 @@ fn bar() -> u32 {0}
483fn infer_builtin_macros_include_concat() { 483fn infer_builtin_macros_include_concat() {
484 let (db, pos) = TestDB::with_position( 484 let (db, pos) = TestDB::with_position(
485 r#" 485 r#"
486//- /main.rs 486//- /main.rs
487#[rustc_builtin_macro] 487#[rustc_builtin_macro]
488macro_rules! include {() => {}} 488macro_rules! include {() => {}}
489 489
@@ -507,7 +507,7 @@ fn bar() -> u32 {0}
507fn infer_builtin_macros_include_concat_with_bad_env_should_failed() { 507fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
508 let (db, pos) = TestDB::with_position( 508 let (db, pos) = TestDB::with_position(
509 r#" 509 r#"
510//- /main.rs 510//- /main.rs
511#[rustc_builtin_macro] 511#[rustc_builtin_macro]
512macro_rules! include {() => {}} 512macro_rules! include {() => {}}
513 513
@@ -534,7 +534,7 @@ fn bar() -> u32 {0}
534fn infer_builtin_macros_include_itself_should_failed() { 534fn infer_builtin_macros_include_itself_should_failed() {
535 let (db, pos) = TestDB::with_position( 535 let (db, pos) = TestDB::with_position(
536 r#" 536 r#"
537//- /main.rs 537//- /main.rs
538#[rustc_builtin_macro] 538#[rustc_builtin_macro]
539macro_rules! include {() => {}} 539macro_rules! include {() => {}}
540 540
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs
index f07611d88..82ec16913 100644
--- a/crates/ra_ide/src/completion/complete_dot.rs
+++ b/crates/ra_ide/src/completion/complete_dot.rs
@@ -720,7 +720,18 @@ mod tests {
720 } 720 }
721 ", 721 ",
722 ), 722 ),
723 @r###"[]"### 723 @r###"
724 [
725 CompletionItem {
726 label: "the_field",
727 source_range: [156; 156),
728 delete: [156; 156),
729 insert: "the_field",
730 kind: Field,
731 detail: "u32",
732 },
733 ]
734 "###
724 ); 735 );
725 } 736 }
726 737
@@ -752,6 +763,43 @@ mod tests {
752 } 763 }
753 764
754 #[test] 765 #[test]
766 fn macro_expansion_resilient() {
767 assert_debug_snapshot!(
768 do_ref_completion(
769 r"
770 macro_rules! dbg {
771 () => {};
772 ($val:expr) => {
773 match $val { tmp => { tmp } }
774 };
775 // Trailing comma with single argument is ignored
776 ($val:expr,) => { $crate::dbg!($val) };
777 ($($val:expr),+ $(,)?) => {
778 ($($crate::dbg!($val)),+,)
779 };
780 }
781 struct A { the_field: u32 }
782 fn foo(a: A) {
783 dbg!(a.<|>)
784 }
785 ",
786 ),
787 @r###"
788 [
789 CompletionItem {
790 label: "the_field",
791 source_range: [552; 552),
792 delete: [552; 552),
793 insert: "the_field",
794 kind: Field,
795 detail: "u32",
796 },
797 ]
798 "###
799 );
800 }
801
802 #[test]
755 fn test_method_completion_3547() { 803 fn test_method_completion_3547() {
756 assert_debug_snapshot!( 804 assert_debug_snapshot!(
757 do_ref_completion( 805 do_ref_completion(
diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs
index 6a1a66ef1..cb84bb934 100644
--- a/crates/ra_ide/src/completion/complete_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_pattern.rs
@@ -89,7 +89,6 @@ mod tests {
89 89
90 #[test] 90 #[test]
91 fn completes_in_simple_macro_call() { 91 fn completes_in_simple_macro_call() {
92 // FIXME: doesn't work yet because of missing error recovery in macro expansion
93 let completions = complete( 92 let completions = complete(
94 r" 93 r"
95 macro_rules! m { ($e:expr) => { $e } } 94 macro_rules! m { ($e:expr) => { $e } }
@@ -102,6 +101,16 @@ mod tests {
102 } 101 }
103 ", 102 ",
104 ); 103 );
105 assert_debug_snapshot!(completions, @r###"[]"###); 104 assert_debug_snapshot!(completions, @r###"
105 [
106 CompletionItem {
107 label: "E",
108 source_range: [151; 151),
109 delete: [151; 151),
110 insert: "E",
111 kind: Enum,
112 },
113 ]
114 "###);
106 } 115 }
107} 116}
diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs
index 5ffff5a1c..81d3cc1b6 100644
--- a/crates/ra_ide/src/completion/complete_scope.rs
+++ b/crates/ra_ide/src/completion/complete_scope.rs
@@ -811,7 +811,44 @@ mod tests {
811 } 811 }
812 " 812 "
813 ), 813 ),
814 @"[]" 814 @r###"
815 [
816 CompletionItem {
817 label: "m!",
818 source_range: [145; 145),
819 delete: [145; 145),
820 insert: "m!($0)",
821 kind: Macro,
822 detail: "macro_rules! m",
823 },
824 CompletionItem {
825 label: "quux(…)",
826 source_range: [145; 145),
827 delete: [145; 145),
828 insert: "quux(${1:x})$0",
829 kind: Function,
830 lookup: "quux",
831 detail: "fn quux(x: i32)",
832 trigger_call_info: true,
833 },
834 CompletionItem {
835 label: "x",
836 source_range: [145; 145),
837 delete: [145; 145),
838 insert: "x",
839 kind: Binding,
840 detail: "i32",
841 },
842 CompletionItem {
843 label: "y",
844 source_range: [145; 145),
845 delete: [145; 145),
846 insert: "y",
847 kind: Binding,
848 detail: "i32",
849 },
850 ]
851 "###
815 ); 852 );
816 } 853 }
817 854
@@ -869,6 +906,59 @@ mod tests {
869 } 906 }
870 907
871 #[test] 908 #[test]
909 fn completes_in_simple_macro_without_closing_parens() {
910 assert_debug_snapshot!(
911 do_reference_completion(
912 r"
913 macro_rules! m { ($e:expr) => { $e } }
914 fn quux(x: i32) {
915 let y = 92;
916 m!(x<|>
917 }
918 "
919 ),
920 @r###"
921 [
922 CompletionItem {
923 label: "m!",
924 source_range: [145; 146),
925 delete: [145; 146),
926 insert: "m!($0)",
927 kind: Macro,
928 detail: "macro_rules! m",
929 },
930 CompletionItem {
931 label: "quux(…)",
932 source_range: [145; 146),
933 delete: [145; 146),
934 insert: "quux(${1:x})$0",
935 kind: Function,
936 lookup: "quux",
937 detail: "fn quux(x: i32)",
938 trigger_call_info: true,
939 },
940 CompletionItem {
941 label: "x",
942 source_range: [145; 146),
943 delete: [145; 146),
944 insert: "x",
945 kind: Binding,
946 detail: "i32",
947 },
948 CompletionItem {
949 label: "y",
950 source_range: [145; 146),
951 delete: [145; 146),
952 insert: "y",
953 kind: Binding,
954 detail: "i32",
955 },
956 ]
957 "###
958 );
959 }
960
961 #[test]
872 fn completes_unresolved_uses() { 962 fn completes_unresolved_uses() {
873 assert_debug_snapshot!( 963 assert_debug_snapshot!(
874 do_reference_completion( 964 do_reference_completion(
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs
index 3646fb8dc..54589a2a8 100644
--- a/crates/ra_ide/src/completion/completion_context.rs
+++ b/crates/ra_ide/src/completion/completion_context.rs
@@ -135,7 +135,7 @@ impl<'a> CompletionContext<'a> {
135 ), 135 ),
136 ) { 136 ) {
137 let new_offset = hypothetical_expansion.1.text_range().start(); 137 let new_offset = hypothetical_expansion.1.text_range().start();
138 if new_offset >= actual_expansion.text_range().end() { 138 if new_offset > actual_expansion.text_range().end() {
139 break; 139 break;
140 } 140 }
141 original_file = actual_expansion; 141 original_file = actual_expansion;
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs
index f6667cb33..e58526f31 100644
--- a/crates/ra_ide/src/expand_macro.rs
+++ b/crates/ra_ide/src/expand_macro.rs
@@ -259,7 +259,7 @@ fn some_thing() -> u32 {
259 ); 259 );
260 260
261 assert_eq!(res.name, "foo"); 261 assert_eq!(res.name, "foo");
262 assert_snapshot!(res.expansion, @r###"bar!()"###); 262 assert_snapshot!(res.expansion, @r###""###);
263 } 263 }
264 264
265 #[test] 265 #[test]
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 43afe24cc..6a9037bfc 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -150,7 +150,7 @@ impl MacroRules {
150 Ok(MacroRules { rules, shift: Shift::new(tt) }) 150 Ok(MacroRules { rules, shift: Shift::new(tt) })
151 } 151 }
152 152
153 pub fn expand(&self, tt: &tt::Subtree) -> Result<tt::Subtree, ExpandError> { 153 pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
154 // apply shift 154 // apply shift
155 let mut tt = tt.clone(); 155 let mut tt = tt.clone();
156 self.shift.shift_all(&mut tt); 156 self.shift.shift_all(&mut tt);
@@ -209,5 +209,35 @@ fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> {
209 Ok(()) 209 Ok(())
210} 210}
211 211
212pub struct ExpandResult<T>(pub T, pub Option<ExpandError>);
213
214impl<T> ExpandResult<T> {
215 pub fn ok(t: T) -> ExpandResult<T> {
216 ExpandResult(t, None)
217 }
218
219 pub fn only_err(err: ExpandError) -> ExpandResult<T>
220 where
221 T: Default,
222 {
223 ExpandResult(Default::default(), Some(err))
224 }
225
226 pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
227 ExpandResult(f(self.0), self.1)
228 }
229
230 pub fn result(self) -> Result<T, ExpandError> {
231 self.1.map(Err).unwrap_or(Ok(self.0))
232 }
233}
234
235impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> {
236 fn from(result: Result<T, ExpandError>) -> ExpandResult<T> {
237 result
238 .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None))
239 }
240}
241
212#[cfg(test)] 242#[cfg(test)]
213mod tests; 243mod tests;
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index b455b7321..b1eacf124 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -8,19 +8,51 @@ mod transcriber;
8use ra_syntax::SmolStr; 8use ra_syntax::SmolStr;
9use rustc_hash::FxHashMap; 9use rustc_hash::FxHashMap;
10 10
11use crate::ExpandError; 11use crate::{ExpandError, ExpandResult};
12 12
13pub(crate) fn expand( 13pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
14 rules: &crate::MacroRules, 14 expand_rules(&rules.rules, input)
15 input: &tt::Subtree,
16) -> Result<tt::Subtree, ExpandError> {
17 rules.rules.iter().find_map(|it| expand_rule(it, input).ok()).ok_or(ExpandError::NoMatchingRule)
18} 15}
19 16
20fn expand_rule(rule: &crate::Rule, input: &tt::Subtree) -> Result<tt::Subtree, ExpandError> { 17fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
21 let bindings = matcher::match_(&rule.lhs, input)?; 18 let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
22 let res = transcriber::transcribe(&rule.rhs, &bindings)?; 19 for rule in rules {
23 Ok(res) 20 let new_match = match matcher::match_(&rule.lhs, input) {
21 Ok(m) => m,
22 Err(_e) => {
23 // error in pattern parsing
24 continue;
25 }
26 };
27 if new_match.err.is_none() {
28 // If we find a rule that applies without errors, we're done.
29 // Unconditionally returning the transcription here makes the
30 // `test_repeat_bad_var` test fail.
31 let ExpandResult(res, transcribe_err) =
32 transcriber::transcribe(&rule.rhs, &new_match.bindings);
33 if transcribe_err.is_none() {
34 return ExpandResult::ok(res);
35 }
36 }
37 // Use the rule if we matched more tokens, or had fewer errors
38 if let Some((prev_match, _)) = &match_ {
39 if (new_match.unmatched_tts, new_match.err_count)
40 < (prev_match.unmatched_tts, prev_match.err_count)
41 {
42 match_ = Some((new_match, rule));
43 }
44 } else {
45 match_ = Some((new_match, rule));
46 }
47 }
48 if let Some((match_, rule)) = match_ {
49 // if we got here, there was no match without errors
50 let ExpandResult(result, transcribe_err) =
51 transcriber::transcribe(&rule.rhs, &match_.bindings);
52 ExpandResult(result, match_.err.or(transcribe_err))
53 } else {
54 ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule))
55 }
24} 56}
25 57
26/// The actual algorithm for expansion is not too hard, but is pretty tricky. 58/// The actual algorithm for expansion is not too hard, but is pretty tricky.
@@ -111,7 +143,7 @@ mod tests {
111 } 143 }
112 144
113 fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { 145 fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) {
114 assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation), Err(err)); 146 assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err));
115 } 147 }
116 148
117 fn format_macro(macro_body: &str) -> String { 149 fn format_macro(macro_body: &str) -> String {
@@ -135,10 +167,7 @@ mod tests {
135 crate::MacroRules::parse(&definition_tt).unwrap() 167 crate::MacroRules::parse(&definition_tt).unwrap()
136 } 168 }
137 169
138 fn expand_first( 170 fn expand_first(rules: &crate::MacroRules, invocation: &str) -> ExpandResult<tt::Subtree> {
139 rules: &crate::MacroRules,
140 invocation: &str,
141 ) -> Result<tt::Subtree, ExpandError> {
142 let source_file = ast::SourceFile::parse(invocation).ok().unwrap(); 171 let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
143 let macro_invocation = 172 let macro_invocation =
144 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); 173 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -146,6 +175,6 @@ mod tests {
146 let (invocation_tt, _) = 175 let (invocation_tt, _) =
147 ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap(); 176 ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
148 177
149 expand_rule(&rules.rules[0], &invocation_tt) 178 expand_rules(&rules.rules, &invocation_tt)
150 } 179 }
151} 180}
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs
index 49c53183a..2579382da 100644
--- a/crates/ra_mbe/src/mbe_expander/matcher.rs
+++ b/crates/ra_mbe/src/mbe_expander/matcher.rs
@@ -8,6 +8,7 @@ use crate::{
8 ExpandError, 8 ExpandError,
9}; 9};
10 10
11use super::ExpandResult;
11use ra_parser::{FragmentKind::*, TreeSink}; 12use ra_parser::{FragmentKind::*, TreeSink};
12use ra_syntax::{SmolStr, SyntaxKind}; 13use ra_syntax::{SmolStr, SyntaxKind};
13use tt::buffer::{Cursor, TokenBuffer}; 14use tt::buffer::{Cursor, TokenBuffer};
@@ -58,36 +59,61 @@ macro_rules! err {
58 }; 59 };
59} 60}
60 61
61macro_rules! bail { 62#[derive(Debug, Default)]
62 ($($tt:tt)*) => { 63pub(super) struct Match {
63 return Err(err!($($tt)*)) 64 pub bindings: Bindings,
64 }; 65 /// We currently just keep the first error and count the rest to compare matches.
66 pub err: Option<ExpandError>,
67 pub err_count: usize,
68 /// How many top-level token trees were left to match.
69 pub unmatched_tts: usize,
65} 70}
66 71
67pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Bindings, ExpandError> { 72impl Match {
73 pub fn add_err(&mut self, err: ExpandError) {
74 let prev_err = self.err.take();
75 self.err = prev_err.or(Some(err));
76 self.err_count += 1;
77 }
78}
79
80// General note: These functions have two channels to return errors, a `Result`
81// return value and the `&mut Match`. The returned Result is for pattern parsing
82// errors; if a branch of the macro definition doesn't parse, it doesn't make
83// sense to try using it. Matching errors are added to the `Match`. It might
84// make sense to make pattern parsing a separate step?
85
86pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Match, ExpandError> {
68 assert!(pattern.delimiter == None); 87 assert!(pattern.delimiter == None);
69 88
70 let mut res = Bindings::default(); 89 let mut res = Match::default();
71 let mut src = TtIter::new(src); 90 let mut src = TtIter::new(src);
72 91
73 match_subtree(&mut res, pattern, &mut src)?; 92 match_subtree(&mut res, pattern, &mut src)?;
74 93
75 if src.len() > 0 { 94 if src.len() > 0 {
76 bail!("leftover tokens"); 95 res.unmatched_tts += src.len();
96 res.add_err(err!("leftover tokens"));
77 } 97 }
78 98
79 Ok(res) 99 Ok(res)
80} 100}
81 101
82fn match_subtree( 102fn match_subtree(
83 bindings: &mut Bindings, 103 res: &mut Match,
84 pattern: &tt::Subtree, 104 pattern: &tt::Subtree,
85 src: &mut TtIter, 105 src: &mut TtIter,
86) -> Result<(), ExpandError> { 106) -> Result<(), ExpandError> {
87 for op in parse_pattern(pattern) { 107 for op in parse_pattern(pattern) {
88 match op? { 108 match op? {
89 Op::TokenTree(tt::TokenTree::Leaf(lhs)) => { 109 Op::TokenTree(tt::TokenTree::Leaf(lhs)) => {
90 let rhs = src.expect_leaf().map_err(|()| err!("expected leaf: `{}`", lhs))?; 110 let rhs = match src.expect_leaf() {
111 Ok(l) => l,
112 Err(()) => {
113 res.add_err(err!("expected leaf: `{}`", lhs));
114 continue;
115 }
116 };
91 match (lhs, rhs) { 117 match (lhs, rhs) {
92 ( 118 (
93 tt::Leaf::Punct(tt::Punct { char: lhs, .. }), 119 tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
@@ -101,31 +127,51 @@ fn match_subtree(
101 tt::Leaf::Literal(tt::Literal { text: lhs, .. }), 127 tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
102 tt::Leaf::Literal(tt::Literal { text: rhs, .. }), 128 tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
103 ) if lhs == rhs => (), 129 ) if lhs == rhs => (),
104 _ => return Err(ExpandError::UnexpectedToken), 130 _ => {
131 res.add_err(ExpandError::UnexpectedToken);
132 }
105 } 133 }
106 } 134 }
107 Op::TokenTree(tt::TokenTree::Subtree(lhs)) => { 135 Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
108 let rhs = src.expect_subtree().map_err(|()| err!("expected subtree"))?; 136 let rhs = match src.expect_subtree() {
137 Ok(s) => s,
138 Err(()) => {
139 res.add_err(err!("expected subtree"));
140 continue;
141 }
142 };
109 if lhs.delimiter_kind() != rhs.delimiter_kind() { 143 if lhs.delimiter_kind() != rhs.delimiter_kind() {
110 bail!("mismatched delimiter") 144 res.add_err(err!("mismatched delimiter"));
145 continue;
111 } 146 }
112 let mut src = TtIter::new(rhs); 147 let mut src = TtIter::new(rhs);
113 match_subtree(bindings, lhs, &mut src)?; 148 match_subtree(res, lhs, &mut src)?;
114 if src.len() > 0 { 149 if src.len() > 0 {
115 bail!("leftover tokens"); 150 res.add_err(err!("leftover tokens"));
116 } 151 }
117 } 152 }
118 Op::Var { name, kind } => { 153 Op::Var { name, kind } => {
119 let kind = kind.as_ref().ok_or(ExpandError::UnexpectedToken)?; 154 let kind = match kind {
120 match match_meta_var(kind.as_str(), src)? { 155 Some(k) => k,
156 None => {
157 res.add_err(ExpandError::UnexpectedToken);
158 continue;
159 }
160 };
161 let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src);
162 match matched {
121 Some(fragment) => { 163 Some(fragment) => {
122 bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); 164 res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment));
123 } 165 }
124 None => bindings.push_optional(name), 166 None if match_err.is_none() => res.bindings.push_optional(name),
167 _ => {}
168 }
169 if let Some(err) = match_err {
170 res.add_err(err);
125 } 171 }
126 } 172 }
127 Op::Repeat { subtree, kind, separator } => { 173 Op::Repeat { subtree, kind, separator } => {
128 match_repeat(bindings, subtree, kind, separator, src)? 174 match_repeat(res, subtree, kind, separator, src)?;
129 } 175 }
130 } 176 }
131 } 177 }
@@ -221,7 +267,7 @@ impl<'a> TtIter<'a> {
221 pub(crate) fn expect_fragment( 267 pub(crate) fn expect_fragment(
222 &mut self, 268 &mut self,
223 fragment_kind: ra_parser::FragmentKind, 269 fragment_kind: ra_parser::FragmentKind,
224 ) -> Result<tt::TokenTree, ()> { 270 ) -> ExpandResult<Option<tt::TokenTree>> {
225 pub(crate) struct OffsetTokenSink<'a> { 271 pub(crate) struct OffsetTokenSink<'a> {
226 pub(crate) cursor: Cursor<'a>, 272 pub(crate) cursor: Cursor<'a>,
227 pub(crate) error: bool, 273 pub(crate) error: bool,
@@ -246,45 +292,51 @@ impl<'a> TtIter<'a> {
246 292
247 ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind); 293 ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind);
248 294
295 let mut err = None;
249 if !sink.cursor.is_root() || sink.error { 296 if !sink.cursor.is_root() || sink.error {
250 // FIXME better recovery in this case would help completion inside macros immensely 297 err = Some(err!("expected {:?}", fragment_kind));
251 return Err(());
252 } 298 }
253 299
254 let mut curr = buffer.begin(); 300 let mut curr = buffer.begin();
255 let mut res = vec![]; 301 let mut res = vec![];
256 302
257 while curr != sink.cursor { 303 if sink.cursor.is_root() {
258 if let Some(token) = curr.token_tree() { 304 while curr != sink.cursor {
259 res.push(token); 305 if let Some(token) = curr.token_tree() {
306 res.push(token);
307 }
308 curr = curr.bump();
260 } 309 }
261 curr = curr.bump();
262 } 310 }
263 self.inner = self.inner.as_slice()[res.len()..].iter(); 311 self.inner = self.inner.as_slice()[res.len()..].iter();
264 match res.len() { 312 if res.len() == 0 && err.is_none() {
265 0 => Err(()), 313 err = Some(err!("no tokens consumed"));
266 1 => Ok(res[0].clone()), 314 }
267 _ => Ok(tt::TokenTree::Subtree(tt::Subtree { 315 let res = match res.len() {
316 1 => Some(res[0].clone()),
317 0 => None,
318 _ => Some(tt::TokenTree::Subtree(tt::Subtree {
268 delimiter: None, 319 delimiter: None,
269 token_trees: res.into_iter().cloned().collect(), 320 token_trees: res.into_iter().cloned().collect(),
270 })), 321 })),
271 } 322 };
323 ExpandResult(res, err)
272 } 324 }
273 325
274 pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> { 326 pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> {
275 let mut fork = self.clone(); 327 let mut fork = self.clone();
276 match fork.expect_fragment(Visibility) { 328 match fork.expect_fragment(Visibility) {
277 Ok(tt) => { 329 ExpandResult(tt, None) => {
278 *self = fork; 330 *self = fork;
279 Some(tt) 331 tt
280 } 332 }
281 Err(()) => None, 333 ExpandResult(_, Some(_)) => None,
282 } 334 }
283 } 335 }
284} 336}
285 337
286pub(super) fn match_repeat( 338pub(super) fn match_repeat(
287 bindings: &mut Bindings, 339 res: &mut Match,
288 pattern: &tt::Subtree, 340 pattern: &tt::Subtree,
289 kind: RepeatKind, 341 kind: RepeatKind,
290 separator: Option<Separator>, 342 separator: Option<Separator>,
@@ -304,36 +356,46 @@ pub(super) fn match_repeat(
304 } 356 }
305 } 357 }
306 358
307 let mut nested = Bindings::default(); 359 let mut nested = Match::default();
308 match match_subtree(&mut nested, pattern, &mut fork) { 360 match_subtree(&mut nested, pattern, &mut fork)?;
309 Ok(()) => { 361 if nested.err.is_none() {
310 limit -= 1; 362 limit -= 1;
311 if limit == 0 { 363 if limit == 0 {
312 log::warn!("match_lhs excced in repeat pattern exceed limit => {:#?}\n{:#?}\n{:#?}\n{:#?}", pattern, src, kind, separator); 364 log::warn!(
313 break; 365 "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
314 } 366 pattern,
315 *src = fork; 367 src,
368 kind,
369 separator
370 );
371 break;
372 }
373 *src = fork;
316 374
317 bindings.push_nested(counter, nested)?; 375 if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
318 counter += 1; 376 res.add_err(err);
319 if counter == 1 { 377 }
320 if let RepeatKind::ZeroOrOne = kind { 378 counter += 1;
321 break; 379 if counter == 1 {
322 } 380 if let RepeatKind::ZeroOrOne = kind {
381 break;
323 } 382 }
324 } 383 }
325 Err(_) => break, 384 } else {
385 break;
326 } 386 }
327 } 387 }
328 388
329 match (kind, counter) { 389 match (kind, counter) {
330 (RepeatKind::OneOrMore, 0) => return Err(ExpandError::UnexpectedToken), 390 (RepeatKind::OneOrMore, 0) => {
391 res.add_err(ExpandError::UnexpectedToken);
392 }
331 (_, 0) => { 393 (_, 0) => {
332 // Collect all empty variables in subtrees 394 // Collect all empty variables in subtrees
333 let mut vars = Vec::new(); 395 let mut vars = Vec::new();
334 collect_vars(&mut vars, pattern)?; 396 collect_vars(&mut vars, pattern)?;
335 for var in vars { 397 for var in vars {
336 bindings.push_empty(&var) 398 res.bindings.push_empty(&var)
337 } 399 }
338 } 400 }
339 _ => (), 401 _ => (),
@@ -341,7 +403,7 @@ pub(super) fn match_repeat(
341 Ok(()) 403 Ok(())
342} 404}
343 405
344fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, ExpandError> { 406fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
345 let fragment = match kind { 407 let fragment = match kind {
346 "path" => Path, 408 "path" => Path,
347 "expr" => Expr, 409 "expr" => Expr,
@@ -352,34 +414,32 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, Ex
352 "meta" => MetaItem, 414 "meta" => MetaItem,
353 "item" => Item, 415 "item" => Item,
354 _ => { 416 _ => {
355 let tt = match kind { 417 let tt_result = match kind {
356 "ident" => { 418 "ident" => input
357 let ident = input.expect_ident().map_err(|()| err!("expected ident"))?.clone(); 419 .expect_ident()
358 tt::Leaf::from(ident).into() 420 .map(|ident| Some(tt::Leaf::from(ident.clone()).into()))
359 } 421 .map_err(|()| err!("expected ident")),
360 "tt" => input.expect_tt().map_err(|()| err!())?.clone(), 422 "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
361 "lifetime" => { 423 "lifetime" => input
362 let ident = input.expect_lifetime().map_err(|()| err!())?; 424 .expect_lifetime()
363 tt::Leaf::Ident(ident.clone()).into() 425 .map(|ident| Some(tt::Leaf::Ident(ident.clone()).into()))
364 } 426 .map_err(|()| err!("expected lifetime")),
365 "literal" => { 427 "literal" => input
366 let literal = input.expect_literal().map_err(|()| err!())?.clone(); 428 .expect_literal()
367 tt::Leaf::from(literal).into() 429 .map(|literal| Some(tt::Leaf::from(literal.clone()).into()))
368 } 430 .map_err(|()| err!()),
369 // `vis` is optional 431 // `vis` is optional
370 "vis" => match input.eat_vis() { 432 "vis" => match input.eat_vis() {
371 Some(vis) => vis, 433 Some(vis) => Ok(Some(vis)),
372 None => return Ok(None), 434 None => Ok(None),
373 }, 435 },
374 _ => return Err(ExpandError::UnexpectedToken), 436 _ => Err(ExpandError::UnexpectedToken),
375 }; 437 };
376 return Ok(Some(Fragment::Tokens(tt))); 438 return tt_result.map(|it| it.map(Fragment::Tokens)).into();
377 } 439 }
378 }; 440 };
379 let tt = 441 let result = input.expect_fragment(fragment);
380 input.expect_fragment(fragment).map_err(|()| err!("fragment did not parse as {}", kind))?; 442 result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
381 let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) };
382 Ok(Some(fragment))
383} 443}
384 444
385fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &tt::Subtree) -> Result<(), ExpandError> { 445fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &tt::Subtree) -> Result<(), ExpandError> {
diff --git a/crates/ra_mbe/src/mbe_expander/transcriber.rs b/crates/ra_mbe/src/mbe_expander/transcriber.rs
index 7662020f3..4b173edd3 100644
--- a/crates/ra_mbe/src/mbe_expander/transcriber.rs
+++ b/crates/ra_mbe/src/mbe_expander/transcriber.rs
@@ -3,6 +3,7 @@
3 3
4use ra_syntax::SmolStr; 4use ra_syntax::SmolStr;
5 5
6use super::ExpandResult;
6use crate::{ 7use crate::{
7 mbe_expander::{Binding, Bindings, Fragment}, 8 mbe_expander::{Binding, Bindings, Fragment},
8 parser::{parse_template, Op, RepeatKind, Separator}, 9 parser::{parse_template, Op, RepeatKind, Separator},
@@ -49,10 +50,7 @@ impl Bindings {
49 } 50 }
50} 51}
51 52
52pub(super) fn transcribe( 53pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult<tt::Subtree> {
53 template: &tt::Subtree,
54 bindings: &Bindings,
55) -> Result<tt::Subtree, ExpandError> {
56 assert!(template.delimiter == None); 54 assert!(template.delimiter == None);
57 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; 55 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
58 expand_subtree(&mut ctx, template) 56 expand_subtree(&mut ctx, template)
@@ -75,35 +73,46 @@ struct ExpandCtx<'a> {
75 nesting: Vec<NestingState>, 73 nesting: Vec<NestingState>,
76} 74}
77 75
78fn expand_subtree(ctx: &mut ExpandCtx, template: &tt::Subtree) -> Result<tt::Subtree, ExpandError> { 76fn expand_subtree(ctx: &mut ExpandCtx, template: &tt::Subtree) -> ExpandResult<tt::Subtree> {
79 let mut buf: Vec<tt::TokenTree> = Vec::new(); 77 let mut buf: Vec<tt::TokenTree> = Vec::new();
78 let mut err = None;
80 for op in parse_template(template) { 79 for op in parse_template(template) {
81 match op? { 80 let op = match op {
81 Ok(op) => op,
82 Err(e) => {
83 err = Some(e);
84 break;
85 }
86 };
87 match op {
82 Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => buf.push(tt.clone()), 88 Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => buf.push(tt.clone()),
83 Op::TokenTree(tt::TokenTree::Subtree(tt)) => { 89 Op::TokenTree(tt::TokenTree::Subtree(tt)) => {
84 let tt = expand_subtree(ctx, tt)?; 90 let ExpandResult(tt, e) = expand_subtree(ctx, tt);
91 err = err.or(e);
85 buf.push(tt.into()); 92 buf.push(tt.into());
86 } 93 }
87 Op::Var { name, kind: _ } => { 94 Op::Var { name, kind: _ } => {
88 let fragment = expand_var(ctx, name)?; 95 let ExpandResult(fragment, e) = expand_var(ctx, name);
96 err = err.or(e);
89 push_fragment(&mut buf, fragment); 97 push_fragment(&mut buf, fragment);
90 } 98 }
91 Op::Repeat { subtree, kind, separator } => { 99 Op::Repeat { subtree, kind, separator } => {
92 let fragment = expand_repeat(ctx, subtree, kind, separator)?; 100 let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator);
101 err = err.or(e);
93 push_fragment(&mut buf, fragment) 102 push_fragment(&mut buf, fragment)
94 } 103 }
95 } 104 }
96 } 105 }
97 Ok(tt::Subtree { delimiter: template.delimiter, token_trees: buf }) 106 ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: buf }, err)
98} 107}
99 108
100fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError> { 109fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> {
101 let res = if v == "crate" { 110 if v == "crate" {
102 // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. 111 // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
103 let tt = 112 let tt =
104 tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }) 113 tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() })
105 .into(); 114 .into();
106 Fragment::Tokens(tt) 115 ExpandResult::ok(Fragment::Tokens(tt))
107 } else if !ctx.bindings.contains(v) { 116 } else if !ctx.bindings.contains(v) {
108 // Note that it is possible to have a `$var` inside a macro which is not bound. 117 // Note that it is possible to have a `$var` inside a macro which is not bound.
109 // For example: 118 // For example:
@@ -132,11 +141,13 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
132 ], 141 ],
133 } 142 }
134 .into(); 143 .into();
135 Fragment::Tokens(tt) 144 ExpandResult::ok(Fragment::Tokens(tt))
136 } else { 145 } else {
137 ctx.bindings.get(&v, &mut ctx.nesting)?.clone() 146 ctx.bindings.get(&v, &mut ctx.nesting).map_or_else(
138 }; 147 |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)),
139 Ok(res) 148 |b| ExpandResult::ok(b.clone()),
149 )
150 }
140} 151}
141 152
142fn expand_repeat( 153fn expand_repeat(
@@ -144,17 +155,17 @@ fn expand_repeat(
144 template: &tt::Subtree, 155 template: &tt::Subtree,
145 kind: RepeatKind, 156 kind: RepeatKind,
146 separator: Option<Separator>, 157 separator: Option<Separator>,
147) -> Result<Fragment, ExpandError> { 158) -> ExpandResult<Fragment> {
148 let mut buf: Vec<tt::TokenTree> = Vec::new(); 159 let mut buf: Vec<tt::TokenTree> = Vec::new();
149 ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false }); 160 ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
150 // Dirty hack to make macro-expansion terminate. 161 // Dirty hack to make macro-expansion terminate.
151 // This should be replaced by a propper macro-by-example implementation 162 // This should be replaced by a proper macro-by-example implementation
152 let limit = 65536; 163 let limit = 65536;
153 let mut has_seps = 0; 164 let mut has_seps = 0;
154 let mut counter = 0; 165 let mut counter = 0;
155 166
156 loop { 167 loop {
157 let res = expand_subtree(ctx, template); 168 let ExpandResult(mut t, e) = expand_subtree(ctx, template);
158 let nesting_state = ctx.nesting.last_mut().unwrap(); 169 let nesting_state = ctx.nesting.last_mut().unwrap();
159 if nesting_state.at_end || !nesting_state.hit { 170 if nesting_state.at_end || !nesting_state.hit {
160 break; 171 break;
@@ -172,10 +183,10 @@ fn expand_repeat(
172 break; 183 break;
173 } 184 }
174 185
175 let mut t = match res { 186 if e.is_some() {
176 Ok(t) => t, 187 continue;
177 Err(_) => continue, 188 }
178 }; 189
179 t.delimiter = None; 190 t.delimiter = None;
180 push_subtree(&mut buf, t); 191 push_subtree(&mut buf, t);
181 192
@@ -209,14 +220,14 @@ fn expand_repeat(
209 buf.pop(); 220 buf.pop();
210 } 221 }
211 222
212 if RepeatKind::OneOrMore == kind && counter == 0 {
213 return Err(ExpandError::UnexpectedToken);
214 }
215
216 // Check if it is a single token subtree without any delimiter 223 // Check if it is a single token subtree without any delimiter
217 // e.g. {Delimiter:None> ['>'] /Delimiter:None>} 224 // e.g. {Delimiter:None> ['>'] /Delimiter:None>}
218 let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); 225 let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
219 Ok(Fragment::Tokens(tt)) 226
227 if RepeatKind::OneOrMore == kind && counter == 0 {
228 return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken));
229 }
230 ExpandResult::ok(Fragment::Tokens(tt))
220} 231}
221 232
222fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) { 233fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 6d5d1e9e6..44f381938 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1430,7 +1430,7 @@ impl MacroFixture {
1430 let (invocation_tt, _) = 1430 let (invocation_tt, _) =
1431 ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap(); 1431 ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
1432 1432
1433 self.rules.expand(&invocation_tt) 1433 self.rules.expand(&invocation_tt).result()
1434 } 1434 }
1435 1435
1436 fn assert_expand_err(&self, invocation: &str, err: &ExpandError) { 1436 fn assert_expand_err(&self, invocation: &str, err: &ExpandError) {
@@ -1662,5 +1662,5 @@ fn test_expand_bad_literal() {
1662 macro_rules! foo { ($i:literal) => {}; } 1662 macro_rules! foo { ($i:literal) => {}; }
1663 "#, 1663 "#,
1664 ) 1664 )
1665 .assert_expand_err(r#"foo!(&k");"#, &ExpandError::NoMatchingRule); 1665 .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".to_string()));
1666} 1666}
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs
index b77b683b5..2335d99b3 100644
--- a/crates/ra_parser/src/grammar/expressions/atom.rs
+++ b/crates/ra_parser/src/grammar/expressions/atom.rs
@@ -61,7 +61,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
61 LIFETIME, 61 LIFETIME,
62 ]); 62 ]);
63 63
64const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW]; 64const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR];
65 65
66pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { 66pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
67 if let Some(m) = literal(p) { 67 if let Some(m) = literal(p) {
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 10f424aae..1e2fb8b91 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -40,6 +40,12 @@ pub enum TokenTree {
40} 40}
41impl_froms!(TokenTree: Leaf, Subtree); 41impl_froms!(TokenTree: Leaf, Subtree);
42 42
43impl TokenTree {
44 pub fn empty() -> Self {
45 TokenTree::Subtree(Subtree::default())
46 }
47}
48
43#[derive(Debug, Clone, PartialEq, Eq, Hash)] 49#[derive(Debug, Clone, PartialEq, Eq, Hash)]
44pub enum Leaf { 50pub enum Leaf {
45 Literal(Literal), 51 Literal(Literal),
diff --git a/editors/code/src/commands/server_version.ts b/editors/code/src/commands/server_version.ts
index c4d84b443..83b1acf67 100644
--- a/editors/code/src/commands/server_version.ts
+++ b/editors/code/src/commands/server_version.ts
@@ -5,7 +5,7 @@ import { spawnSync } from 'child_process';
5 5
6export function serverVersion(ctx: Ctx): Cmd { 6export function serverVersion(ctx: Ctx): Cmd {
7 return async () => { 7 return async () => {
8 const binaryPath = await ensureServerBinary(ctx.config); 8 const binaryPath = await ensureServerBinary(ctx.config, ctx.state);
9 9
10 if (binaryPath == null) { 10 if (binaryPath == null) {
11 throw new Error( 11 throw new Error(
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index f63e1d20e..bd8096dd6 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -182,13 +182,6 @@ export class Config {
182 return this.createGithubReleaseSource("rust-analyzer.vsix", NIGHTLY_TAG); 182 return this.createGithubReleaseSource("rust-analyzer.vsix", NIGHTLY_TAG);
183 } 183 }
184 184
185 readonly installedNightlyExtensionReleaseDate = new DateStorage(
186 "installed-nightly-extension-release-date",
187 this.ctx.globalState
188 );
189 readonly serverReleaseDate = new DateStorage("server-release-date", this.ctx.globalState);
190 readonly serverReleaseTag = new Storage<null | string>("server-release-tag", this.ctx.globalState, null);
191
192 // We don't do runtime config validation here for simplicity. More on stackoverflow: 185 // We don't do runtime config validation here for simplicity. More on stackoverflow:
193 // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension 186 // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
194 187
@@ -232,37 +225,3 @@ export class Config {
232 // for internal use 225 // for internal use
233 get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; } 226 get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; }
234} 227}
235
236export class Storage<T> {
237 constructor(
238 private readonly key: string,
239 private readonly storage: vscode.Memento,
240 private readonly defaultVal: T
241 ) { }
242
243 get(): T {
244 const val = this.storage.get(this.key, this.defaultVal);
245 log.debug(this.key, "==", val);
246 return val;
247 }
248 async set(val: T) {
249 log.debug(this.key, "=", val);
250 await this.storage.update(this.key, val);
251 }
252}
253export class DateStorage {
254 inner: Storage<null | string>;
255
256 constructor(key: string, storage: vscode.Memento) {
257 this.inner = new Storage(key, storage, null);
258 }
259
260 get(): null | Date {
261 const dateStr = this.inner.get();
262 return dateStr ? new Date(dateStr) : null;
263 }
264
265 async set(date: null | Date) {
266 await this.inner.set(date ? date.toString() : null);
267 }
268}
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 25ef38aed..c929ab063 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -4,19 +4,21 @@ import * as lc from 'vscode-languageclient';
4import { Config } from './config'; 4import { Config } from './config';
5import { createClient } from './client'; 5import { createClient } from './client';
6import { isRustEditor, RustEditor } from './util'; 6import { isRustEditor, RustEditor } from './util';
7import { PersistentState } from './persistent_state';
7 8
8export class Ctx { 9export class Ctx {
9 private constructor( 10 private constructor(
10 readonly config: Config, 11 readonly config: Config,
12 readonly state: PersistentState,
11 private readonly extCtx: vscode.ExtensionContext, 13 private readonly extCtx: vscode.ExtensionContext,
12 readonly client: lc.LanguageClient 14 readonly client: lc.LanguageClient
13 ) { 15 ) {
14 16
15 } 17 }
16 18
17 static async create(config: Config, extCtx: vscode.ExtensionContext, serverPath: string): Promise<Ctx> { 19 static async create(config: Config, state: PersistentState, extCtx: vscode.ExtensionContext, serverPath: string): Promise<Ctx> {
18 const client = await createClient(config, serverPath); 20 const client = await createClient(config, serverPath);
19 const res = new Ctx(config, extCtx, client); 21 const res = new Ctx(config, state, extCtx, client);
20 res.pushCleanup(client.start()); 22 res.pushCleanup(client.start());
21 await client.onReady(); 23 await client.onReady();
22 return res; 24 return res;
diff --git a/editors/code/src/installation/extension.ts b/editors/code/src/installation/extension.ts
index eea6fded2..a1db96f05 100644
--- a/editors/code/src/installation/extension.ts
+++ b/editors/code/src/installation/extension.ts
@@ -7,6 +7,7 @@ import { Config, UpdatesChannel } from "../config";
7import { ArtifactReleaseInfo, ArtifactSource } from "./interfaces"; 7import { ArtifactReleaseInfo, ArtifactSource } from "./interfaces";
8import { downloadArtifactWithProgressUi } from "./downloads"; 8import { downloadArtifactWithProgressUi } from "./downloads";
9import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info"; 9import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
10import { PersistentState } from "../persistent_state";
10 11
11const HEURISTIC_NIGHTLY_RELEASE_PERIOD_IN_HOURS = 25; 12const HEURISTIC_NIGHTLY_RELEASE_PERIOD_IN_HOURS = 25;
12 13
@@ -14,7 +15,7 @@ const HEURISTIC_NIGHTLY_RELEASE_PERIOD_IN_HOURS = 25;
14 * Installs `stable` or latest `nightly` version or does nothing if the current 15 * Installs `stable` or latest `nightly` version or does nothing if the current
15 * extension version is what's needed according to `desiredUpdateChannel`. 16 * extension version is what's needed according to `desiredUpdateChannel`.
16 */ 17 */
17export async function ensureProperExtensionVersion(config: Config): Promise<never | void> { 18export async function ensureProperExtensionVersion(config: Config, state: PersistentState): Promise<never | void> {
18 // User has built lsp server from sources, she should manage updates manually 19 // User has built lsp server from sources, she should manage updates manually
19 if (config.serverSource?.type === ArtifactSource.Type.ExplicitPath) return; 20 if (config.serverSource?.type === ArtifactSource.Type.ExplicitPath) return;
20 21
@@ -23,7 +24,7 @@ export async function ensureProperExtensionVersion(config: Config): Promise<neve
23 24
24 if (currentUpdChannel === UpdatesChannel.Stable) { 25 if (currentUpdChannel === UpdatesChannel.Stable) {
25 // Release date is present only when we are on nightly 26 // Release date is present only when we are on nightly
26 await config.installedNightlyExtensionReleaseDate.set(null); 27 await state.installedNightlyExtensionReleaseDate.set(null);
27 } 28 }
28 29
29 if (desiredUpdChannel === UpdatesChannel.Stable) { 30 if (desiredUpdChannel === UpdatesChannel.Stable) {
@@ -39,10 +40,10 @@ export async function ensureProperExtensionVersion(config: Config): Promise<neve
39 if (currentUpdChannel === UpdatesChannel.Stable) { 40 if (currentUpdChannel === UpdatesChannel.Stable) {
40 if (!await askToDownloadProperExtensionVersion(config)) return; 41 if (!await askToDownloadProperExtensionVersion(config)) return;
41 42
42 return await tryDownloadNightlyExtension(config); 43 return await tryDownloadNightlyExtension(config, state);
43 } 44 }
44 45
45 const currentExtReleaseDate = config.installedNightlyExtensionReleaseDate.get(); 46 const currentExtReleaseDate = state.installedNightlyExtensionReleaseDate.get();
46 47
47 if (currentExtReleaseDate === null) { 48 if (currentExtReleaseDate === null) {
48 void vscode.window.showErrorMessage( 49 void vscode.window.showErrorMessage(
@@ -66,9 +67,9 @@ export async function ensureProperExtensionVersion(config: Config): Promise<neve
66 return; 67 return;
67 } 68 }
68 69
69 await tryDownloadNightlyExtension(config, releaseInfo => { 70 await tryDownloadNightlyExtension(config, state, releaseInfo => {
70 assert( 71 assert(
71 currentExtReleaseDate.getTime() === config.installedNightlyExtensionReleaseDate.get()?.getTime(), 72 currentExtReleaseDate.getTime() === state.installedNightlyExtensionReleaseDate.get()?.getTime(),
72 "Other active VSCode instance has reinstalled the extension" 73 "Other active VSCode instance has reinstalled the extension"
73 ); 74 );
74 75
@@ -111,6 +112,7 @@ async function askToDownloadProperExtensionVersion(config: Config, reason = "")
111 */ 112 */
112const tryDownloadNightlyExtension = notReentrant(async ( 113const tryDownloadNightlyExtension = notReentrant(async (
113 config: Config, 114 config: Config,
115 state: PersistentState,
114 shouldDownload: (releaseInfo: ArtifactReleaseInfo) => boolean = () => true 116 shouldDownload: (releaseInfo: ArtifactReleaseInfo) => boolean = () => true
115): Promise<never | void> => { 117): Promise<never | void> => {
116 const vsixSource = config.nightlyVsixSource; 118 const vsixSource = config.nightlyVsixSource;
@@ -124,7 +126,7 @@ const tryDownloadNightlyExtension = notReentrant(async (
124 const vsixPath = path.join(vsixSource.dir, vsixSource.file); 126 const vsixPath = path.join(vsixSource.dir, vsixSource.file);
125 127
126 await vscodeInstallExtensionFromVsix(vsixPath); 128 await vscodeInstallExtensionFromVsix(vsixPath);
127 await config.installedNightlyExtensionReleaseDate.set(releaseInfo.releaseDate); 129 await state.installedNightlyExtensionReleaseDate.set(releaseInfo.releaseDate);
128 await fs.unlink(vsixPath); 130 await fs.unlink(vsixPath);
129 131
130 await vscodeReloadWindow(); // never returns 132 await vscodeReloadWindow(); // never returns
diff --git a/editors/code/src/installation/server.ts b/editors/code/src/installation/server.ts
index 05730a778..05d326131 100644
--- a/editors/code/src/installation/server.ts
+++ b/editors/code/src/installation/server.ts
@@ -7,8 +7,9 @@ import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
7import { downloadArtifactWithProgressUi } from "./downloads"; 7import { downloadArtifactWithProgressUi } from "./downloads";
8import { log, assert, notReentrant } from "../util"; 8import { log, assert, notReentrant } from "../util";
9import { Config, NIGHTLY_TAG } from "../config"; 9import { Config, NIGHTLY_TAG } from "../config";
10import { PersistentState } from "../persistent_state";
10 11
11export async function ensureServerBinary(config: Config): Promise<null | string> { 12export async function ensureServerBinary(config: Config, state: PersistentState): Promise<null | string> {
12 const source = config.serverSource; 13 const source = config.serverSource;
13 14
14 if (!source) { 15 if (!source) {
@@ -37,7 +38,7 @@ export async function ensureServerBinary(config: Config): Promise<null | string>
37 return null; 38 return null;
38 } 39 }
39 case ArtifactSource.Type.GithubRelease: { 40 case ArtifactSource.Type.GithubRelease: {
40 if (!shouldDownloadServer(source, config)) { 41 if (!shouldDownloadServer(state, source)) {
41 return path.join(source.dir, source.file); 42 return path.join(source.dir, source.file);
42 } 43 }
43 44
@@ -50,24 +51,24 @@ export async function ensureServerBinary(config: Config): Promise<null | string>
50 if (userResponse !== "Download now") return null; 51 if (userResponse !== "Download now") return null;
51 } 52 }
52 53
53 return await downloadServer(source, config); 54 return await downloadServer(state, source);
54 } 55 }
55 } 56 }
56} 57}
57 58
58function shouldDownloadServer( 59function shouldDownloadServer(
60 state: PersistentState,
59 source: ArtifactSource.GithubRelease, 61 source: ArtifactSource.GithubRelease,
60 config: Config
61): boolean { 62): boolean {
62 if (!isBinaryAvailable(path.join(source.dir, source.file))) return true; 63 if (!isBinaryAvailable(path.join(source.dir, source.file))) return true;
63 64
64 const installed = { 65 const installed = {
65 tag: config.serverReleaseTag.get(), 66 tag: state.serverReleaseTag.get(),
66 date: config.serverReleaseDate.get() 67 date: state.serverReleaseDate.get()
67 }; 68 };
68 const required = { 69 const required = {
69 tag: source.tag, 70 tag: source.tag,
70 date: config.installedNightlyExtensionReleaseDate.get() 71 date: state.installedNightlyExtensionReleaseDate.get()
71 }; 72 };
72 73
73 log.debug("Installed server:", installed, "required:", required); 74 log.debug("Installed server:", installed, "required:", required);
@@ -86,16 +87,16 @@ function shouldDownloadServer(
86 * Enforcing no reentrancy for this is best-effort. 87 * Enforcing no reentrancy for this is best-effort.
87 */ 88 */
88const downloadServer = notReentrant(async ( 89const downloadServer = notReentrant(async (
90 state: PersistentState,
89 source: ArtifactSource.GithubRelease, 91 source: ArtifactSource.GithubRelease,
90 config: Config,
91): Promise<null | string> => { 92): Promise<null | string> => {
92 try { 93 try {
93 const releaseInfo = await fetchArtifactReleaseInfo(source.repo, source.file, source.tag); 94 const releaseInfo = await fetchArtifactReleaseInfo(source.repo, source.file, source.tag);
94 95
95 await downloadArtifactWithProgressUi(releaseInfo, source.file, source.dir, "language server"); 96 await downloadArtifactWithProgressUi(releaseInfo, source.file, source.dir, "language server");
96 await Promise.all([ 97 await Promise.all([
97 config.serverReleaseTag.set(releaseInfo.releaseName), 98 state.serverReleaseTag.set(releaseInfo.releaseName),
98 config.serverReleaseDate.set(releaseInfo.releaseDate) 99 state.serverReleaseDate.set(releaseInfo.releaseDate)
99 ]); 100 ]);
100 } catch (err) { 101 } catch (err) {
101 log.downloadError(err, "language server", source.repo.name); 102 log.downloadError(err, "language server", source.repo.name);
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index bd4661a36..94ecd4dab 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -9,6 +9,7 @@ import { ensureServerBinary } from './installation/server';
9import { Config } from './config'; 9import { Config } from './config';
10import { log } from './util'; 10import { log } from './util';
11import { ensureProperExtensionVersion } from './installation/extension'; 11import { ensureProperExtensionVersion } from './installation/extension';
12import { PersistentState } from './persistent_state';
12 13
13let ctx: Ctx | undefined; 14let ctx: Ctx | undefined;
14 15
@@ -34,13 +35,14 @@ export async function activate(context: vscode.ExtensionContext) {
34 context.subscriptions.push(defaultOnEnter); 35 context.subscriptions.push(defaultOnEnter);
35 36
36 const config = new Config(context); 37 const config = new Config(context);
38 const state = new PersistentState(context);
37 39
38 vscode.workspace.onDidChangeConfiguration(() => ensureProperExtensionVersion(config).catch(log.error)); 40 vscode.workspace.onDidChangeConfiguration(() => ensureProperExtensionVersion(config, state).catch(log.error));
39 41
40 // Don't await the user response here, otherwise we will block the lsp server bootstrap 42 // Don't await the user response here, otherwise we will block the lsp server bootstrap
41 void ensureProperExtensionVersion(config).catch(log.error); 43 void ensureProperExtensionVersion(config, state).catch(log.error);
42 44
43 const serverPath = await ensureServerBinary(config); 45 const serverPath = await ensureServerBinary(config, state);
44 46
45 if (serverPath == null) { 47 if (serverPath == null) {
46 throw new Error( 48 throw new Error(
@@ -53,7 +55,7 @@ export async function activate(context: vscode.ExtensionContext) {
53 // registers its `onDidChangeDocument` handler before us. 55 // registers its `onDidChangeDocument` handler before us.
54 // 56 //
55 // This a horribly, horribly wrong way to deal with this problem. 57 // This a horribly, horribly wrong way to deal with this problem.
56 ctx = await Ctx.create(config, context, serverPath); 58 ctx = await Ctx.create(config, state, context, serverPath);
57 59
58 // Commands which invokes manually via command palette, shortcut, etc. 60 // Commands which invokes manually via command palette, shortcut, etc.
59 ctx.registerCommand('reload', (ctx) => { 61 ctx.registerCommand('reload', (ctx) => {
diff --git a/editors/code/src/persistent_state.ts b/editors/code/src/persistent_state.ts
new file mode 100644
index 000000000..13095b806
--- /dev/null
+++ b/editors/code/src/persistent_state.ts
@@ -0,0 +1,49 @@
1import * as vscode from 'vscode';
2import { log } from "./util";
3
4export class PersistentState {
5 constructor(private readonly ctx: vscode.ExtensionContext) {
6 }
7
8 readonly installedNightlyExtensionReleaseDate = new DateStorage(
9 "installed-nightly-extension-release-date",
10 this.ctx.globalState
11 );
12 readonly serverReleaseDate = new DateStorage("server-release-date", this.ctx.globalState);
13 readonly serverReleaseTag = new Storage<null | string>("server-release-tag", this.ctx.globalState, null);
14}
15
16
17export class Storage<T> {
18 constructor(
19 private readonly key: string,
20 private readonly storage: vscode.Memento,
21 private readonly defaultVal: T
22 ) { }
23
24 get(): T {
25 const val = this.storage.get(this.key, this.defaultVal);
26 log.debug(this.key, "==", val);
27 return val;
28 }
29 async set(val: T) {
30 log.debug(this.key, "=", val);
31 await this.storage.update(this.key, val);
32 }
33}
34export class DateStorage {
35 inner: Storage<null | string>;
36
37 constructor(key: string, storage: vscode.Memento) {
38 this.inner = new Storage(key, storage, null);
39 }
40
41 get(): null | Date {
42 const dateStr = this.inner.get();
43 return dateStr ? new Date(dateStr) : null;
44 }
45
46 async set(date: null | Date) {
47 await this.inner.set(date ? date.toString() : null);
48 }
49}
diff --git a/xtask/tests/tidy-tests/cli.rs b/xtask/tests/tidy-tests/cli.rs
index f9ca45292..f5b00a8b8 100644
--- a/xtask/tests/tidy-tests/cli.rs
+++ b/xtask/tests/tidy-tests/cli.rs
@@ -1,7 +1,6 @@
1use walkdir::WalkDir;
2use xtask::{ 1use xtask::{
3 codegen::{self, Mode}, 2 codegen::{self, Mode},
4 project_root, run_rustfmt, 3 run_rustfmt,
5}; 4};
6 5
7#[test] 6#[test]
@@ -31,25 +30,3 @@ fn check_code_formatting() {
31 panic!("{}. Please format the code by running `cargo format`", error); 30 panic!("{}. Please format the code by running `cargo format`", error);
32 } 31 }
33} 32}
34
/// Walks every `.rs` file under `crates/` and fails if any contains a TODO
/// marker (including the common `TOOD` typo or the `todo!` macro); committed
/// code uses FIXME instead.
#[test]
fn no_todo() {
    WalkDir::new(project_root().join("crates")).into_iter().for_each(|e| {
        let e = e.unwrap();
        // Skip anything that is not a `.rs` file (including extension-less entries).
        if e.path().extension().map(|it| it != "rs").unwrap_or(true) {
            return;
        }
        // This file implements the check, so it legitimately spells the markers.
        if e.path().ends_with("tests/cli.rs") {
            return;
        }
        let text = std::fs::read_to_string(e.path()).unwrap();
        if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
            panic!(
                "\nTODO markers should not be committed to the master branch,\n\
                 use FIXME instead\n\
                 {}\n",
                e.path().display(),
            )
        }
    })
}
diff --git a/xtask/tests/tidy-tests/docs.rs b/xtask/tests/tidy-tests/docs.rs
deleted file mode 100644
index 62c4f8441..000000000
--- a/xtask/tests/tidy-tests/docs.rs
+++ /dev/null
@@ -1,106 +0,0 @@
1use std::{collections::HashMap, fs, io::prelude::*, io::BufReader, path::Path};
2
3use anyhow::Context;
4use walkdir::{DirEntry, WalkDir};
5use xtask::project_root;
6
/// Returns true when `p` lies below a directory whose files are exempt from
/// module-doc checks. Tests hopefully don't really need comments, and for
/// assists (handlers) we already have special comments which are the source
/// of doc tests and user docs.
fn is_exclude_dir(p: &Path) -> bool {
    const EXCLUDE_DIRS: [&str; 3] = ["tests", "test_data", "handlers"];
    // Walk the parent chain only (skip `p` itself, matching the original
    // behavior of starting from `p.parent()`).
    p.ancestors()
        .skip(1)
        .any(|dir| EXCLUDE_DIRS.iter().any(|name| dir.ends_with(name)))
}
21
22fn is_exclude_file(d: &DirEntry) -> bool {
23 let file_names = ["tests.rs"];
24
25 d.file_name().to_str().map(|f_n| file_names.iter().any(|name| *name == f_n)).unwrap_or(false)
26}
27
28fn is_hidden(entry: &DirEntry) -> bool {
29 entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
30}
31
/// Checks that every Rust module under `crates/` starts with a `//!` doc
/// comment, and that FIXMEs inside those doc comments appear only in crates
/// on the whitelist below (which must in turn stay minimal).
#[test]
fn no_docs_comments() {
    let crates = project_root().join("crates");
    let iter = WalkDir::new(crates);
    let mut missing_docs = Vec::new();
    let mut contains_fixme = Vec::new();
    for f in iter.into_iter().filter_entry(|e| !is_hidden(e)) {
        let f = f.unwrap();
        if f.file_type().is_dir() {
            continue;
        }
        // NOTE(review): `unwrap_or(false)` means extension-less files are NOT
        // skipped here (the cli.rs walker uses `unwrap_or(true)`) — confirm
        // whether that asymmetry is intended.
        if f.path().extension().map(|it| it != "rs").unwrap_or(false) {
            continue;
        }
        if is_exclude_dir(f.path()) {
            continue;
        }
        if is_exclude_file(&f) {
            continue;
        }
        // Only the first line is inspected: module docs must start the file.
        let mut reader = BufReader::new(fs::File::open(f.path()).unwrap());
        let mut line = String::new();
        reader
            .read_line(&mut line)
            .with_context(|| format!("Failed to read {}", f.path().display()))
            .unwrap();

        if line.starts_with("//!") {
            if line.contains("FIXME") {
                contains_fixme.push(f.path().to_path_buf())
            }
        } else {
            missing_docs.push(f.path().display().to_string());
        }
    }
    if !missing_docs.is_empty() {
        panic!(
            "\nMissing docs strings\n\n\
             modules:\n{}\n\n",
            missing_docs.join("\n")
        )
    }

    // Crates known to still carry FIXMEs in their module docs.
    let whitelist = [
        "ra_db",
        "ra_hir",
        "ra_hir_expand",
        "ra_ide",
        "ra_mbe",
        "ra_parser",
        "ra_prof",
        "ra_project_model",
        "ra_syntax",
        "ra_text_edit",
        "ra_tt",
        "ra_hir_ty",
    ];

    let mut has_fixmes = whitelist.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
    'outer: for path in contains_fixme {
        for krate in whitelist.iter() {
            if path.components().any(|it| it.as_os_str() == *krate) {
                has_fixmes.insert(krate, true);
                continue 'outer;
            }
        }
        // A FIXME in a crate not on the whitelist is an error.
        panic!("FIXME doc in a fully-documented crate: {}", path.display())
    }

    // A whitelisted crate with no remaining FIXME is a stale entry.
    for (krate, has_fixme) in has_fixmes.iter() {
        if !has_fixme {
            panic!("crate {} is fully documented, remove it from the white list", krate)
        }
    }
}
diff --git a/xtask/tests/tidy-tests/main.rs b/xtask/tests/tidy-tests/main.rs
index 56d1318d6..2d2d88bec 100644
--- a/xtask/tests/tidy-tests/main.rs
+++ b/xtask/tests/tidy-tests/main.rs
@@ -1,2 +1,145 @@
1mod cli; 1mod cli;
2mod docs; 2
3use std::{
4 collections::HashMap,
5 path::{Path, PathBuf},
6};
7
8use walkdir::{DirEntry, WalkDir};
9use xtask::{not_bash::fs2, project_root};
10
/// Single pass over every `.rs` file under `crates/`: checks for committed
/// TODO markers and collects module-documentation problems, reporting the
/// latter at the end via `TidyDocs::finish`.
#[test]
fn rust_files_are_tidy() {
    let mut docs_check = TidyDocs::default();
    rust_files().for_each(|path| {
        let text = fs2::read_to_string(&path).unwrap();
        check_todo(&path, &text);
        docs_check.visit(&path, &text);
    });
    docs_check.finish();
}
21
/// Panics when `text` contains a TODO marker (`TODO`, the common `TOOD`
/// typo, or the `todo!` macro); committed code uses FIXME instead.
/// `tests/cli.rs` is exempt from the check.
fn check_todo(path: &Path, text: &str) {
    // The exempt file legitimately spells the markers out.
    if path.ends_with("tests/cli.rs") {
        return;
    }
    let markers = ["TODO", "TOOD", "todo!"];
    if markers.iter().any(|&marker| text.contains(marker)) {
        panic!(
            "\nTODO markers should not be committed to the master branch,\n\
             use FIXME instead\n\
             {}\n",
            path.display(),
        )
    }
}
35
/// Accumulator for module-documentation problems found while walking the
/// source tree; `finish` turns the collected state into test failures.
#[derive(Default)]
struct TidyDocs {
    // Display paths of files missing a leading `//!` doc comment.
    missing_docs: Vec<String>,
    // Files whose `//!` doc comment contains a FIXME marker.
    contains_fixme: Vec<PathBuf>,
}
41
impl TidyDocs {
    /// Records the documentation status of one file: notes a missing `//!`
    /// module doc comment, or a FIXME inside an existing one.
    fn visit(&mut self, path: &Path, text: &str) {
        // Excluded directories/files carry their docs through other channels.
        if is_exclude_dir(path) || is_exclude_file(path) {
            return;
        }

        // Only the first line matters: module docs must start the file.
        let first_line = match text.lines().next() {
            Some(it) => it,
            None => return,
        };

        if first_line.starts_with("//!") {
            if first_line.contains("FIXME") {
                self.contains_fixme.push(path.to_path_buf())
            }
        } else {
            self.missing_docs.push(path.display().to_string());
        }

        fn is_exclude_dir(p: &Path) -> bool {
            // Test hopefully don't really need comments, and for assists we already
            // have special comments which are source of doc tests and user docs.
            let exclude_dirs = ["tests", "test_data", "handlers"];
            let mut cur_path = p;
            while let Some(path) = cur_path.parent() {
                if exclude_dirs.iter().any(|dir| path.ends_with(dir)) {
                    return true;
                }
                cur_path = path;
            }

            false
        }

        // File names that intentionally carry no module docs.
        fn is_exclude_file(d: &Path) -> bool {
            let file_names = ["tests.rs"];

            d.file_name()
                .unwrap_or_default()
                .to_str()
                .map(|f_n| file_names.iter().any(|name| *name == f_n))
                .unwrap_or(false)
        }
    }

    /// Panics with a report if any module lacks docs, if a FIXME appears in
    /// a crate not on the whitelist, or if a whitelisted crate no longer has
    /// any FIXME (a stale whitelist entry).
    fn finish(self) {
        if !self.missing_docs.is_empty() {
            panic!(
                "\nMissing docs strings\n\n\
                 modules:\n{}\n\n",
                self.missing_docs.join("\n")
            )
        }

        // Crates known to still carry FIXMEs in their module docs.
        let whitelist = [
            "ra_db",
            "ra_hir",
            "ra_hir_expand",
            "ra_ide",
            "ra_mbe",
            "ra_parser",
            "ra_prof",
            "ra_project_model",
            "ra_syntax",
            "ra_text_edit",
            "ra_tt",
            "ra_hir_ty",
        ];

        let mut has_fixmes =
            whitelist.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
        'outer: for path in self.contains_fixme {
            for krate in whitelist.iter() {
                if path.components().any(|it| it.as_os_str() == *krate) {
                    has_fixmes.insert(krate, true);
                    continue 'outer;
                }
            }
            // A FIXME in a crate not on the whitelist is an error.
            panic!("FIXME doc in a fully-documented crate: {}", path.display())
        }

        // A whitelisted crate with no remaining FIXME must be de-listed.
        for (krate, has_fixme) in has_fixmes.iter() {
            if !has_fixme {
                panic!("crate {} is fully documented, remove it from the white list", krate)
            }
        }
    }
}
130
131fn rust_files() -> impl Iterator<Item = PathBuf> {
132 let crates = project_root().join("crates");
133 let iter = WalkDir::new(crates);
134 return iter
135 .into_iter()
136 .filter_entry(|e| !is_hidden(e))
137 .map(|e| e.unwrap())
138 .filter(|e| !e.file_type().is_dir())
139 .map(|e| e.into_path())
140 .filter(|path| path.extension().map(|it| it == "rs").unwrap_or(false));
141
142 fn is_hidden(entry: &DirEntry) -> bool {
143 entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
144 }
145}