-rw-r--r--  Cargo.lock                                                                                       |  12
-rw-r--r--  crates/completion/src/completions/flyimport.rs                                                   |  23
-rw-r--r--  crates/hir_def/src/item_tree.rs                                                                  |  10
-rw-r--r--  crates/hir_def/src/item_tree/lower.rs                                                            |  26
-rw-r--r--  crates/hir_def/src/nameres.rs                                                                    |  12
-rw-r--r--  crates/hir_def/src/nameres/collector.rs                                                          |  13
-rw-r--r--  crates/hir_def/src/nameres/path_resolution.rs                                                    |   7
-rw-r--r--  crates/hir_def/src/nameres/tests/block.rs                                                        |  63
-rw-r--r--  crates/mbe/src/expander.rs (renamed from crates/mbe/src/mbe_expander.rs)                         |   9
-rw-r--r--  crates/mbe/src/expander/matcher.rs (renamed from crates/mbe/src/mbe_expander/matcher.rs)         | 366
-rw-r--r--  crates/mbe/src/expander/transcriber.rs (renamed from crates/mbe/src/mbe_expander/transcriber.rs) |  25
-rw-r--r--  crates/mbe/src/lib.rs                                                                            |  50
-rw-r--r--  crates/mbe/src/parser.rs                                                                         |  52
-rw-r--r--  crates/mbe/src/tests.rs                                                                          |  17
-rw-r--r--  crates/project_model/src/build_data.rs                                                           | 288
-rw-r--r--  crates/project_model/src/cargo_workspace.rs                                                      |  47
-rw-r--r--  crates/project_model/src/lib.rs                                                                  |   1
-rw-r--r--  crates/project_model/src/workspace.rs                                                            |  53
-rw-r--r--  crates/rust-analyzer/src/bin/args.rs                                                             |   2
-rw-r--r--  crates/rust-analyzer/src/cli/load_cargo.rs                                                       |  36
-rw-r--r--  crates/rust-analyzer/src/config.rs                                                               |   4
-rw-r--r--  crates/rust-analyzer/src/global_state.rs                                                         |  12
-rw-r--r--  crates/rust-analyzer/src/lsp_ext.rs                                                              |   1
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs                                                            |  38
-rw-r--r--  crates/rust-analyzer/src/op_queue.rs                                                             |  28
-rw-r--r--  crates/rust-analyzer/src/reload.rs                                                               |  88
-rw-r--r--  docs/dev/lsp-extensions.md                                                                       |   4
-rw-r--r--  editors/code/src/ctx.ts                                                                          |   6
-rw-r--r--  editors/code/src/lsp_ext.ts                                                                      |   2
29 files changed, 788 insertions, 507 deletions
diff --git a/Cargo.lock b/Cargo.lock
index ba0b8e083..7f89fc580 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -418,9 +418,9 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
418 418
419[[package]] 419[[package]]
420name = "flate2" 420name = "flate2"
421version = "1.0.19" 421version = "1.0.20"
422source = "registry+https://github.com/rust-lang/crates.io-index" 422source = "registry+https://github.com/rust-lang/crates.io-index"
423checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" 423checksum = "cd3aec53de10fe96d7d8c565eb17f2c687bb5518a2ec453b5b1252964526abe0"
424dependencies = [ 424dependencies = [
425 "cfg-if", 425 "cfg-if",
426 "crc32fast", 426 "crc32fast",
@@ -785,9 +785,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
785 785
786[[package]] 786[[package]]
787name = "libc" 787name = "libc"
788version = "0.2.83" 788version = "0.2.84"
789source = "registry+https://github.com/rust-lang/crates.io-index" 789source = "registry+https://github.com/rust-lang/crates.io-index"
790checksum = "7eb0c4e9c72ee9d69b767adebc5f4788462a3b45624acd919475c92597bcaf4f" 790checksum = "1cca32fa0182e8c0989459524dc356b8f2b5c10f1b9eb521b7d182c03cf8c5ff"
791 791
792[[package]] 792[[package]]
793name = "libloading" 793name = "libloading"
@@ -1312,9 +1312,9 @@ checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
1312 1312
1313[[package]] 1313[[package]]
1314name = "rowan" 1314name = "rowan"
1315version = "0.12.2" 1315version = "0.12.4"
1316source = "registry+https://github.com/rust-lang/crates.io-index" 1316source = "registry+https://github.com/rust-lang/crates.io-index"
1317checksum = "5e05df24c035422fb2b845d66ef3542d6c4f8572eafe077175a30787b0254207" 1317checksum = "437240cd341f96c5a09924912a221726989c919f69ebce2e289f3adfbce36443"
1318dependencies = [ 1318dependencies = [
1319 "countme", 1319 "countme",
1320 "hashbrown", 1320 "hashbrown",
diff --git a/crates/completion/src/completions/flyimport.rs b/crates/completion/src/completions/flyimport.rs
index 6591127b1..9c6a5a40c 100644
--- a/crates/completion/src/completions/flyimport.rs
+++ b/crates/completion/src/completions/flyimport.rs
@@ -68,7 +68,10 @@ pub(crate) fn import_on_the_fly(acc: &mut Completions, ctx: &CompletionContext)
68 if !ctx.config.enable_imports_on_the_fly { 68 if !ctx.config.enable_imports_on_the_fly {
69 return None; 69 return None;
70 } 70 }
71 if ctx.attribute_under_caret.is_some() || ctx.mod_declaration_under_caret.is_some() { 71 if ctx.use_item_syntax.is_some()
72 || ctx.attribute_under_caret.is_some()
73 || ctx.mod_declaration_under_caret.is_some()
74 {
72 return None; 75 return None;
73 } 76 }
74 let potential_import_name = { 77 let potential_import_name = {
@@ -664,4 +667,22 @@ fn main() {
664 "#]], 667 "#]],
665 ); 668 );
666 } 669 }
670
671 #[test]
672 fn no_completions_in_use_statements() {
673 check(
674 r#"
675//- /lib.rs crate:dep
676pub mod io {
677 pub fn stdin() {}
678};
679
680//- /main.rs crate:main deps:dep
681use stdi$0
682
683fn main() {}
684"#,
685 expect![[]],
686 );
687 }
667} 688}
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index 1e5c94660..42d9f0947 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -98,15 +98,17 @@ impl ItemTree {
98 ctx.lower_module_items(&items) 98 ctx.lower_module_items(&items)
99 }, 99 },
100 ast::MacroStmts(stmts) => { 100 ast::MacroStmts(stmts) => {
101 ctx.lower_inner_items(stmts.syntax()) 101 // The produced statements can include items, which should be added as top-level
102 // items.
103 ctx.lower_macro_stmts(stmts)
102 }, 104 },
103 // Macros can expand to expressions. We return an empty item tree in this case, but
104 // still need to collect inner items.
105 ast::Expr(e) => { 105 ast::Expr(e) => {
106 // Macros can expand to expressions. We return an empty item tree in this case, but
107 // still need to collect inner items.
106 ctx.lower_inner_items(e.syntax()) 108 ctx.lower_inner_items(e.syntax())
107 }, 109 },
108 _ => { 110 _ => {
109 panic!("cannot create item tree from {:?}", syntax); 111 panic!("cannot create item tree from {:?} {}", syntax, syntax);
110 }, 112 },
111 } 113 }
112 }; 114 };
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs
index 8a71376b9..acc001add 100644
--- a/crates/hir_def/src/item_tree/lower.rs
+++ b/crates/hir_def/src/item_tree/lower.rs
@@ -61,6 +61,32 @@ impl Ctx {
61 self.tree 61 self.tree
62 } 62 }
63 63
64 pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
65 self.tree.top_level = stmts
66 .statements()
67 .filter_map(|stmt| match stmt {
68 ast::Stmt::Item(item) => Some(item),
69 _ => None,
70 })
71 .flat_map(|item| self.lower_mod_item(&item, false))
72 .flat_map(|items| items.0)
73 .collect();
74
75 // Non-items need to have their inner items collected.
76 for stmt in stmts.statements() {
77 match stmt {
78 ast::Stmt::ExprStmt(_) | ast::Stmt::LetStmt(_) => {
79 self.collect_inner_items(stmt.syntax())
80 }
81 _ => {}
82 }
83 }
84 if let Some(expr) = stmts.expr() {
85 self.collect_inner_items(expr.syntax());
86 }
87 self.tree
88 }
89
64 pub(super) fn lower_inner_items(mut self, within: &SyntaxNode) -> ItemTree { 90 pub(super) fn lower_inner_items(mut self, within: &SyntaxNode) -> ItemTree {
65 self.collect_inner_items(within); 91 self.collect_inner_items(within);
66 self.tree 92 self.tree
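
For context, here is a plain-Rust illustration (not taken from the diff) of the kind of input the new `lower_macro_stmts` path handles: a statement-position macro whose expansion mixes an item with ordinary statements. The item must become a top-level item of the expansion's item tree, while the other statements only need their inner items collected.

// Illustrative only: a macro whose statement-position expansion contains both
// an item and a let-statement with a nested item.
macro_rules! mixed {
    () => {
        struct S;
        let _x = {
            fn inner() -> i32 { 1 }
            inner()
        };
    };
}

fn main() {
    mixed!();
    // `S` is an item introduced by the expansion and is visible in this block.
    let _s = S;
}
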
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs
index 005b36e02..6169b3bbc 100644
--- a/crates/hir_def/src/nameres.rs
+++ b/crates/hir_def/src/nameres.rs
@@ -199,16 +199,10 @@ impl DefMap {
199 199
200 pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> { 200 pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> {
201 let block: BlockLoc = db.lookup_intern_block(block_id); 201 let block: BlockLoc = db.lookup_intern_block(block_id);
202 let item_tree = db.item_tree(block.ast_id.file_id);
203 let block_items = item_tree.inner_items_of_block(block.ast_id.value);
204
205 let parent = block.module.def_map(db); 202 let parent = block.module.def_map(db);
206 203
207 if block_items.is_empty() { 204 // FIXME: It would be good to just return the parent map when the block has no items, but
208 // If there are no inner items, nothing new is brought into scope, so we can just return 205 // we rely on `def_map.block` in a few places, which is `Some` for the inner `DefMap`.
209 // the parent DefMap. This keeps DefMap parent chains short.
210 return parent;
211 }
212 206
213 let block_info = 207 let block_info =
214 BlockInfo { block: block_id, parent, parent_module: block.module.local_id }; 208 BlockInfo { block: block_id, parent, parent_module: block.module.local_id };
@@ -216,7 +210,7 @@ impl DefMap {
216 let mut def_map = DefMap::empty(block.module.krate, block_info.parent.edition); 210 let mut def_map = DefMap::empty(block.module.krate, block_info.parent.edition);
217 def_map.block = Some(block_info); 211 def_map.block = Some(block_info);
218 212
219 let def_map = collector::collect_defs(db, def_map, Some(block.ast_id.value)); 213 let def_map = collector::collect_defs(db, def_map, Some(block.ast_id));
220 Arc::new(def_map) 214 Arc::new(def_map)
221 } 215 }
222 216
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs
index 761b29c86..ae98fadac 100644
--- a/crates/hir_def/src/nameres/collector.rs
+++ b/crates/hir_def/src/nameres/collector.rs
@@ -48,7 +48,7 @@ const FIXED_POINT_LIMIT: usize = 8192;
48pub(super) fn collect_defs( 48pub(super) fn collect_defs(
49 db: &dyn DefDatabase, 49 db: &dyn DefDatabase,
50 mut def_map: DefMap, 50 mut def_map: DefMap,
51 block: Option<FileAstId<ast::BlockExpr>>, 51 block: Option<AstId<ast::BlockExpr>>,
52) -> DefMap { 52) -> DefMap {
53 let crate_graph = db.crate_graph(); 53 let crate_graph = db.crate_graph();
54 54
@@ -261,11 +261,10 @@ impl DefCollector<'_> {
261 } 261 }
262 } 262 }
263 263
264 fn seed_with_inner(&mut self, block: FileAstId<ast::BlockExpr>) { 264 fn seed_with_inner(&mut self, block: AstId<ast::BlockExpr>) {
265 let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id; 265 let item_tree = self.db.item_tree(block.file_id);
266 let item_tree = self.db.item_tree(file_id.into());
267 let module_id = self.def_map.root; 266 let module_id = self.def_map.root;
268 self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id }; 267 self.def_map.modules[module_id].origin = ModuleOrigin::BlockExpr { block };
269 if item_tree 268 if item_tree
270 .top_level_attrs(self.db, self.def_map.krate) 269 .top_level_attrs(self.db, self.def_map.krate)
271 .cfg() 270 .cfg()
@@ -275,11 +274,11 @@ impl DefCollector<'_> {
275 def_collector: &mut *self, 274 def_collector: &mut *self,
276 macro_depth: 0, 275 macro_depth: 0,
277 module_id, 276 module_id,
278 file_id: file_id.into(), 277 file_id: block.file_id,
279 item_tree: &item_tree, 278 item_tree: &item_tree,
280 mod_dir: ModDir::root(), 279 mod_dir: ModDir::root(),
281 } 280 }
282 .collect(item_tree.inner_items_of_block(block)); 281 .collect(item_tree.inner_items_of_block(block.value));
283 } 282 }
284 } 283 }
285 284
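
A minimal, self-contained sketch (hypothetical stand-in types, not the hir_def API) of what switching the collector from `FileAstId<ast::BlockExpr>` to `AstId<ast::BlockExpr>` buys: the `AstId` carries the file the block lives in, so `seed_with_inner` can load that file's item tree instead of assuming the crate root file.

// Hypothetical stand-ins: AstId = FileAstId plus the owning file.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct HirFileId(u32);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileAstId(u32);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct AstId {
    file_id: HirFileId,
    value: FileAstId,
}

fn item_tree(file_id: HirFileId) -> String {
    format!("item tree for {:?}", file_id)
}

// Before the change this had to guess the file (it used the crate root);
// now the block's own file comes with the id.
fn seed_with_inner(block: AstId) -> String {
    item_tree(block.file_id)
}

fn main() {
    let block = AstId { file_id: HirFileId(7), value: FileAstId(3) };
    println!("seeding from {:?}", block);
    assert_eq!(seed_with_inner(block), "item tree for HirFileId(7)");
}
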
diff --git a/crates/hir_def/src/nameres/path_resolution.rs b/crates/hir_def/src/nameres/path_resolution.rs
index 419e465ed..2a0f8ec2b 100644
--- a/crates/hir_def/src/nameres/path_resolution.rs
+++ b/crates/hir_def/src/nameres/path_resolution.rs
@@ -103,7 +103,7 @@ impl DefMap {
103 &self, 103 &self,
104 db: &dyn DefDatabase, 104 db: &dyn DefDatabase,
105 mode: ResolveMode, 105 mode: ResolveMode,
106 original_module: LocalModuleId, 106 mut original_module: LocalModuleId,
107 path: &ModPath, 107 path: &ModPath,
108 shadow: BuiltinShadowMode, 108 shadow: BuiltinShadowMode,
109 ) -> ResolvePathResult { 109 ) -> ResolvePathResult {
@@ -130,7 +130,10 @@ impl DefMap {
130 result.segment_index = result.segment_index.min(new.segment_index); 130 result.segment_index = result.segment_index.min(new.segment_index);
131 131
132 match &current_map.block { 132 match &current_map.block {
133 Some(block) => current_map = &block.parent, 133 Some(block) => {
134 current_map = &block.parent;
135 original_module = block.parent_module;
136 }
134 None => return result, 137 None => return result,
135 } 138 }
136 } 139 }
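
The one-line fix above is easy to miss, so here is a tiny self-contained model (invented types, not the real DefMap) of why `original_module` has to be reassigned: when resolution falls through from a block's DefMap to its parent map, the module id must be translated into the parent map's namespace too, otherwise an id from the inner map would be used to index the outer one.

// Invented miniature of the DefMap parent chain; indices stand in for interned ids.
struct BlockInfo {
    parent: usize,       // index of the parent DefMap
    parent_module: u32,  // module id of the block *in the parent map*
}

struct DefMap {
    block: Option<BlockInfo>,
    name: &'static str,
}

fn resolve(maps: &[DefMap], mut map_idx: usize, mut original_module: u32) -> (&'static str, u32) {
    loop {
        let current = &maps[map_idx];
        match &current.block {
            Some(block) => {
                map_idx = block.parent;
                // The fix: keep the module id in sync with the map we moved to.
                original_module = block.parent_module;
            }
            None => return (current.name, original_module),
        }
    }
}

fn main() {
    let maps = [
        DefMap { block: None, name: "crate def map" },
        DefMap { block: Some(BlockInfo { parent: 0, parent_module: 3 }), name: "block def map" },
    ];
    // Start in the block's map (index 1, local module 0); end up in the crate
    // map with the module the block belongs to there (3).
    assert_eq!(resolve(&maps, 1, 0), ("crate def map", 3));
}
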
diff --git a/crates/hir_def/src/nameres/tests/block.rs b/crates/hir_def/src/nameres/tests/block.rs
index 470ca593e..6cc659513 100644
--- a/crates/hir_def/src/nameres/tests/block.rs
+++ b/crates/hir_def/src/nameres/tests/block.rs
@@ -121,3 +121,66 @@ struct Struct {}
121 "#]], 121 "#]],
122 ); 122 );
123} 123}
124
125#[test]
126fn legacy_macro_items() {
127 // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded
128 // correctly.
129 check_at(
130 r#"
131macro_rules! hit {
132 () => {
133 struct Hit {}
134 }
135}
136
137fn f() {
138 hit!();
139 $0
140}
141"#,
142 expect![[r#"
143 block scope
144 Hit: t
145 crate
146 f: v
147 "#]],
148 );
149}
150
151#[test]
152fn macro_resolve() {
153 check_at(
154 r#"
155//- /lib.rs crate:lib deps:core
156use core::mark;
157
158fn f() {
159 fn nested() {
160 mark::hit!(Hit);
161 $0
162 }
163}
164//- /core.rs crate:core
165pub mod mark {
166 #[macro_export]
167 macro_rules! _hit {
168 ($name:ident) => {
169 struct $name {}
170 }
171 }
172
173 pub use crate::_hit as hit;
174}
175"#,
176 expect![[r#"
177 block scope
178 Hit: t
179 block scope
180 nested: v
181 crate
182 f: v
183 mark: t
184 "#]],
185 );
186}
diff --git a/crates/mbe/src/mbe_expander.rs b/crates/mbe/src/expander.rs
index 802c8fb0f..e7e14b3cc 100644
--- a/crates/mbe/src/mbe_expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -16,13 +16,8 @@ pub(crate) fn expand_rules(
16) -> ExpandResult<tt::Subtree> { 16) -> ExpandResult<tt::Subtree> {
17 let mut match_: Option<(matcher::Match, &crate::Rule)> = None; 17 let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
18 for rule in rules { 18 for rule in rules {
19 let new_match = match matcher::match_(&rule.lhs, input) { 19 let new_match = matcher::match_(&rule.lhs, input);
20 Ok(m) => m, 20
21 Err(_e) => {
22 // error in pattern parsing
23 continue;
24 }
25 };
26 if new_match.err.is_none() { 21 if new_match.err.is_none() {
27 // If we find a rule that applies without errors, we're done. 22 // If we find a rule that applies without errors, we're done.
28 // Unconditionally returning the transcription here makes the 23 // Unconditionally returning the transcription here makes the
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index d32e60521..800931cd1 100644
--- a/crates/mbe/src/mbe_expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -1,7 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use crate::{ 3use crate::{
4 mbe_expander::{Binding, Bindings, Fragment}, 4 expander::{Binding, Bindings, Fragment},
5 parser::{Op, RepeatKind, Separator}, 5 parser::{Op, RepeatKind, Separator},
6 subtree_source::SubtreeTokenSource, 6 subtree_source::SubtreeTokenSource,
7 tt_iter::TtIter, 7 tt_iter::TtIter,
@@ -70,69 +70,38 @@ pub(super) struct Match {
70} 70}
71 71
72impl Match { 72impl Match {
73 pub(super) fn add_err(&mut self, err: ExpandError) { 73 fn add_err(&mut self, err: ExpandError) {
74 let prev_err = self.err.take(); 74 let prev_err = self.err.take();
75 self.err = prev_err.or(Some(err)); 75 self.err = prev_err.or(Some(err));
76 self.err_count += 1; 76 self.err_count += 1;
77 } 77 }
78} 78}
79 79
80// General note: These functions have two channels to return errors, a `Result` 80/// Matching errors are added to the `Match`.
81// return value and the `&mut Match`. The returned Result is for pattern parsing 81pub(super) fn match_(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
82// errors; if a branch of the macro definition doesn't parse, it doesn't make
83// sense to try using it. Matching errors are added to the `Match`. It might
84// make sense to make pattern parsing a separate step?
85
86pub(super) fn match_(pattern: &MetaTemplate, src: &tt::Subtree) -> Result<Match, ExpandError> {
87 assert!(pattern.delimiter == None);
88
89 let mut res = Match::default(); 82 let mut res = Match::default();
90 let mut src = TtIter::new(src); 83 let mut src = TtIter::new(src);
91 84
92 match_subtree(&mut res, pattern, &mut src)?; 85 match_tokens(&mut res, pattern, &mut src);
93 86
94 if src.len() > 0 { 87 if src.len() > 0 {
95 res.unmatched_tts += src.len(); 88 res.unmatched_tts += src.len();
96 res.add_err(err!("leftover tokens")); 89 res.add_err(err!("leftover tokens"));
97 } 90 }
98 91
99 Ok(res) 92 res
100} 93}
101 94
102fn match_subtree( 95fn match_tokens(res: &mut Match, pattern: &MetaTemplate, src: &mut TtIter) {
103 res: &mut Match,
104 pattern: &MetaTemplate,
105 src: &mut TtIter,
106) -> Result<(), ExpandError> {
107 for op in pattern.iter() { 96 for op in pattern.iter() {
108 match op.as_ref().map_err(|err| err.clone())? { 97 match op {
109 Op::Leaf(lhs) => { 98 Op::Leaf(lhs) => {
110 let rhs = match src.expect_leaf() { 99 if let Err(err) = match_leaf(lhs, src) {
111 Ok(l) => l, 100 res.add_err(err);
112 Err(()) => { 101 continue;
113 res.add_err(err!("expected leaf: `{}`", lhs));
114 continue;
115 }
116 };
117 match (lhs, rhs) {
118 (
119 tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
120 tt::Leaf::Punct(tt::Punct { char: rhs, .. }),
121 ) if lhs == rhs => (),
122 (
123 tt::Leaf::Ident(tt::Ident { text: lhs, .. }),
124 tt::Leaf::Ident(tt::Ident { text: rhs, .. }),
125 ) if lhs == rhs => (),
126 (
127 tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
128 tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
129 ) if lhs == rhs => (),
130 _ => {
131 res.add_err(ExpandError::UnexpectedToken);
132 }
133 } 102 }
134 } 103 }
135 Op::Subtree(lhs) => { 104 Op::Subtree { tokens, delimiter: delim } => {
136 let rhs = match src.expect_subtree() { 105 let rhs = match src.expect_subtree() {
137 Ok(s) => s, 106 Ok(s) => s,
138 Err(()) => { 107 Err(()) => {
@@ -140,12 +109,12 @@ fn match_subtree(
140 continue; 109 continue;
141 } 110 }
142 }; 111 };
143 if lhs.delimiter_kind() != rhs.delimiter_kind() { 112 if delim.map(|it| it.kind) != rhs.delimiter_kind() {
144 res.add_err(err!("mismatched delimiter")); 113 res.add_err(err!("mismatched delimiter"));
145 continue; 114 continue;
146 } 115 }
147 let mut src = TtIter::new(rhs); 116 let mut src = TtIter::new(rhs);
148 match_subtree(res, lhs, &mut src)?; 117 match_tokens(res, tokens, &mut src);
149 if src.len() > 0 { 118 if src.len() > 0 {
150 res.add_err(err!("leftover tokens")); 119 res.add_err(err!("leftover tokens"));
151 } 120 }
@@ -171,14 +140,170 @@ fn match_subtree(
171 res.add_err(err); 140 res.add_err(err);
172 } 141 }
173 } 142 }
174 Op::Repeat { subtree, kind, separator } => { 143 Op::Repeat { tokens: subtree, kind, separator } => {
175 match_repeat(res, subtree, *kind, separator, src)?; 144 match_repeat(res, subtree, *kind, separator, src);
176 } 145 }
177 } 146 }
178 } 147 }
148}
149
150fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter) -> Result<(), ExpandError> {
151 let rhs = match src.expect_leaf() {
152 Ok(l) => l,
153 Err(()) => {
154 return Err(err!("expected leaf: `{}`", lhs));
155 }
156 };
157 match (lhs, rhs) {
158 (
159 tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
160 tt::Leaf::Punct(tt::Punct { char: rhs, .. }),
161 ) if lhs == rhs => (),
162 (
163 tt::Leaf::Ident(tt::Ident { text: lhs, .. }),
164 tt::Leaf::Ident(tt::Ident { text: rhs, .. }),
165 ) if lhs == rhs => (),
166 (
167 tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
168 tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
169 ) if lhs == rhs => (),
170 _ => {
171 return Err(ExpandError::UnexpectedToken);
172 }
173 }
174
179 Ok(()) 175 Ok(())
180} 176}
181 177
178fn match_repeat(
179 res: &mut Match,
180 pattern: &MetaTemplate,
181 kind: RepeatKind,
182 separator: &Option<Separator>,
183 src: &mut TtIter,
184) {
185 // Dirty hack to make macro-expansion terminate.
186 // This should be replaced by a proper macro-by-example implementation
187 let mut limit = 65536;
188 let mut counter = 0;
189
190 for i in 0.. {
191 let mut fork = src.clone();
192
193 if let Some(separator) = &separator {
194 if i != 0 && !fork.eat_separator(separator) {
195 break;
196 }
197 }
198
199 let mut nested = Match::default();
200 match_tokens(&mut nested, pattern, &mut fork);
201 if nested.err.is_none() {
202 limit -= 1;
203 if limit == 0 {
204 log::warn!(
205 "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
206 pattern,
207 src,
208 kind,
209 separator
210 );
211 break;
212 }
213 *src = fork;
214
215 if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
216 res.add_err(err);
217 }
218 counter += 1;
219 if counter == 1 {
220 if let RepeatKind::ZeroOrOne = kind {
221 break;
222 }
223 }
224 } else {
225 break;
226 }
227 }
228
229 match (kind, counter) {
230 (RepeatKind::OneOrMore, 0) => {
231 res.add_err(ExpandError::UnexpectedToken);
232 }
233 (_, 0) => {
234 // Collect all empty variables in subtrees
235 let mut vars = Vec::new();
236 collect_vars(&mut vars, pattern);
237 for var in vars {
238 res.bindings.push_empty(&var)
239 }
240 }
241 _ => (),
242 }
243}
244
245fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
246 let fragment = match kind {
247 "path" => Path,
248 "expr" => Expr,
249 "ty" => Type,
250 "pat" => Pattern,
251 "stmt" => Statement,
252 "block" => Block,
253 "meta" => MetaItem,
254 "item" => Item,
255 _ => {
256 let tt_result = match kind {
257 "ident" => input
258 .expect_ident()
259 .map(|ident| Some(tt::Leaf::from(ident.clone()).into()))
260 .map_err(|()| err!("expected ident")),
261 "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
262 "lifetime" => input
263 .expect_lifetime()
264 .map(|tt| Some(tt))
265 .map_err(|()| err!("expected lifetime")),
266 "literal" => {
267 let neg = input.eat_char('-');
268 input
269 .expect_literal()
270 .map(|literal| {
271 let lit = tt::Leaf::from(literal.clone());
272 match neg {
273 None => Some(lit.into()),
274 Some(neg) => Some(tt::TokenTree::Subtree(tt::Subtree {
275 delimiter: None,
276 token_trees: vec![neg, lit.into()],
277 })),
278 }
279 })
280 .map_err(|()| err!())
281 }
282 // `vis` is optional
283 "vis" => match input.eat_vis() {
284 Some(vis) => Ok(Some(vis)),
285 None => Ok(None),
286 },
287 _ => Err(ExpandError::UnexpectedToken),
288 };
289 return tt_result.map(|it| it.map(Fragment::Tokens)).into();
290 }
291 };
292 let result = input.expect_fragment(fragment);
293 result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
294}
295
296fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) {
297 for op in pattern.iter() {
298 match op {
299 Op::Var { name, .. } => buf.push(name.clone()),
300 Op::Leaf(_) => (),
301 Op::Subtree { tokens, .. } => collect_vars(buf, tokens),
302 Op::Repeat { tokens, .. } => collect_vars(buf, tokens),
303 }
304 }
305}
306
182impl<'a> TtIter<'a> { 307impl<'a> TtIter<'a> {
183 fn eat_separator(&mut self, separator: &Separator) -> bool { 308 fn eat_separator(&mut self, separator: &Separator) -> bool {
184 let mut fork = self.clone(); 309 let mut fork = self.clone();
@@ -206,7 +331,7 @@ impl<'a> TtIter<'a> {
206 ok 331 ok
207 } 332 }
208 333
209 pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> { 334 fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
210 match self.peek_n(0) { 335 match self.peek_n(0) {
211 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => { 336 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
212 return self.expect_lifetime(); 337 return self.expect_lifetime();
@@ -267,7 +392,7 @@ impl<'a> TtIter<'a> {
267 } 392 }
268 } 393 }
269 394
270 pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> { 395 fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
271 let punct = self.expect_punct()?; 396 let punct = self.expect_punct()?;
272 if punct.char != '\'' { 397 if punct.char != '\'' {
273 return Err(()); 398 return Err(());
@@ -284,13 +409,13 @@ impl<'a> TtIter<'a> {
284 .into()) 409 .into())
285 } 410 }
286 411
287 pub(crate) fn expect_fragment( 412 fn expect_fragment(
288 &mut self, 413 &mut self,
289 fragment_kind: parser::FragmentKind, 414 fragment_kind: parser::FragmentKind,
290 ) -> ExpandResult<Option<tt::TokenTree>> { 415 ) -> ExpandResult<Option<tt::TokenTree>> {
291 pub(crate) struct OffsetTokenSink<'a> { 416 struct OffsetTokenSink<'a> {
292 pub(crate) cursor: Cursor<'a>, 417 cursor: Cursor<'a>,
293 pub(crate) error: bool, 418 error: bool,
294 } 419 }
295 420
296 impl<'a> TreeSink for OffsetTokenSink<'a> { 421 impl<'a> TreeSink for OffsetTokenSink<'a> {
@@ -346,7 +471,7 @@ impl<'a> TtIter<'a> {
346 ExpandResult { value: res, err } 471 ExpandResult { value: res, err }
347 } 472 }
348 473
349 pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> { 474 fn eat_vis(&mut self) -> Option<tt::TokenTree> {
350 let mut fork = self.clone(); 475 let mut fork = self.clone();
351 match fork.expect_fragment(Visibility) { 476 match fork.expect_fragment(Visibility) {
352 ExpandResult { value: tt, err: None } => { 477 ExpandResult { value: tt, err: None } => {
@@ -357,7 +482,7 @@ impl<'a> TtIter<'a> {
357 } 482 }
358 } 483 }
359 484
360 pub(crate) fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> { 485 fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
361 let mut fork = self.clone(); 486 let mut fork = self.clone();
362 match fork.expect_char(c) { 487 match fork.expect_char(c) {
363 Ok(_) => { 488 Ok(_) => {
@@ -369,134 +494,3 @@ impl<'a> TtIter<'a> {
369 } 494 }
370 } 495 }
371} 496}
372
373pub(super) fn match_repeat(
374 res: &mut Match,
375 pattern: &MetaTemplate,
376 kind: RepeatKind,
377 separator: &Option<Separator>,
378 src: &mut TtIter,
379) -> Result<(), ExpandError> {
380 // Dirty hack to make macro-expansion terminate.
381 // This should be replaced by a proper macro-by-example implementation
382 let mut limit = 65536;
383 let mut counter = 0;
384
385 for i in 0.. {
386 let mut fork = src.clone();
387
388 if let Some(separator) = &separator {
389 if i != 0 && !fork.eat_separator(separator) {
390 break;
391 }
392 }
393
394 let mut nested = Match::default();
395 match_subtree(&mut nested, pattern, &mut fork)?;
396 if nested.err.is_none() {
397 limit -= 1;
398 if limit == 0 {
399 log::warn!(
400 "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
401 pattern,
402 src,
403 kind,
404 separator
405 );
406 break;
407 }
408 *src = fork;
409
410 if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
411 res.add_err(err);
412 }
413 counter += 1;
414 if counter == 1 {
415 if let RepeatKind::ZeroOrOne = kind {
416 break;
417 }
418 }
419 } else {
420 break;
421 }
422 }
423
424 match (kind, counter) {
425 (RepeatKind::OneOrMore, 0) => {
426 res.add_err(ExpandError::UnexpectedToken);
427 }
428 (_, 0) => {
429 // Collect all empty variables in subtrees
430 let mut vars = Vec::new();
431 collect_vars(&mut vars, pattern)?;
432 for var in vars {
433 res.bindings.push_empty(&var)
434 }
435 }
436 _ => (),
437 }
438 Ok(())
439}
440
441fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
442 let fragment = match kind {
443 "path" => Path,
444 "expr" => Expr,
445 "ty" => Type,
446 "pat" => Pattern,
447 "stmt" => Statement,
448 "block" => Block,
449 "meta" => MetaItem,
450 "item" => Item,
451 _ => {
452 let tt_result = match kind {
453 "ident" => input
454 .expect_ident()
455 .map(|ident| Some(tt::Leaf::from(ident.clone()).into()))
456 .map_err(|()| err!("expected ident")),
457 "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
458 "lifetime" => input
459 .expect_lifetime()
460 .map(|tt| Some(tt))
461 .map_err(|()| err!("expected lifetime")),
462 "literal" => {
463 let neg = input.eat_char('-');
464 input
465 .expect_literal()
466 .map(|literal| {
467 let lit = tt::Leaf::from(literal.clone());
468 match neg {
469 None => Some(lit.into()),
470 Some(neg) => Some(tt::TokenTree::Subtree(tt::Subtree {
471 delimiter: None,
472 token_trees: vec![neg, lit.into()],
473 })),
474 }
475 })
476 .map_err(|()| err!())
477 }
478 // `vis` is optional
479 "vis" => match input.eat_vis() {
480 Some(vis) => Ok(Some(vis)),
481 None => Ok(None),
482 },
483 _ => Err(ExpandError::UnexpectedToken),
484 };
485 return tt_result.map(|it| it.map(Fragment::Tokens)).into();
486 }
487 };
488 let result = input.expect_fragment(fragment);
489 result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
490}
491
492fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) -> Result<(), ExpandError> {
493 for op in pattern.iter() {
494 match op.as_ref().map_err(|e| e.clone())? {
495 Op::Var { name, .. } => buf.push(name.clone()),
496 Op::Leaf(_) => (),
497 Op::Subtree(subtree) => collect_vars(buf, subtree)?,
498 Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?,
499 }
500 }
501 Ok(())
502}
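
A compact, self-contained sketch of the error-handling shape the matcher moves to above: pattern-parsing failures no longer flow through a `Result` here, and matching problems are accumulated on the `Match` itself, keeping the first error and counting the rest. Only the names mirror the diff; the bodies are simplified.

#[derive(Default, Debug)]
struct Match {
    err: Option<String>,
    err_count: usize,
    unmatched_tts: usize,
}

impl Match {
    fn add_err(&mut self, err: String) {
        // Keep the first error, but remember how many occurred.
        let prev_err = self.err.take();
        self.err = prev_err.or(Some(err));
        self.err_count += 1;
    }
}

// match_ now always returns a Match; callers inspect `err` instead of using `?`.
fn match_(leftover_tokens: usize) -> Match {
    let mut res = Match::default();
    if leftover_tokens > 0 {
        res.unmatched_tts += leftover_tokens;
        res.add_err("leftover tokens".to_string());
    }
    res
}

fn main() {
    let ok = match_(0);
    assert!(ok.err.is_none());

    let failed = match_(2);
    assert_eq!(failed.err.as_deref(), Some("leftover tokens"));
    assert_eq!(failed.err_count, 1);
    assert_eq!(failed.unmatched_tts, 2);
}
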
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index 59a3c80a8..78368a33e 100644
--- a/crates/mbe/src/mbe_expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -2,10 +2,11 @@
2//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` 2//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
3 3
4use syntax::SmolStr; 4use syntax::SmolStr;
5use tt::Delimiter;
5 6
6use super::ExpandResult; 7use super::ExpandResult;
7use crate::{ 8use crate::{
8 mbe_expander::{Binding, Bindings, Fragment}, 9 expander::{Binding, Bindings, Fragment},
9 parser::{Op, RepeatKind, Separator}, 10 parser::{Op, RepeatKind, Separator},
10 ExpandError, MetaTemplate, 11 ExpandError, MetaTemplate,
11}; 12};
@@ -54,10 +55,9 @@ pub(super) fn transcribe(
54 template: &MetaTemplate, 55 template: &MetaTemplate,
55 bindings: &Bindings, 56 bindings: &Bindings,
56) -> ExpandResult<tt::Subtree> { 57) -> ExpandResult<tt::Subtree> {
57 assert!(template.delimiter == None);
58 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; 58 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
59 let mut arena: Vec<tt::TokenTree> = Vec::new(); 59 let mut arena: Vec<tt::TokenTree> = Vec::new();
60 expand_subtree(&mut ctx, template, &mut arena) 60 expand_subtree(&mut ctx, template, None, &mut arena)
61} 61}
62 62
63#[derive(Debug)] 63#[derive(Debug)]
@@ -80,23 +80,18 @@ struct ExpandCtx<'a> {
80fn expand_subtree( 80fn expand_subtree(
81 ctx: &mut ExpandCtx, 81 ctx: &mut ExpandCtx,
82 template: &MetaTemplate, 82 template: &MetaTemplate,
83 delimiter: Option<Delimiter>,
83 arena: &mut Vec<tt::TokenTree>, 84 arena: &mut Vec<tt::TokenTree>,
84) -> ExpandResult<tt::Subtree> { 85) -> ExpandResult<tt::Subtree> {
85 // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation 86 // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
86 let start_elements = arena.len(); 87 let start_elements = arena.len();
87 let mut err = None; 88 let mut err = None;
88 for op in template.iter() { 89 for op in template.iter() {
89 let op = match op {
90 Ok(op) => op,
91 Err(e) => {
92 err = Some(e.clone());
93 break;
94 }
95 };
96 match op { 90 match op {
97 Op::Leaf(tt) => arena.push(tt.clone().into()), 91 Op::Leaf(tt) => arena.push(tt.clone().into()),
98 Op::Subtree(tt) => { 92 Op::Subtree { tokens, delimiter } => {
99 let ExpandResult { value: tt, err: e } = expand_subtree(ctx, &tt, arena); 93 let ExpandResult { value: tt, err: e } =
94 expand_subtree(ctx, &tokens, *delimiter, arena);
100 err = err.or(e); 95 err = err.or(e);
101 arena.push(tt.into()); 96 arena.push(tt.into());
102 } 97 }
@@ -105,7 +100,7 @@ fn expand_subtree(
105 err = err.or(e); 100 err = err.or(e);
106 push_fragment(arena, fragment); 101 push_fragment(arena, fragment);
107 } 102 }
108 Op::Repeat { subtree, kind, separator } => { 103 Op::Repeat { tokens: subtree, kind, separator } => {
109 let ExpandResult { value: fragment, err: e } = 104 let ExpandResult { value: fragment, err: e } =
110 expand_repeat(ctx, subtree, *kind, separator, arena); 105 expand_repeat(ctx, subtree, *kind, separator, arena);
111 err = err.or(e); 106 err = err.or(e);
@@ -115,7 +110,7 @@ fn expand_subtree(
115 } 110 }
116 // drain the elements added in this instance of expand_subtree 111 // drain the elements added in this instance of expand_subtree
117 let tts = arena.drain(start_elements..arena.len()).collect(); 112 let tts = arena.drain(start_elements..arena.len()).collect();
118 ExpandResult { value: tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err } 113 ExpandResult { value: tt::Subtree { delimiter, token_trees: tts }, err }
119} 114}
120 115
121fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> { 116fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
@@ -169,7 +164,7 @@ fn expand_repeat(
169 let mut counter = 0; 164 let mut counter = 0;
170 165
171 loop { 166 loop {
172 let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, arena); 167 let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
173 let nesting_state = ctx.nesting.last_mut().unwrap(); 168 let nesting_state = ctx.nesting.last_mut().unwrap();
174 if nesting_state.at_end || !nesting_state.hit { 169 if nesting_state.at_end || !nesting_state.hit {
175 break; 170 break;
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 35cde5f10..d80bd7a33 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -4,7 +4,7 @@
4//! `TokenTree`s as well! 4//! `TokenTree`s as well!
5 5
6mod parser; 6mod parser;
7mod mbe_expander; 7mod expander;
8mod syntax_bridge; 8mod syntax_bridge;
9mod tt_iter; 9mod tt_iter;
10mod subtree_source; 10mod subtree_source;
@@ -24,7 +24,9 @@ use crate::{
24 24
25#[derive(Debug, PartialEq, Eq)] 25#[derive(Debug, PartialEq, Eq)]
26pub enum ParseError { 26pub enum ParseError {
27 UnexpectedToken(String),
27 Expected(String), 28 Expected(String),
29 InvalidRepeat,
28 RepetitionEmptyTokenTree, 30 RepetitionEmptyTokenTree,
29} 31}
30 32
@@ -34,7 +36,6 @@ pub enum ExpandError {
34 UnexpectedToken, 36 UnexpectedToken,
35 BindingError(String), 37 BindingError(String),
36 ConversionError, 38 ConversionError,
37 InvalidRepeat,
38 ProcMacroError(tt::ExpansionError), 39 ProcMacroError(tt::ExpansionError),
39 UnresolvedProcMacro, 40 UnresolvedProcMacro,
40 Other(String), 41 Other(String),
@@ -53,7 +54,6 @@ impl fmt::Display for ExpandError {
53 ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), 54 ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
54 ExpandError::BindingError(e) => f.write_str(e), 55 ExpandError::BindingError(e) => f.write_str(e),
55 ExpandError::ConversionError => f.write_str("could not convert tokens"), 56 ExpandError::ConversionError => f.write_str("could not convert tokens"),
56 ExpandError::InvalidRepeat => f.write_str("invalid repeat expression"),
57 ExpandError::ProcMacroError(e) => e.fmt(f), 57 ExpandError::ProcMacroError(e) => e.fmt(f),
58 ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc macro"), 58 ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc macro"),
59 ExpandError::Other(e) => f.write_str(e), 59 ExpandError::Other(e) => f.write_str(e),
@@ -92,18 +92,11 @@ struct Rule {
92} 92}
93 93
94#[derive(Clone, Debug, PartialEq, Eq)] 94#[derive(Clone, Debug, PartialEq, Eq)]
95struct MetaTemplate { 95struct MetaTemplate(Vec<Op>);
96 delimiter: Option<Delimiter>,
97 tokens: Vec<Result<Op, ExpandError>>,
98}
99 96
100impl<'a> MetaTemplate { 97impl<'a> MetaTemplate {
101 fn iter(&self) -> impl Iterator<Item = &Result<Op, ExpandError>> { 98 fn iter(&self) -> impl Iterator<Item = &Op> {
102 self.tokens.iter() 99 self.0.iter()
103 }
104
105 fn delimiter_kind(&self) -> Option<DelimiterKind> {
106 self.delimiter.map(|it| it.kind)
107 } 100 }
108} 101}
109 102
@@ -209,7 +202,7 @@ impl MacroRules {
209 // apply shift 202 // apply shift
210 let mut tt = tt.clone(); 203 let mut tt = tt.clone();
211 self.shift.shift_all(&mut tt); 204 self.shift.shift_all(&mut tt);
212 mbe_expander::expand_rules(&self.rules, &tt) 205 expander::expand_rules(&self.rules, &tt)
213 } 206 }
214 207
215 pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { 208 pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
@@ -260,7 +253,7 @@ impl MacroDef {
260 // apply shift 253 // apply shift
261 let mut tt = tt.clone(); 254 let mut tt = tt.clone();
262 self.shift.shift_all(&mut tt); 255 self.shift.shift_all(&mut tt);
263 mbe_expander::expand_rules(&self.rules, &tt) 256 expander::expand_rules(&self.rules, &tt)
264 } 257 }
265 258
266 pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { 259 pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
@@ -288,47 +281,38 @@ impl Rule {
288 .expect_subtree() 281 .expect_subtree()
289 .map_err(|()| ParseError::Expected("expected subtree".to_string()))?; 282 .map_err(|()| ParseError::Expected("expected subtree".to_string()))?;
290 283
291 let lhs = MetaTemplate { tokens: parse_pattern(&lhs), delimiter: None }; 284 let lhs = MetaTemplate(parse_pattern(&lhs)?);
292 let rhs = MetaTemplate { tokens: parse_template(&rhs), delimiter: None }; 285 let rhs = MetaTemplate(parse_template(&rhs)?);
293 286
294 Ok(crate::Rule { lhs, rhs }) 287 Ok(crate::Rule { lhs, rhs })
295 } 288 }
296} 289}
297 290
298fn to_parse_error(e: &ExpandError) -> ParseError {
299 let msg = match e {
300 ExpandError::InvalidRepeat => "invalid repeat".to_string(),
301 _ => "invalid macro definition".to_string(),
302 };
303 ParseError::Expected(msg)
304}
305
306fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { 291fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
307 for op in pattern.iter() { 292 for op in pattern.iter() {
308 let op = op.as_ref().map_err(|e| to_parse_error(&e))?;
309
310 match op { 293 match op {
311 Op::Subtree(subtree) => validate(&subtree)?, 294 Op::Subtree { tokens, .. } => validate(&tokens)?,
312 Op::Repeat { subtree, separator, .. } => { 295 Op::Repeat { tokens: subtree, separator, .. } => {
313 // Checks that no repetition which could match an empty token 296 // Checks that no repetition which could match an empty token
314 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 297 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
315 298
316 if separator.is_none() { 299 if separator.is_none() {
317 if subtree.iter().all(|child_op| { 300 if subtree.iter().all(|child_op| {
318 match child_op.as_ref().map_err(to_parse_error) { 301 match child_op {
319 Ok(Op::Var { kind, .. }) => { 302 Op::Var { kind, .. } => {
320 // vis is optional 303 // vis is optional
321 if kind.as_ref().map_or(false, |it| it == "vis") { 304 if kind.as_ref().map_or(false, |it| it == "vis") {
322 return true; 305 return true;
323 } 306 }
324 } 307 }
325 Ok(Op::Repeat { kind, .. }) => { 308 Op::Repeat { kind, .. } => {
326 return matches!( 309 return matches!(
327 kind, 310 kind,
328 parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne 311 parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne
329 ) 312 )
330 } 313 }
331 _ => {} 314 Op::Leaf(_) => {}
315 Op::Subtree { .. } => {}
332 } 316 }
333 false 317 false
334 }) { 318 }) {
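
To summarize the error split introduced above in runnable form: problems found while parsing a `macro_rules!` definition are now `ParseError`s, and `ExpandError::InvalidRepeat` is gone. The enums below copy most of the variants visible in the diff (the proc-macro variant is omitted); everything else is a simplified stand-in, not the mbe API.

#[derive(Debug, PartialEq, Eq)]
enum ParseError {
    UnexpectedToken(String),
    Expected(String),
    InvalidRepeat,
    RepetitionEmptyTokenTree,
}

#[derive(Debug, PartialEq, Eq)]
enum ExpandError {
    UnexpectedToken,
    BindingError(String),
    ConversionError,
    UnresolvedProcMacro,
    Other(String),
}

// Definition-time problem (e.g. a malformed repeat operator) surfaces as a ParseError.
fn parse_repeat_kind(token: char) -> Result<&'static str, ParseError> {
    match token {
        '*' => Ok("ZeroOrMore"),
        '+' => Ok("OneOrMore"),
        '?' => Ok("ZeroOrOne"),
        _ => Err(ParseError::InvalidRepeat),
    }
}

// Expansion-time failure stays an ExpandError.
fn expand(input_matches: bool) -> Result<(), ExpandError> {
    if input_matches {
        Ok(())
    } else {
        Err(ExpandError::UnexpectedToken)
    }
}

fn main() {
    assert_eq!(parse_repeat_kind('*'), Ok("ZeroOrMore"));
    assert_eq!(parse_repeat_kind('x'), Err(ParseError::InvalidRepeat));
    assert_eq!(expand(false), Err(ExpandError::UnexpectedToken));
}
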
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index f3047972d..f891ec29c 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -3,15 +3,16 @@
3 3
4use smallvec::SmallVec; 4use smallvec::SmallVec;
5use syntax::SmolStr; 5use syntax::SmolStr;
6use tt::Delimiter;
6 7
7use crate::{tt_iter::TtIter, ExpandError, MetaTemplate}; 8use crate::{tt_iter::TtIter, MetaTemplate, ParseError};
8 9
9#[derive(Clone, Debug, PartialEq, Eq)] 10#[derive(Clone, Debug, PartialEq, Eq)]
10pub(crate) enum Op { 11pub(crate) enum Op {
11 Var { name: SmolStr, kind: Option<SmolStr>, id: tt::TokenId }, 12 Var { name: SmolStr, kind: Option<SmolStr>, id: tt::TokenId },
12 Repeat { subtree: MetaTemplate, kind: RepeatKind, separator: Option<Separator> }, 13 Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
13 Leaf(tt::Leaf), 14 Leaf(tt::Leaf),
14 Subtree(MetaTemplate), 15 Subtree { tokens: MetaTemplate, delimiter: Option<Delimiter> },
15} 16}
16 17
17#[derive(Copy, Clone, Debug, PartialEq, Eq)] 18#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -46,12 +47,12 @@ impl PartialEq for Separator {
46 } 47 }
47} 48}
48 49
49pub(crate) fn parse_template(template: &tt::Subtree) -> Vec<Result<Op, ExpandError>> { 50pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
50 parse_inner(&template, Mode::Template) 51 parse_inner(&template, Mode::Template).into_iter().collect()
51} 52}
52 53
53pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Vec<Result<Op, ExpandError>> { 54pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
54 parse_inner(&pattern, Mode::Pattern) 55 parse_inner(&pattern, Mode::Pattern).into_iter().collect()
55} 56}
56 57
57#[derive(Clone, Copy)] 58#[derive(Clone, Copy)]
@@ -60,7 +61,7 @@ enum Mode {
60 Template, 61 Template,
61} 62}
62 63
63fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ExpandError>> { 64fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ParseError>> {
64 let mut src = TtIter::new(&tt); 65 let mut src = TtIter::new(&tt);
65 std::iter::from_fn(move || { 66 std::iter::from_fn(move || {
66 let first = src.next()?; 67 let first = src.next()?;
@@ -71,7 +72,7 @@ fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ExpandError>> {
71 72
72macro_rules! err { 73macro_rules! err {
73 ($($tt:tt)*) => { 74 ($($tt:tt)*) => {
74 ExpandError::UnexpectedToken 75 ParseError::UnexpectedToken(($($tt)*).to_string())
75 }; 76 };
76} 77}
77 78
@@ -81,7 +82,7 @@ macro_rules! bail {
81 }; 82 };
82} 83}
83 84
84fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ExpandError> { 85fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ParseError> {
85 let res = match first { 86 let res = match first {
86 tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. })) => { 87 tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. })) => {
87 // Note that the '$' itself is a valid token inside macro_rules. 88 // Note that the '$' itself is a valid token inside macro_rules.
@@ -92,17 +93,17 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
92 match second { 93 match second {
93 tt::TokenTree::Subtree(subtree) => { 94 tt::TokenTree::Subtree(subtree) => {
94 let (separator, kind) = parse_repeat(src)?; 95 let (separator, kind) = parse_repeat(src)?;
95 let delimiter = subtree.delimiter; 96 let tokens = parse_inner(&subtree, mode)
96 let tokens = parse_inner(&subtree, mode); 97 .into_iter()
97 let subtree = MetaTemplate { tokens, delimiter }; 98 .collect::<Result<Vec<Op>, ParseError>>()?;
98 Op::Repeat { subtree, separator, kind } 99 Op::Repeat { tokens: MetaTemplate(tokens), separator, kind }
99 } 100 }
100 tt::TokenTree::Leaf(leaf) => match leaf { 101 tt::TokenTree::Leaf(leaf) => match leaf {
101 tt::Leaf::Punct(punct) => { 102 tt::Leaf::Punct(punct) => {
102 static UNDERSCORE: SmolStr = SmolStr::new_inline("_"); 103 static UNDERSCORE: SmolStr = SmolStr::new_inline("_");
103 104
104 if punct.char != '_' { 105 if punct.char != '_' {
105 return Err(ExpandError::UnexpectedToken); 106 return Err(ParseError::Expected("_".to_string()));
106 } 107 }
107 let name = UNDERSCORE.clone(); 108 let name = UNDERSCORE.clone();
108 let kind = eat_fragment_kind(src, mode)?; 109 let kind = eat_fragment_kind(src, mode)?;
@@ -134,16 +135,15 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
134 } 135 }
135 tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()), 136 tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
136 tt::TokenTree::Subtree(subtree) => { 137 tt::TokenTree::Subtree(subtree) => {
137 let delimiter = subtree.delimiter; 138 let tokens =
138 let tokens = parse_inner(&subtree, mode); 139 parse_inner(&subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
139 let subtree = MetaTemplate { tokens, delimiter }; 140 Op::Subtree { tokens: MetaTemplate(tokens), delimiter: subtree.delimiter }
140 Op::Subtree(subtree)
141 } 141 }
142 }; 142 };
143 Ok(res) 143 Ok(res)
144} 144}
145 145
146fn eat_fragment_kind<'a>(src: &mut TtIter<'a>, mode: Mode) -> Result<Option<SmolStr>, ExpandError> { 146fn eat_fragment_kind<'a>(src: &mut TtIter<'a>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
147 if let Mode::Pattern = mode { 147 if let Mode::Pattern = mode {
148 src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?; 148 src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?;
149 let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?; 149 let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?;
@@ -156,12 +156,12 @@ fn is_boolean_literal(lit: &tt::Literal) -> bool {
156 matches!(lit.text.as_str(), "true" | "false") 156 matches!(lit.text.as_str(), "true" | "false")
157} 157}
158 158
159fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), ExpandError> { 159fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), ParseError> {
160 let mut separator = Separator::Puncts(SmallVec::new()); 160 let mut separator = Separator::Puncts(SmallVec::new());
161 for tt in src { 161 for tt in src {
162 let tt = match tt { 162 let tt = match tt {
163 tt::TokenTree::Leaf(leaf) => leaf, 163 tt::TokenTree::Leaf(leaf) => leaf,
164 tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat), 164 tt::TokenTree::Subtree(_) => return Err(ParseError::InvalidRepeat),
165 }; 165 };
166 let has_sep = match &separator { 166 let has_sep = match &separator {
167 Separator::Puncts(puncts) => !puncts.is_empty(), 167 Separator::Puncts(puncts) => !puncts.is_empty(),
@@ -169,7 +169,7 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp
169 }; 169 };
170 match tt { 170 match tt {
171 tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => { 171 tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
172 return Err(ExpandError::InvalidRepeat) 172 return Err(ParseError::InvalidRepeat)
173 } 173 }
174 tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()), 174 tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
175 tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()), 175 tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
@@ -182,11 +182,11 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp
182 match &mut separator { 182 match &mut separator {
183 Separator::Puncts(puncts) => { 183 Separator::Puncts(puncts) => {
184 if puncts.len() == 3 { 184 if puncts.len() == 3 {
185 return Err(ExpandError::InvalidRepeat); 185 return Err(ParseError::InvalidRepeat);
186 } 186 }
187 puncts.push(punct.clone()) 187 puncts.push(punct.clone())
188 } 188 }
189 _ => return Err(ExpandError::InvalidRepeat), 189 _ => return Err(ParseError::InvalidRepeat),
190 } 190 }
191 continue; 191 continue;
192 } 192 }
@@ -196,5 +196,5 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp
196 } 196 }
197 } 197 }
198 } 198 }
199 Err(ExpandError::InvalidRepeat) 199 Err(ParseError::InvalidRepeat)
200} 200}
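
The reshaped parser output in one self-contained picture (simplified stand-ins, not the mbe types): `MetaTemplate` is now just a newtype over `Vec<Op>`, `Op::Subtree` carries its delimiter inline, `Op::Repeat` names its field `tokens`, and parsing returns `Result<Vec<Op>, ParseError>` up front instead of threading a `Result` through every stored op.

#[derive(Debug, PartialEq, Eq)]
enum ParseError {
    InvalidRepeat,
}

#[derive(Debug, PartialEq, Eq)]
struct MetaTemplate(Vec<Op>);

#[derive(Debug, PartialEq, Eq)]
enum Op {
    Leaf(char),
    Subtree { tokens: MetaTemplate, delimiter: Option<char> },
    Repeat { tokens: MetaTemplate, separator: Option<char> },
}

impl MetaTemplate {
    fn iter(&self) -> impl Iterator<Item = &Op> {
        self.0.iter()
    }
}

// Parsing now fails early with a ParseError instead of storing
// Result<Op, _> per token inside the template.
fn parse(ops: Vec<Result<Op, ParseError>>) -> Result<MetaTemplate, ParseError> {
    Ok(MetaTemplate(ops.into_iter().collect::<Result<Vec<Op>, ParseError>>()?))
}

fn main() {
    let template = parse(vec![
        Ok(Op::Leaf('a')),
        Ok(Op::Subtree { tokens: MetaTemplate(vec![Op::Leaf('b')]), delimiter: Some('(') }),
    ])
    .unwrap();
    assert_eq!(template.iter().count(), 2);

    let broken = parse(vec![Ok(Op::Leaf('a')), Err(ParseError::InvalidRepeat)]);
    assert_eq!(broken, Err(ParseError::InvalidRepeat));
}
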
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index 8d978163d..1c467facd 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -33,19 +33,18 @@ mod rule_parsing {
33 33
34 #[test] 34 #[test]
35 fn test_invalid_arms() { 35 fn test_invalid_arms() {
36 fn check(macro_body: &str, err: &str) { 36 fn check(macro_body: &str, err: ParseError) {
37 let m = parse_macro_arm(macro_body); 37 let m = parse_macro_arm(macro_body);
38 assert_eq!(m, Err(ParseError::Expected(String::from(err)))); 38 assert_eq!(m, Err(err.into()));
39 } 39 }
40 check("invalid", ParseError::Expected("expected subtree".into()));
40 41
41 check("invalid", "expected subtree"); 42 check("$i:ident => ()", ParseError::Expected("expected subtree".into()));
43 check("($i:ident) ()", ParseError::Expected("expected `=`".into()));
44 check("($($i:ident)_) => ()", ParseError::InvalidRepeat);
42 45
43 check("$i:ident => ()", "expected subtree"); 46 check("($i) => ($i)", ParseError::UnexpectedToken("bad fragment specifier 1".into()));
44 check("($i:ident) ()", "expected `=`"); 47 check("($i:) => ($i)", ParseError::UnexpectedToken("bad fragment specifier 1".into()));
45 check("($($i:ident)_) => ()", "invalid repeat");
46
47 check("($i) => ($i)", "invalid macro definition");
48 check("($i:) => ($i)", "invalid macro definition");
49 } 48 }
50 49
51 fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError> { 50 fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError> {
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs
index 3ff347e2c..a5c564e0a 100644
--- a/crates/project_model/src/build_data.rs
+++ b/crates/project_model/src/build_data.rs
@@ -5,10 +5,11 @@ use std::{
5 io::BufReader, 5 io::BufReader,
6 path::{Path, PathBuf}, 6 path::{Path, PathBuf},
7 process::{Command, Stdio}, 7 process::{Command, Stdio},
8 sync::Arc,
8}; 9};
9 10
10use anyhow::Result; 11use anyhow::Result;
11use cargo_metadata::{BuildScript, Message, Package, PackageId}; 12use cargo_metadata::{BuildScript, Message};
12use itertools::Itertools; 13use itertools::Itertools;
13use paths::{AbsPath, AbsPathBuf}; 14use paths::{AbsPath, AbsPathBuf};
14use rustc_hash::FxHashMap; 15use rustc_hash::FxHashMap;
@@ -16,150 +17,195 @@ use stdx::JodChild;
16 17
17use crate::{cfg_flag::CfgFlag, CargoConfig}; 18use crate::{cfg_flag::CfgFlag, CargoConfig};
18 19
19#[derive(Debug, Clone, Default)]
20pub(crate) struct BuildDataMap {
21 data: FxHashMap<PackageId, BuildData>,
22}
23#[derive(Debug, Clone, Default, PartialEq, Eq)] 20#[derive(Debug, Clone, Default, PartialEq, Eq)]
24pub struct BuildData { 21pub(crate) struct BuildData {
25 /// List of config flags defined by this package's build script 22 /// List of config flags defined by this package's build script
26 pub cfgs: Vec<CfgFlag>, 23 pub(crate) cfgs: Vec<CfgFlag>,
27 /// List of cargo-related environment variables with their value 24 /// List of cargo-related environment variables with their value
28 /// 25 ///
29 /// If the package has a build script which defines environment variables, 26 /// If the package has a build script which defines environment variables,
30 /// they can also be found here. 27 /// they can also be found here.
31 pub envs: Vec<(String, String)>, 28 pub(crate) envs: Vec<(String, String)>,
32 /// Directory where a build script might place its output 29 /// Directory where a build script might place its output
33 pub out_dir: Option<AbsPathBuf>, 30 pub(crate) out_dir: Option<AbsPathBuf>,
34 /// Path to the proc-macro library file if this package exposes proc-macros 31 /// Path to the proc-macro library file if this package exposes proc-macros
35 pub proc_macro_dylib_path: Option<AbsPathBuf>, 32 pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
36} 33}
37 34
38impl BuildDataMap { 35#[derive(Clone, Debug)]
39 pub(crate) fn new( 36pub(crate) struct BuildDataConfig {
40 cargo_toml: &AbsPath, 37 cargo_toml: AbsPathBuf,
41 cargo_features: &CargoConfig, 38 cargo_features: CargoConfig,
42 packages: &Vec<Package>, 39 packages: Arc<Vec<cargo_metadata::Package>>,
43 progress: &dyn Fn(String), 40}
44 ) -> Result<BuildDataMap> {
45 let mut cmd = Command::new(toolchain::cargo());
46 cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
47 .arg(cargo_toml.as_ref());
48
49 // --all-targets includes tests, benches and examples in addition to the
50 // default lib and bins. This is an independent concept from the --targets
51 // flag below.
52 cmd.arg("--all-targets");
53
54 if let Some(target) = &cargo_features.target {
55 cmd.args(&["--target", target]);
56 }
57 41
58 if cargo_features.all_features { 42impl PartialEq for BuildDataConfig {
59 cmd.arg("--all-features"); 43 fn eq(&self, other: &Self) -> bool {
60 } else { 44 Arc::ptr_eq(&self.packages, &other.packages)
61 if cargo_features.no_default_features { 45 }
62 // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` 46}
63 // https://github.com/oli-obk/cargo_metadata/issues/79
64 cmd.arg("--no-default-features");
65 }
66 if !cargo_features.features.is_empty() {
67 cmd.arg("--features");
68 cmd.arg(cargo_features.features.join(" "));
69 }
70 }
71 47
72 cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null()); 48impl Eq for BuildDataConfig {}
73
74 let mut child = cmd.spawn().map(JodChild)?;
75 let child_stdout = child.stdout.take().unwrap();
76 let stdout = BufReader::new(child_stdout);
77
78 let mut res = BuildDataMap::default();
79 for message in cargo_metadata::Message::parse_stream(stdout) {
80 if let Ok(message) = message {
81 match message {
82 Message::BuildScriptExecuted(BuildScript {
83 package_id,
84 out_dir,
85 cfgs,
86 env,
87 ..
88 }) => {
89 let cfgs = {
90 let mut acc = Vec::new();
91 for cfg in cfgs {
92 match cfg.parse::<CfgFlag>() {
93 Ok(it) => acc.push(it),
94 Err(err) => {
95 anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
96 }
97 };
98 }
99 acc
100 };
101 let res = res.data.entry(package_id.clone()).or_default();
102 // cargo_metadata crate returns default (empty) path for
103 // older cargos, which is not absolute, so work around that.
104 if out_dir != PathBuf::default() {
105 let out_dir = AbsPathBuf::assert(out_dir);
106 res.out_dir = Some(out_dir);
107 res.cfgs = cfgs;
108 }
109 49
110 res.envs = env; 50#[derive(Debug, Default)]
111 } 51pub struct BuildDataCollector {
112 Message::CompilerArtifact(message) => { 52 configs: FxHashMap<AbsPathBuf, BuildDataConfig>,
113 progress(format!("metadata {}", message.target.name)); 53}
114 54
115 if message.target.kind.contains(&"proc-macro".to_string()) { 55#[derive(Debug, Default, PartialEq, Eq)]
116 let package_id = message.package_id; 56pub struct BuildDataResult {
117 // Skip rmeta file 57 data: FxHashMap<AbsPathBuf, BuildDataMap>,
118 if let Some(filename) = 58}
119 message.filenames.iter().find(|name| is_dylib(name)) 59
120 { 60pub(crate) type BuildDataMap = FxHashMap<String, BuildData>;
121 let filename = AbsPathBuf::assert(filename.clone()); 61
122 let res = res.data.entry(package_id.clone()).or_default(); 62impl BuildDataCollector {
123 res.proc_macro_dylib_path = Some(filename); 63 pub(crate) fn add_config(&mut self, workspace_root: &AbsPath, config: BuildDataConfig) {
124 } 64 self.configs.insert(workspace_root.to_path_buf().clone(), config);
125 } 65 }
126 } 66
127 Message::CompilerMessage(message) => { 67 pub fn collect(&mut self, progress: &dyn Fn(String)) -> Result<BuildDataResult> {
128 progress(message.target.name.clone()); 68 let mut res = BuildDataResult::default();
129 } 69 for (path, config) in self.configs.iter() {
130 Message::Unknown => (), 70 res.data.insert(
131 Message::BuildFinished(_) => {} 71 path.clone(),
132 Message::TextLine(_) => {} 72 collect_from_workspace(
133 } 73 &config.cargo_toml,
134 } 74 &config.cargo_features,
75 &config.packages,
76 progress,
77 )?,
78 );
135 } 79 }
136 res.inject_cargo_env(packages);
137 Ok(res) 80 Ok(res)
138 } 81 }
82}
83
84impl BuildDataResult {
85 pub(crate) fn get(&self, root: &AbsPath) -> Option<&BuildDataMap> {
86 self.data.get(&root.to_path_buf())
87 }
88}
139 89
140 pub(crate) fn with_cargo_env(packages: &Vec<Package>) -> Self { 90impl BuildDataConfig {
141 let mut res = Self::default(); 91 pub(crate) fn new(
142 res.inject_cargo_env(packages); 92 cargo_toml: AbsPathBuf,
143 res 93 cargo_features: CargoConfig,
94 packages: Arc<Vec<cargo_metadata::Package>>,
95 ) -> Self {
96 Self { cargo_toml, cargo_features, packages }
144 } 97 }
98}
145 99
146 pub(crate) fn get(&self, id: &PackageId) -> Option<&BuildData> { 100fn collect_from_workspace(
147 self.data.get(id) 101 cargo_toml: &AbsPath,
102 cargo_features: &CargoConfig,
103 packages: &Vec<cargo_metadata::Package>,
104 progress: &dyn Fn(String),
105) -> Result<BuildDataMap> {
106 let mut cmd = Command::new(toolchain::cargo());
107 cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
108 .arg(cargo_toml.as_ref());
109
110 // --all-targets includes tests, benches and examples in addition to the
111 // default lib and bins. This is an independent concept from the --targets
112 // flag below.
113 cmd.arg("--all-targets");
114
115 if let Some(target) = &cargo_features.target {
116 cmd.args(&["--target", target]);
148 } 117 }
149 118
150 fn inject_cargo_env(&mut self, packages: &Vec<Package>) { 119 if cargo_features.all_features {
151 for meta_pkg in packages { 120 cmd.arg("--all-features");
152 let resource = self.data.entry(meta_pkg.id.clone()).or_default(); 121 } else {
153 inject_cargo_env(meta_pkg, &mut resource.envs); 122 if cargo_features.no_default_features {
123                // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
124 // https://github.com/oli-obk/cargo_metadata/issues/79
125 cmd.arg("--no-default-features");
126 }
127 if !cargo_features.features.is_empty() {
128 cmd.arg("--features");
129 cmd.arg(cargo_features.features.join(" "));
130 }
131 }
132
133 cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
154 134
155 if let Some(out_dir) = &resource.out_dir { 135 let mut child = cmd.spawn().map(JodChild)?;
156 // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() 136 let child_stdout = child.stdout.take().unwrap();
157 if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { 137 let stdout = BufReader::new(child_stdout);
158 resource.envs.push(("OUT_DIR".to_string(), out_dir)); 138
139 let mut res = BuildDataMap::default();
140 for message in cargo_metadata::Message::parse_stream(stdout) {
141 if let Ok(message) = message {
142 match message {
143 Message::BuildScriptExecuted(BuildScript {
144 package_id,
145 out_dir,
146 cfgs,
147 env,
148 ..
149 }) => {
150 let cfgs = {
151 let mut acc = Vec::new();
152 for cfg in cfgs {
153 match cfg.parse::<CfgFlag>() {
154 Ok(it) => acc.push(it),
155 Err(err) => {
156 anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
157 }
158 };
159 }
160 acc
161 };
162 let res = res.entry(package_id.repr.clone()).or_default();
163 // cargo_metadata crate returns default (empty) path for
164 // older cargos, which is not absolute, so work around that.
165 if out_dir != PathBuf::default() {
166 let out_dir = AbsPathBuf::assert(out_dir);
167 res.out_dir = Some(out_dir);
168 res.cfgs = cfgs;
169 }
170
171 res.envs = env;
172 }
173 Message::CompilerArtifact(message) => {
174 progress(format!("metadata {}", message.target.name));
175
176 if message.target.kind.contains(&"proc-macro".to_string()) {
177 let package_id = message.package_id;
178 // Skip rmeta file
179 if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
180 {
181 let filename = AbsPathBuf::assert(filename.clone());
182 let res = res.entry(package_id.repr.clone()).or_default();
183 res.proc_macro_dylib_path = Some(filename);
184 }
185 }
186 }
187 Message::CompilerMessage(message) => {
188 progress(message.target.name.clone());
159 } 189 }
190 Message::Unknown => (),
191 Message::BuildFinished(_) => {}
192 Message::TextLine(_) => {}
160 } 193 }
161 } 194 }
162 } 195 }
196
197 for package in packages {
198 let build_data = res.entry(package.id.repr.clone()).or_default();
199 inject_cargo_env(package, build_data);
200 if let Some(out_dir) = &build_data.out_dir {
201 // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
202 if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
203 build_data.envs.push(("OUT_DIR".to_string(), out_dir));
204 }
205 }
206 }
207
208 Ok(res)
163} 209}
164 210
165// FIXME: Find a better way to know if it is a dylib 211 // FIXME: Find a better way to know if it is a dylib
@@ -173,7 +219,9 @@ fn is_dylib(path: &Path) -> bool {
173/// Recreates the compile-time environment variables that Cargo sets. 219/// Recreates the compile-time environment variables that Cargo sets.
174/// 220///
175/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates> 221/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
176fn inject_cargo_env(package: &cargo_metadata::Package, env: &mut Vec<(String, String)>) { 222fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut BuildData) {
223 let env = &mut build_data.envs;
224
177 // FIXME: Missing variables: 225 // FIXME: Missing variables:
178 // CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_<name> 226 // CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
179 227
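
The new `collect_from_workspace` above boils down to: spawn `cargo check --message-format=json`, stream the JSON messages through `cargo_metadata`, and fold `BuildScriptExecuted` records into per-package build data. A minimal, self-contained sketch of that pattern follows. It is not the rust-analyzer code itself: it assumes the same (older) `cargo_metadata` API used in the diff, where `BuildScript::out_dir` is a `std::path::PathBuf` and `PackageId` exposes `repr`, and it collects only `OUT_DIR` values into plain strings instead of the real `BuildData` type.

```rust
use std::collections::HashMap;
use std::io::BufReader;
use std::process::{Command, Stdio};

use cargo_metadata::Message;

/// Run `cargo check` with JSON output and record each package's OUT_DIR.
fn collect_out_dirs(manifest: &str) -> Result<HashMap<String, String>, Box<dyn std::error::Error>> {
    let mut child = Command::new("cargo")
        .args(&["check", "--workspace", "--message-format=json", "--manifest-path", manifest])
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .spawn()?;

    let stdout = BufReader::new(child.stdout.take().expect("stdout was piped"));
    let mut out_dirs = HashMap::new();

    for message in Message::parse_stream(stdout) {
        match message? {
            // Each executed build script reports the OUT_DIR it populated.
            Message::BuildScriptExecuted(script) => {
                out_dirs.insert(
                    script.package_id.repr.clone(),
                    script.out_dir.display().to_string(),
                );
            }
            // Artifacts, compiler diagnostics, etc. are ignored in this sketch.
            _ => {}
        }
    }
    child.wait()?;
    Ok(out_dirs)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    for (package, out_dir) in collect_out_dirs("Cargo.toml")? {
        println!("{} -> {}", package, out_dir);
    }
    Ok(())
}
```

Keeping this parse loop independent of the workspace types is what lets the diff run it lazily from `BuildDataCollector::collect` rather than during `cargo metadata` loading.
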
diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs
index c8a5333c4..f47898b9b 100644
--- a/crates/project_model/src/cargo_workspace.rs
+++ b/crates/project_model/src/cargo_workspace.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use std::{convert::TryInto, ops, process::Command}; 3use std::{convert::TryInto, ops, process::Command, sync::Arc};
4 4
5use anyhow::{Context, Result}; 5use anyhow::{Context, Result};
6use base_db::Edition; 6use base_db::Edition;
@@ -9,7 +9,7 @@ use la_arena::{Arena, Idx};
9use paths::{AbsPath, AbsPathBuf}; 9use paths::{AbsPath, AbsPathBuf};
10use rustc_hash::FxHashMap; 10use rustc_hash::FxHashMap;
11 11
12use crate::build_data::{BuildData, BuildDataMap}; 12use crate::build_data::BuildDataConfig;
13use crate::utf8_stdout; 13use crate::utf8_stdout;
14 14
15/// `CargoWorkspace` represents the logical structure of, well, a Cargo 15/// `CargoWorkspace` represents the logical structure of, well, a Cargo
@@ -27,6 +27,7 @@ pub struct CargoWorkspace {
27 packages: Arena<PackageData>, 27 packages: Arena<PackageData>,
28 targets: Arena<TargetData>, 28 targets: Arena<TargetData>,
29 workspace_root: AbsPathBuf, 29 workspace_root: AbsPathBuf,
30 build_data_config: BuildDataConfig,
30} 31}
31 32
32impl ops::Index<Package> for CargoWorkspace { 33impl ops::Index<Package> for CargoWorkspace {
@@ -55,9 +56,6 @@ pub struct CargoConfig {
55 /// This will be ignored if `cargo_all_features` is true. 56 /// This will be ignored if `cargo_all_features` is true.
56 pub features: Vec<String>, 57 pub features: Vec<String>,
57 58
58 /// Runs cargo check on launch to figure out the correct values of OUT_DIR
59 pub load_out_dirs_from_check: bool,
60
61 /// rustc target 59 /// rustc target
62 pub target: Option<String>, 60 pub target: Option<String>,
63 61
@@ -94,8 +92,8 @@ pub struct PackageData {
94 pub features: FxHashMap<String, Vec<String>>, 92 pub features: FxHashMap<String, Vec<String>>,
95 /// List of features enabled on this package 93 /// List of features enabled on this package
96 pub active_features: Vec<String>, 94 pub active_features: Vec<String>,
97 /// Build script related data for this package 95 // String representation of package id
98 pub build_data: BuildData, 96 pub id: String,
99} 97}
100 98
101#[derive(Debug, Clone, Eq, PartialEq)] 99#[derive(Debug, Clone, Eq, PartialEq)]
@@ -228,12 +226,6 @@ impl CargoWorkspace {
228 ) 226 )
229 })?; 227 })?;
230 228
231 let resources = if config.load_out_dirs_from_check {
232 BuildDataMap::new(cargo_toml, config, &meta.packages, progress)?
233 } else {
234 BuildDataMap::with_cargo_env(&meta.packages)
235 };
236
237 let mut pkg_by_id = FxHashMap::default(); 229 let mut pkg_by_id = FxHashMap::default();
238 let mut packages = Arena::default(); 230 let mut packages = Arena::default();
239 let mut targets = Arena::default(); 231 let mut targets = Arena::default();
@@ -241,10 +233,7 @@ impl CargoWorkspace {
241 let ws_members = &meta.workspace_members; 233 let ws_members = &meta.workspace_members;
242 234
243 meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); 235 meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
244 for meta_pkg in meta.packages { 236 for meta_pkg in &meta.packages {
245 let id = meta_pkg.id.clone();
246 let build_data = resources.get(&id).cloned().unwrap_or_default();
247
248 let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } = 237 let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } =
249 meta_pkg; 238 meta_pkg;
250 let is_member = ws_members.contains(&id); 239 let is_member = ws_members.contains(&id);
@@ -252,24 +241,24 @@ impl CargoWorkspace {
252 .parse::<Edition>() 241 .parse::<Edition>()
253 .with_context(|| format!("Failed to parse edition {}", edition))?; 242 .with_context(|| format!("Failed to parse edition {}", edition))?;
254 let pkg = packages.alloc(PackageData { 243 let pkg = packages.alloc(PackageData {
255 name, 244 id: id.repr.clone(),
245 name: name.clone(),
256 version: version.to_string(), 246 version: version.to_string(),
257 manifest: AbsPathBuf::assert(manifest_path), 247 manifest: AbsPathBuf::assert(manifest_path.clone()),
258 targets: Vec::new(), 248 targets: Vec::new(),
259 is_member, 249 is_member,
260 edition, 250 edition,
261 dependencies: Vec::new(), 251 dependencies: Vec::new(),
262 features: meta_pkg.features.into_iter().collect(), 252 features: meta_pkg.features.clone().into_iter().collect(),
263 active_features: Vec::new(), 253 active_features: Vec::new(),
264 build_data,
265 }); 254 });
266 let pkg_data = &mut packages[pkg]; 255 let pkg_data = &mut packages[pkg];
267 pkg_by_id.insert(id, pkg); 256 pkg_by_id.insert(id, pkg);
268 for meta_tgt in meta_pkg.targets { 257 for meta_tgt in &meta_pkg.targets {
269 let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; 258 let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"];
270 let tgt = targets.alloc(TargetData { 259 let tgt = targets.alloc(TargetData {
271 package: pkg, 260 package: pkg,
272 name: meta_tgt.name, 261 name: meta_tgt.name.clone(),
273 root: AbsPathBuf::assert(meta_tgt.src_path.clone()), 262 root: AbsPathBuf::assert(meta_tgt.src_path.clone()),
274 kind: TargetKind::new(meta_tgt.kind.as_slice()), 263 kind: TargetKind::new(meta_tgt.kind.as_slice()),
275 is_proc_macro, 264 is_proc_macro,
@@ -308,7 +297,13 @@ impl CargoWorkspace {
308 } 297 }
309 298
310 let workspace_root = AbsPathBuf::assert(meta.workspace_root); 299 let workspace_root = AbsPathBuf::assert(meta.workspace_root);
311 Ok(CargoWorkspace { packages, targets, workspace_root: workspace_root }) 300 let build_data_config = BuildDataConfig::new(
301 cargo_toml.to_path_buf(),
302 config.clone(),
303 Arc::new(meta.packages.clone()),
304 );
305
306 Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config })
312 } 307 }
313 308
314 pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a { 309 pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
@@ -334,6 +329,10 @@ impl CargoWorkspace {
334 } 329 }
335 } 330 }
336 331
332 pub(crate) fn build_data_config(&self) -> &BuildDataConfig {
333 &self.build_data_config
334 }
335
337 fn is_unique(&self, name: &str) -> bool { 336 fn is_unique(&self, name: &str) -> bool {
338 self.packages.iter().filter(|(_, v)| v.name == name).count() == 1 337 self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
339 } 338 }
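
The cargo_workspace.rs change above stops running `cargo check` while loading metadata; the workspace now only records a `BuildDataConfig` (manifest path, cargo config, and an `Arc` of the parsed packages) so the expensive step can happen later and the package list can be shared without copying. Below is a small capture-now/compute-later sketch of that shape, using hypothetical stand-in types (`Workspace`, `BuildConfig`, `Package`) rather than the real ones:

```rust
use std::sync::Arc;

struct Package {
    name: String,
}

#[derive(Clone)]
struct BuildConfig {
    manifest: String,
    packages: Arc<Vec<Package>>, // shared with the workspace, never deep-copied
}

struct Workspace {
    config: BuildConfig,
}

impl Workspace {
    // Cheap: just capture whatever the later `cargo check` run will need.
    fn new(manifest: String, packages: Vec<Package>) -> Self {
        let config = BuildConfig { manifest, packages: Arc::new(packages) };
        Workspace { config }
    }

    // Handed out to a collector that does the expensive work on demand.
    fn build_config(&self) -> &BuildConfig {
        &self.config
    }
}

fn main() {
    let ws = Workspace::new("Cargo.toml".into(), vec![Package { name: "demo".into() }]);
    // Cloning the config only bumps the Arc refcount; the package list is shared.
    let cfg = ws.build_config().clone();
    println!("captured {} package(s) (first: {}) for {}", cfg.packages.len(), cfg.packages[0].name, cfg.manifest);
}
```
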
diff --git a/crates/project_model/src/lib.rs b/crates/project_model/src/lib.rs
index 525c336e6..d712095a6 100644
--- a/crates/project_model/src/lib.rs
+++ b/crates/project_model/src/lib.rs
@@ -19,6 +19,7 @@ use paths::{AbsPath, AbsPathBuf};
19use rustc_hash::FxHashSet; 19use rustc_hash::FxHashSet;
20 20
21pub use crate::{ 21pub use crate::{
22 build_data::{BuildDataCollector, BuildDataResult},
22 cargo_workspace::{ 23 cargo_workspace::{
23 CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, Target, TargetData, 24 CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, Target, TargetData,
24 TargetKind, 25 TargetKind,
diff --git a/crates/project_model/src/workspace.rs b/crates/project_model/src/workspace.rs
index 559f4e7bf..c30861976 100644
--- a/crates/project_model/src/workspace.rs
+++ b/crates/project_model/src/workspace.rs
@@ -16,8 +16,13 @@ use proc_macro_api::ProcMacroClient;
16use rustc_hash::{FxHashMap, FxHashSet}; 16use rustc_hash::{FxHashMap, FxHashSet};
17 17
18use crate::{ 18use crate::{
19 cargo_workspace, cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, utf8_stdout, CargoConfig, 19 build_data::{BuildData, BuildDataMap, BuildDataResult},
20 CargoWorkspace, ProjectJson, ProjectManifest, Sysroot, TargetKind, 20 cargo_workspace,
21 cfg_flag::CfgFlag,
22 rustc_cfg,
23 sysroot::SysrootCrate,
24 utf8_stdout, BuildDataCollector, CargoConfig, CargoWorkspace, ProjectJson, ProjectManifest,
25 Sysroot, TargetKind,
21}; 26};
22 27
23/// `PackageRoot` describes a package root folder. 28/// `PackageRoot` describes a package root folder.
@@ -153,7 +158,7 @@ impl ProjectWorkspace {
153 /// Returns the roots for the current `ProjectWorkspace` 158 /// Returns the roots for the current `ProjectWorkspace`
154 /// The return type contains the path and whether or not 159 /// The return type contains the path and whether or not
155 /// the root is a member of the current workspace 160 /// the root is a member of the current workspace
156 pub fn to_roots(&self) -> Vec<PackageRoot> { 161 pub fn to_roots(&self, build_data: Option<&BuildDataResult>) -> Vec<PackageRoot> {
157 match self { 162 match self {
158 ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project 163 ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
159 .crates() 164 .crates()
@@ -179,7 +184,12 @@ impl ProjectWorkspace {
179 let pkg_root = cargo[pkg].root().to_path_buf(); 184 let pkg_root = cargo[pkg].root().to_path_buf();
180 185
181 let mut include = vec![pkg_root.clone()]; 186 let mut include = vec![pkg_root.clone()];
182 include.extend(cargo[pkg].build_data.out_dir.clone()); 187 include.extend(
188 build_data
189 .and_then(|it| it.get(cargo.workspace_root()))
190 .and_then(|map| map.get(&cargo[pkg].id))
191 .and_then(|it| it.out_dir.clone()),
192 );
183 193
184 let mut exclude = vec![pkg_root.join(".git")]; 194 let mut exclude = vec![pkg_root.join(".git")];
185 if is_member { 195 if is_member {
@@ -219,6 +229,7 @@ impl ProjectWorkspace {
219 229
220 pub fn to_crate_graph( 230 pub fn to_crate_graph(
221 &self, 231 &self,
232 build_data: Option<&BuildDataResult>,
222 proc_macro_client: Option<&ProcMacroClient>, 233 proc_macro_client: Option<&ProcMacroClient>,
223 load: &mut dyn FnMut(&AbsPath) -> Option<FileId>, 234 load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
224 ) -> CrateGraph { 235 ) -> CrateGraph {
@@ -241,8 +252,10 @@ impl ProjectWorkspace {
241 &proc_macro_loader, 252 &proc_macro_loader,
242 load, 253 load,
243 cargo, 254 cargo,
255 build_data.and_then(|it| it.get(cargo.workspace_root())),
244 sysroot, 256 sysroot,
245 rustc, 257 rustc,
258 rustc.as_ref().zip(build_data).and_then(|(it, map)| map.get(it.workspace_root())),
246 ), 259 ),
247 }; 260 };
248 if crate_graph.patch_cfg_if() { 261 if crate_graph.patch_cfg_if() {
@@ -252,6 +265,18 @@ impl ProjectWorkspace {
252 } 265 }
253 crate_graph 266 crate_graph
254 } 267 }
268
269 pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) {
270 match self {
271 ProjectWorkspace::Cargo { cargo, rustc, .. } => {
272 collector.add_config(&cargo.workspace_root(), cargo.build_data_config().clone());
273 if let Some(rustc) = rustc {
274 collector.add_config(rustc.workspace_root(), rustc.build_data_config().clone());
275 }
276 }
277 _ => {}
278 }
279 }
255} 280}
256 281
257fn project_json_to_crate_graph( 282fn project_json_to_crate_graph(
@@ -324,8 +349,10 @@ fn cargo_to_crate_graph(
324 proc_macro_loader: &dyn Fn(&Path) -> Vec<ProcMacro>, 349 proc_macro_loader: &dyn Fn(&Path) -> Vec<ProcMacro>,
325 load: &mut dyn FnMut(&AbsPath) -> Option<FileId>, 350 load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
326 cargo: &CargoWorkspace, 351 cargo: &CargoWorkspace,
352 build_data_map: Option<&BuildDataMap>,
327 sysroot: &Sysroot, 353 sysroot: &Sysroot,
328 rustc: &Option<CargoWorkspace>, 354 rustc: &Option<CargoWorkspace>,
355 rustc_build_data_map: Option<&BuildDataMap>,
329) -> CrateGraph { 356) -> CrateGraph {
330 let _p = profile::span("cargo_to_crate_graph"); 357 let _p = profile::span("cargo_to_crate_graph");
331 let mut crate_graph = CrateGraph::default(); 358 let mut crate_graph = CrateGraph::default();
@@ -351,6 +378,7 @@ fn cargo_to_crate_graph(
351 let crate_id = add_target_crate_root( 378 let crate_id = add_target_crate_root(
352 &mut crate_graph, 379 &mut crate_graph,
353 &cargo[pkg], 380 &cargo[pkg],
381 build_data_map.and_then(|it| it.get(&cargo[pkg].id)),
354 &cfg_options, 382 &cfg_options,
355 proc_macro_loader, 383 proc_macro_loader,
356 file_id, 384 file_id,
@@ -427,6 +455,7 @@ fn cargo_to_crate_graph(
427 let crate_id = add_target_crate_root( 455 let crate_id = add_target_crate_root(
428 &mut crate_graph, 456 &mut crate_graph,
429 &rustc_workspace[pkg], 457 &rustc_workspace[pkg],
458 rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)),
430 &cfg_options, 459 &cfg_options,
431 proc_macro_loader, 460 proc_macro_loader,
432 file_id, 461 file_id,
@@ -475,6 +504,7 @@ fn cargo_to_crate_graph(
475fn add_target_crate_root( 504fn add_target_crate_root(
476 crate_graph: &mut CrateGraph, 505 crate_graph: &mut CrateGraph,
477 pkg: &cargo_workspace::PackageData, 506 pkg: &cargo_workspace::PackageData,
507 build_data: Option<&BuildData>,
478 cfg_options: &CfgOptions, 508 cfg_options: &CfgOptions,
479 proc_macro_loader: &dyn Fn(&Path) -> Vec<ProcMacro>, 509 proc_macro_loader: &dyn Fn(&Path) -> Vec<ProcMacro>,
480 file_id: FileId, 510 file_id: FileId,
@@ -485,19 +515,22 @@ fn add_target_crate_root(
485 for feature in pkg.active_features.iter() { 515 for feature in pkg.active_features.iter() {
486 opts.insert_key_value("feature".into(), feature.into()); 516 opts.insert_key_value("feature".into(), feature.into());
487 } 517 }
488 opts.extend(pkg.build_data.cfgs.iter().cloned()); 518 if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) {
519 opts.extend(cfgs.iter().cloned());
520 }
489 opts 521 opts
490 }; 522 };
491 523
492 let mut env = Env::default(); 524 let mut env = Env::default();
493 for (k, v) in &pkg.build_data.envs { 525 if let Some(envs) = build_data.map(|it| &it.envs) {
494 env.set(k, v.clone()); 526 for (k, v) in envs {
527 env.set(k, v.clone());
528 }
495 } 529 }
496 530
497 let proc_macro = pkg 531 let proc_macro = build_data
498 .build_data
499 .proc_macro_dylib_path
500 .as_ref() 532 .as_ref()
533 .and_then(|it| it.proc_macro_dylib_path.as_ref())
501 .map(|it| proc_macro_loader(&it)) 534 .map(|it| proc_macro_loader(&it))
502 .unwrap_or_default(); 535 .unwrap_or_default();
503 536
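
With build data optional, workspace.rs no longer reads `pkg.build_data` directly; every consumer threads an `Option<&BuildDataResult>` down and chains `Option` combinators: workspace root, then per-package map, then the field it needs. A simplified sketch of that lookup chain, with plain `HashMap`/`String` stand-ins for the real `BuildDataResult`/`BuildDataMap`/`BuildData` types:

```rust
use std::collections::HashMap;

struct BuildData {
    out_dir: Option<String>,
}
type BuildDataMap = HashMap<String, BuildData>; // keyed by package id
type BuildDataResult = HashMap<String, BuildDataMap>; // keyed by workspace root

// Mirrors the chain used in to_roots() / cargo_to_crate_graph above:
// workspace root -> per-package map -> per-package data -> out_dir.
fn out_dir_for(
    build_data: Option<&BuildDataResult>,
    workspace_root: &str,
    package_id: &str,
) -> Option<String> {
    build_data
        .and_then(|res| res.get(workspace_root))
        .and_then(|map| map.get(package_id))
        .and_then(|data| data.out_dir.clone())
}

fn main() {
    let mut map = BuildDataMap::new();
    map.insert("demo 0.1.0".into(), BuildData { out_dir: Some("/target/out".into()) });
    let mut res = BuildDataResult::new();
    res.insert("/ws".into(), map);

    assert_eq!(out_dir_for(Some(&res), "/ws", "demo 0.1.0").as_deref(), Some("/target/out"));
    // Before the collector has run, every lookup simply yields None.
    assert_eq!(out_dir_for(None, "/ws", "demo 0.1.0"), None);
}
```
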
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs
index 2a532361d..3a7db6a18 100644
--- a/crates/rust-analyzer/src/bin/args.rs
+++ b/crates/rust-analyzer/src/bin/args.rs
@@ -50,6 +50,8 @@ FLAGS:
50 -vv, --spammy 50 -vv, --spammy
51 -q, --quiet Set verbosity 51 -q, --quiet Set verbosity
52 52
53 --print-config-schema
54                        Dump an LSP config JSON schema
53 --log-file <PATH> Log to the specified file instead of stderr 55 --log-file <PATH> Log to the specified file instead of stderr
54 --no-log-buffering 56 --no-log-buffering
55 Flush log records to the file immediately 57 Flush log records to the file immediately
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index dbab4f5f4..e12e87180 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -6,7 +6,9 @@ use anyhow::Result;
6use crossbeam_channel::{unbounded, Receiver}; 6use crossbeam_channel::{unbounded, Receiver};
7use ide::{AnalysisHost, Change}; 7use ide::{AnalysisHost, Change};
8use ide_db::base_db::CrateGraph; 8use ide_db::base_db::CrateGraph;
9use project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; 9use project_model::{
10 BuildDataCollector, CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace,
11};
10use vfs::{loader::Handle, AbsPath, AbsPathBuf}; 12use vfs::{loader::Handle, AbsPath, AbsPathBuf};
11 13
12use crate::reload::{ProjectFolders, SourceRootConfig}; 14use crate::reload::{ProjectFolders, SourceRootConfig};
@@ -18,11 +20,7 @@ pub fn load_cargo(
18) -> Result<(AnalysisHost, vfs::Vfs)> { 20) -> Result<(AnalysisHost, vfs::Vfs)> {
19 let root = AbsPathBuf::assert(std::env::current_dir()?.join(root)); 21 let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
20 let root = ProjectManifest::discover_single(&root)?; 22 let root = ProjectManifest::discover_single(&root)?;
21 let ws = ProjectWorkspace::load( 23 let ws = ProjectWorkspace::load(root, &CargoConfig::default(), &|_| {})?;
22 root,
23 &CargoConfig { load_out_dirs_from_check, ..Default::default() },
24 &|_| {},
25 )?;
26 24
27 let (sender, receiver) = unbounded(); 25 let (sender, receiver) = unbounded();
28 let mut vfs = vfs::Vfs::default(); 26 let mut vfs = vfs::Vfs::default();
@@ -39,14 +37,26 @@ pub fn load_cargo(
39 None 37 None
40 }; 38 };
41 39
42 let crate_graph = ws.to_crate_graph(proc_macro_client.as_ref(), &mut |path: &AbsPath| { 40 let build_data = if load_out_dirs_from_check {
43 let contents = loader.load_sync(path); 41 let mut collector = BuildDataCollector::default();
44 let path = vfs::VfsPath::from(path.to_path_buf()); 42 ws.collect_build_data_configs(&mut collector);
45 vfs.set_file_contents(path.clone(), contents); 43 Some(collector.collect(&|_| {})?)
46 vfs.file_id(&path) 44 } else {
47 }); 45 None
46 };
47
48 let crate_graph = ws.to_crate_graph(
49 build_data.as_ref(),
50 proc_macro_client.as_ref(),
51 &mut |path: &AbsPath| {
52 let contents = loader.load_sync(path);
53 let path = vfs::VfsPath::from(path.to_path_buf());
54 vfs.set_file_contents(path.clone(), contents);
55 vfs.file_id(&path)
56 },
57 );
48 58
49 let project_folders = ProjectFolders::new(&[ws], &[]); 59 let project_folders = ProjectFolders::new(&[ws], &[], build_data.as_ref());
50 loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![] }); 60 loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![] });
51 61
52 log::debug!("crate graph: {:?}", crate_graph); 62 log::debug!("crate graph: {:?}", crate_graph);
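
The `to_crate_graph` call in load_cargo.rs now takes the build data plus a `&mut dyn FnMut(&AbsPath) -> Option<FileId>` callback that loads each file into the VFS and returns its id. A rough sketch of that callback shape, with simplified stand-ins (`Vfs`, `u32` ids) instead of the real `vfs`/`ide_db` types:

```rust
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};

#[derive(Default)]
struct Vfs {
    ids: HashMap<PathBuf, u32>,
    next: u32,
}

impl Vfs {
    // Register (or re-use) an id for the path; contents are ignored in this sketch.
    fn set_contents(&mut self, path: PathBuf, _contents: Option<Vec<u8>>) -> u32 {
        if let Some(&id) = self.ids.get(&path) {
            return id;
        }
        self.next += 1;
        self.ids.insert(path, self.next);
        self.next
    }
}

fn main() {
    let mut vfs = Vfs::default();
    // Same shape as the closure handed to ws.to_crate_graph(...) above.
    let mut load = |path: &Path| -> Option<u32> {
        let contents = fs::read(path).ok();
        Some(vfs.set_contents(path.to_path_buf(), contents))
    };

    println!("file id: {:?}", load(Path::new("Cargo.toml")));
    // Asking again for the same path yields the same id.
    println!("file id: {:?}", load(Path::new("Cargo.toml")));
}
```
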
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 37487b6ac..cc0b22bff 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -469,6 +469,9 @@ impl Config {
469 pub fn cargo_autoreload(&self) -> bool { 469 pub fn cargo_autoreload(&self) -> bool {
470 self.data.cargo_autoreload 470 self.data.cargo_autoreload
471 } 471 }
472 pub fn load_out_dirs_from_check(&self) -> bool {
473 self.data.cargo_loadOutDirsFromCheck
474 }
472 pub fn cargo(&self) -> CargoConfig { 475 pub fn cargo(&self) -> CargoConfig {
473 let rustc_source = self.data.rustcSource.as_ref().map(|it| self.root_path.join(&it)); 476 let rustc_source = self.data.rustcSource.as_ref().map(|it| self.root_path.join(&it));
474 477
@@ -476,7 +479,6 @@ impl Config {
476 no_default_features: self.data.cargo_noDefaultFeatures, 479 no_default_features: self.data.cargo_noDefaultFeatures,
477 all_features: self.data.cargo_allFeatures, 480 all_features: self.data.cargo_allFeatures,
478 features: self.data.cargo_features.clone(), 481 features: self.data.cargo_features.clone(),
479 load_out_dirs_from_check: self.data.cargo_loadOutDirsFromCheck,
480 target: self.data.cargo_target.clone(), 482 target: self.data.cargo_target.clone(),
481 rustc_source, 483 rustc_source,
482 no_sysroot: self.data.cargo_noSysroot, 484 no_sysroot: self.data.cargo_noSysroot,
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 442fbd14c..6374a9f3c 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -11,7 +11,9 @@ use ide::{Analysis, AnalysisHost, Change, FileId};
11use ide_db::base_db::{CrateId, VfsPath}; 11use ide_db::base_db::{CrateId, VfsPath};
12use lsp_types::{SemanticTokens, Url}; 12use lsp_types::{SemanticTokens, Url};
13use parking_lot::{Mutex, RwLock}; 13use parking_lot::{Mutex, RwLock};
14use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; 14use project_model::{
15 BuildDataCollector, BuildDataResult, CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target,
16};
15use rustc_hash::FxHashMap; 17use rustc_hash::FxHashMap;
16use vfs::AnchoredPathBuf; 18use vfs::AnchoredPathBuf;
17 19
@@ -33,7 +35,7 @@ use crate::{
33#[derive(Eq, PartialEq, Copy, Clone)] 35#[derive(Eq, PartialEq, Copy, Clone)]
34pub(crate) enum Status { 36pub(crate) enum Status {
35 Loading, 37 Loading,
36 Ready, 38 Ready { partial: bool },
37 Invalid, 39 Invalid,
38 NeedsReload, 40 NeedsReload,
39} 41}
@@ -79,7 +81,9 @@ pub(crate) struct GlobalState {
79 pub(crate) source_root_config: SourceRootConfig, 81 pub(crate) source_root_config: SourceRootConfig,
80 pub(crate) proc_macro_client: Option<ProcMacroClient>, 82 pub(crate) proc_macro_client: Option<ProcMacroClient>,
81 pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>, 83 pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
82 pub(crate) fetch_workspaces_queue: OpQueue, 84 pub(crate) fetch_workspaces_queue: OpQueue<()>,
85 pub(crate) workspace_build_data: Option<BuildDataResult>,
86 pub(crate) fetch_build_data_queue: OpQueue<BuildDataCollector>,
83 latest_requests: Arc<RwLock<LatestRequests>>, 87 latest_requests: Arc<RwLock<LatestRequests>>,
84} 88}
85 89
@@ -133,6 +137,8 @@ impl GlobalState {
133 proc_macro_client: None, 137 proc_macro_client: None,
134 workspaces: Arc::new(Vec::new()), 138 workspaces: Arc::new(Vec::new()),
135 fetch_workspaces_queue: OpQueue::default(), 139 fetch_workspaces_queue: OpQueue::default(),
140 workspace_build_data: None,
141 fetch_build_data_queue: OpQueue::default(),
136 latest_requests: Default::default(), 142 latest_requests: Default::default(),
137 } 143 }
138 } 144 }
diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs
index a85978737..670ca9a45 100644
--- a/crates/rust-analyzer/src/lsp_ext.rs
+++ b/crates/rust-analyzer/src/lsp_ext.rs
@@ -234,6 +234,7 @@ pub enum StatusNotification {}
234#[derive(Serialize, Deserialize)] 234#[derive(Serialize, Deserialize)]
235pub enum Status { 235pub enum Status {
236 Loading, 236 Loading,
237 ReadyPartial,
237 Ready, 238 Ready,
238 NeedsReload, 239 NeedsReload,
239 Invalid, 240 Invalid,
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 6d2475a59..f4fd1ac13 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -21,7 +21,7 @@ use crate::{
21 global_state::{file_id_to_url, url_to_file_id, GlobalState, Status}, 21 global_state::{file_id_to_url, url_to_file_id, GlobalState, Status},
22 handlers, lsp_ext, 22 handlers, lsp_ext,
23 lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress}, 23 lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress},
24 reload::ProjectWorkspaceProgress, 24 reload::{BuildDataProgress, ProjectWorkspaceProgress},
25 Result, 25 Result,
26}; 26};
27 27
@@ -63,6 +63,7 @@ pub(crate) enum Task {
63 Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>), 63 Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
64 PrimeCaches(PrimeCachesProgress), 64 PrimeCaches(PrimeCachesProgress),
65 FetchWorkspace(ProjectWorkspaceProgress), 65 FetchWorkspace(ProjectWorkspaceProgress),
66 FetchBuildData(BuildDataProgress),
66} 67}
67 68
68impl fmt::Debug for Event { 69impl fmt::Debug for Event {
@@ -226,12 +227,33 @@ impl GlobalState {
226 } 227 }
227 ProjectWorkspaceProgress::End(workspaces) => { 228 ProjectWorkspaceProgress::End(workspaces) => {
228 self.fetch_workspaces_completed(); 229 self.fetch_workspaces_completed();
229 self.switch_workspaces(workspaces); 230 self.switch_workspaces(workspaces, None);
230 (Progress::End, None) 231 (Progress::End, None)
231 } 232 }
232 }; 233 };
233 self.report_progress("fetching", state, msg, None); 234 self.report_progress("fetching", state, msg, None);
234 } 235 }
236 Task::FetchBuildData(progress) => {
237 let (state, msg) = match progress {
238 BuildDataProgress::Begin => (Some(Progress::Begin), None),
239 BuildDataProgress::Report(msg) => {
240 (Some(Progress::Report), Some(msg))
241 }
242 BuildDataProgress::End(collector) => {
243 self.fetch_build_data_completed();
244 let workspaces = (*self.workspaces)
245 .clone()
246 .into_iter()
247 .map(|it| Ok(it))
248 .collect();
249 self.switch_workspaces(workspaces, Some(collector));
250 (Some(Progress::End), None)
251 }
252 };
253 if let Some(state) = state {
254 self.report_progress("loading", state, msg, None);
255 }
256 }
235 } 257 }
236 // Coalesce multiple task events into one loop turn 258 // Coalesce multiple task events into one loop turn
237 task = match self.task_pool.receiver.try_recv() { 259 task = match self.task_pool.receiver.try_recv() {
@@ -287,7 +309,11 @@ impl GlobalState {
287 Progress::Report 309 Progress::Report
288 } else { 310 } else {
289 assert_eq!(n_done, n_total); 311 assert_eq!(n_done, n_total);
290 self.transition(Status::Ready); 312 let status = Status::Ready {
313 partial: self.config.load_out_dirs_from_check()
314 && self.workspace_build_data.is_none(),
315 };
316 self.transition(status);
291 Progress::End 317 Progress::End
292 }; 318 };
293 self.report_progress( 319 self.report_progress(
@@ -372,13 +398,14 @@ impl GlobalState {
372 } 398 }
373 399
374 let state_changed = self.process_changes(); 400 let state_changed = self.process_changes();
375 if prev_status == Status::Loading && self.status == Status::Ready { 401 let is_ready = matches!(self.status, Status::Ready { .. } );
402 if prev_status == Status::Loading && is_ready {
376 for flycheck in &self.flycheck { 403 for flycheck in &self.flycheck {
377 flycheck.update(); 404 flycheck.update();
378 } 405 }
379 } 406 }
380 407
381 if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) { 408 if is_ready && (state_changed || prev_status == Status::Loading) {
382 self.update_file_notifications_on_threadpool(); 409 self.update_file_notifications_on_threadpool();
383 410
384 // Refresh semantic tokens if the client supports it. 411 // Refresh semantic tokens if the client supports it.
@@ -408,6 +435,7 @@ impl GlobalState {
408 } 435 }
409 436
410 self.fetch_workspaces_if_needed(); 437 self.fetch_workspaces_if_needed();
438 self.fetch_build_data_if_needed();
411 439
412 let loop_duration = loop_start.elapsed(); 440 let loop_duration = loop_start.elapsed();
413 if loop_duration > Duration::from_millis(100) { 441 if loop_duration > Duration::from_millis(100) {
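
`Status::Ready` now carries a `partial` flag (true while OUT_DIR loading is enabled but build data has not been collected yet), so the `==` comparisons in main_loop.rs become `matches!` patterns. A small sketch of that change in isolation; the surrounding asserts are illustrative, not taken from the diff:

```rust
// Once `Ready` carries a payload, equality checks give way to pattern matching.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    Loading,
    Ready { partial: bool }, // partial == true: build data not collected yet
    Invalid,
    NeedsReload,
}

fn is_ready(status: Status) -> bool {
    // Any Ready counts, whether or not build data is still missing.
    matches!(status, Status::Ready { .. })
}

fn main() {
    assert!(is_ready(Status::Ready { partial: true }));
    assert!(is_ready(Status::Ready { partial: false }));
    assert!(!is_ready(Status::Loading));
    // Exact states can still be compared when the flag matters.
    assert_eq!(Status::Ready { partial: false }, Status::Ready { partial: false });
    assert!(Status::Invalid != Status::NeedsReload);
}
```
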
diff --git a/crates/rust-analyzer/src/op_queue.rs b/crates/rust-analyzer/src/op_queue.rs
index 51d66f4b3..761b9ad39 100644
--- a/crates/rust-analyzer/src/op_queue.rs
+++ b/crates/rust-analyzer/src/op_queue.rs
@@ -1,22 +1,26 @@
1//! Bookkeeping to make sure only one long-running operation is executed. 1//! Bookkeeping to make sure only one long-running operation is executed.
2 2
3#[derive(Default)] 3pub(crate) struct OpQueue<D> {
4pub(crate) struct OpQueue { 4 op_scheduled: Option<D>,
5 op_scheduled: bool,
6 op_in_progress: bool, 5 op_in_progress: bool,
7} 6}
8 7
9impl OpQueue { 8impl<D> Default for OpQueue<D> {
10 pub(crate) fn request_op(&mut self) { 9 fn default() -> Self {
11 self.op_scheduled = true; 10 Self { op_scheduled: None, op_in_progress: false }
12 } 11 }
13 pub(crate) fn should_start_op(&mut self) -> bool { 12}
14 if !self.op_in_progress && self.op_scheduled { 13
15 self.op_in_progress = true; 14impl<D> OpQueue<D> {
16 self.op_scheduled = false; 15 pub(crate) fn request_op(&mut self, data: D) {
17 return true; 16 self.op_scheduled = Some(data);
17 }
18 pub(crate) fn should_start_op(&mut self) -> Option<D> {
19 if self.op_in_progress {
20 return None;
18 } 21 }
19 false 22 self.op_in_progress = self.op_scheduled.is_some();
23 self.op_scheduled.take()
20 } 24 }
21 pub(crate) fn op_completed(&mut self) { 25 pub(crate) fn op_completed(&mut self) {
22 assert!(self.op_in_progress); 26 assert!(self.op_in_progress);
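
The op_queue.rs change makes the queue generic: instead of a bare "an op is scheduled" flag, it stores the data the operation needs (here the `BuildDataCollector`), and `should_start_op` hands that data back exactly once per run. The sketch below copies the queue from the diff verbatim and adds a hypothetical driver in `main` to show the request/start/complete lifecycle:

```rust
struct OpQueue<D> {
    op_scheduled: Option<D>,
    op_in_progress: bool,
}

impl<D> Default for OpQueue<D> {
    fn default() -> Self {
        Self { op_scheduled: None, op_in_progress: false }
    }
}

impl<D> OpQueue<D> {
    fn request_op(&mut self, data: D) {
        self.op_scheduled = Some(data);
    }
    fn should_start_op(&mut self) -> Option<D> {
        if self.op_in_progress {
            return None;
        }
        self.op_in_progress = self.op_scheduled.is_some();
        self.op_scheduled.take()
    }
    fn op_completed(&mut self) {
        assert!(self.op_in_progress);
        self.op_in_progress = false;
    }
}

fn main() {
    let mut queue: OpQueue<String> = OpQueue::default();
    queue.request_op("collect build data".to_string());

    // First poll: the scheduled payload is handed out and the op is marked in-flight.
    assert_eq!(queue.should_start_op().as_deref(), Some("collect build data"));

    // Requests made while an op is running stay queued instead of starting.
    queue.request_op("collect again".to_string());
    assert!(queue.should_start_op().is_none());

    // Completing the op lets the queued request through on the next poll.
    queue.op_completed();
    assert_eq!(queue.should_start_op().as_deref(), Some("collect again"));
}
```
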
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index ef73099cf..031a0cef4 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -4,7 +4,7 @@ use std::{mem, sync::Arc};
4use flycheck::{FlycheckConfig, FlycheckHandle}; 4use flycheck::{FlycheckConfig, FlycheckHandle};
5use ide::Change; 5use ide::Change;
6use ide_db::base_db::{CrateGraph, SourceRoot, VfsPath}; 6use ide_db::base_db::{CrateGraph, SourceRoot, VfsPath};
7use project_model::{ProcMacroClient, ProjectWorkspace}; 7use project_model::{BuildDataCollector, BuildDataResult, ProcMacroClient, ProjectWorkspace};
8use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind}; 8use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
9 9
10use crate::{ 10use crate::{
@@ -22,6 +22,13 @@ pub(crate) enum ProjectWorkspaceProgress {
22 End(Vec<anyhow::Result<ProjectWorkspace>>), 22 End(Vec<anyhow::Result<ProjectWorkspace>>),
23} 23}
24 24
25#[derive(Debug)]
26pub(crate) enum BuildDataProgress {
27 Begin,
28 Report(String),
29 End(anyhow::Result<BuildDataResult>),
30}
31
25impl GlobalState { 32impl GlobalState {
26 pub(crate) fn update_configuration(&mut self, config: Config) { 33 pub(crate) fn update_configuration(&mut self, config: Config) {
27 let _p = profile::span("GlobalState::update_configuration"); 34 let _p = profile::span("GlobalState::update_configuration");
@@ -41,7 +48,7 @@ impl GlobalState {
41 } 48 }
42 match self.status { 49 match self.status {
43 Status::Loading | Status::NeedsReload => return, 50 Status::Loading | Status::NeedsReload => return,
44 Status::Ready | Status::Invalid => (), 51 Status::Ready { .. } | Status::Invalid => (),
45 } 52 }
46 if self.config.cargo_autoreload() { 53 if self.config.cargo_autoreload() {
47 self.fetch_workspaces_request(); 54 self.fetch_workspaces_request();
@@ -89,7 +96,8 @@ impl GlobalState {
89 if self.config.status_notification() { 96 if self.config.status_notification() {
90 let lsp_status = match new_status { 97 let lsp_status = match new_status {
91 Status::Loading => lsp_ext::Status::Loading, 98 Status::Loading => lsp_ext::Status::Loading,
92 Status::Ready => lsp_ext::Status::Ready, 99 Status::Ready { partial: true } => lsp_ext::Status::ReadyPartial,
100 Status::Ready { partial: false } => lsp_ext::Status::Ready,
93 Status::Invalid => lsp_ext::Status::Invalid, 101 Status::Invalid => lsp_ext::Status::Invalid,
94 Status::NeedsReload => lsp_ext::Status::NeedsReload, 102 Status::NeedsReload => lsp_ext::Status::NeedsReload,
95 }; 103 };
@@ -99,11 +107,37 @@ impl GlobalState {
99 } 107 }
100 } 108 }
101 109
110 pub(crate) fn fetch_build_data_request(&mut self, build_data_collector: BuildDataCollector) {
111 self.fetch_build_data_queue.request_op(build_data_collector);
112 }
113
114 pub(crate) fn fetch_build_data_if_needed(&mut self) {
115 let mut build_data_collector = match self.fetch_build_data_queue.should_start_op() {
116 Some(it) => it,
117 None => return,
118 };
119 self.task_pool.handle.spawn_with_sender(move |sender| {
120 sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
121
122 let progress = {
123 let sender = sender.clone();
124 move |msg| {
125 sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
126 }
127 };
128 let res = build_data_collector.collect(&progress);
129 sender.send(Task::FetchBuildData(BuildDataProgress::End(res))).unwrap();
130 });
131 }
132 pub(crate) fn fetch_build_data_completed(&mut self) {
133 self.fetch_build_data_queue.op_completed()
134 }
135
102 pub(crate) fn fetch_workspaces_request(&mut self) { 136 pub(crate) fn fetch_workspaces_request(&mut self) {
103 self.fetch_workspaces_queue.request_op() 137 self.fetch_workspaces_queue.request_op(())
104 } 138 }
105 pub(crate) fn fetch_workspaces_if_needed(&mut self) { 139 pub(crate) fn fetch_workspaces_if_needed(&mut self) {
106 if !self.fetch_workspaces_queue.should_start_op() { 140 if self.fetch_workspaces_queue.should_start_op().is_none() {
107 return; 141 return;
108 } 142 }
109 log::info!("will fetch workspaces"); 143 log::info!("will fetch workspaces");
@@ -154,7 +188,11 @@ impl GlobalState {
154 self.fetch_workspaces_queue.op_completed() 188 self.fetch_workspaces_queue.op_completed()
155 } 189 }
156 190
157 pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<ProjectWorkspace>>) { 191 pub(crate) fn switch_workspaces(
192 &mut self,
193 workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
194 workspace_build_data: Option<anyhow::Result<BuildDataResult>>,
195 ) {
158 let _p = profile::span("GlobalState::switch_workspaces"); 196 let _p = profile::span("GlobalState::switch_workspaces");
159 log::info!("will switch workspaces: {:?}", workspaces); 197 log::info!("will switch workspaces: {:?}", workspaces);
160 198
@@ -176,7 +214,20 @@ impl GlobalState {
176 }) 214 })
177 .collect::<Vec<_>>(); 215 .collect::<Vec<_>>();
178 216
179 if &*self.workspaces == &workspaces { 217 let workspace_build_data = match workspace_build_data {
218 Some(Ok(it)) => Some(it),
219 Some(Err(err)) => {
220 log::error!("failed to fetch build data: {:#}", err);
221 self.show_message(
222 lsp_types::MessageType::Error,
223 format!("rust-analyzer failed to fetch build data: {:#}", err),
224 );
225 return;
226 }
227 None => None,
228 };
229
230 if &*self.workspaces == &workspaces && self.workspace_build_data == workspace_build_data {
180 return; 231 return;
181 } 232 }
182 233
@@ -189,7 +240,7 @@ impl GlobalState {
189 let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { 240 let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
190 watchers: workspaces 241 watchers: workspaces
191 .iter() 242 .iter()
192 .flat_map(ProjectWorkspace::to_roots) 243 .flat_map(|it| it.to_roots(workspace_build_data.as_ref()))
193 .filter(|it| it.is_member) 244 .filter(|it| it.is_member)
194 .flat_map(|root| { 245 .flat_map(|root| {
195 root.include.into_iter().map(|it| format!("{}/**/*.rs", it.display())) 246 root.include.into_iter().map(|it| format!("{}/**/*.rs", it.display()))
@@ -215,7 +266,8 @@ impl GlobalState {
215 let mut change = Change::new(); 266 let mut change = Change::new();
216 267
217 let files_config = self.config.files(); 268 let files_config = self.config.files();
218 let project_folders = ProjectFolders::new(&workspaces, &files_config.exclude); 269 let project_folders =
270 ProjectFolders::new(&workspaces, &files_config.exclude, workspace_build_data.as_ref());
219 271
220 self.proc_macro_client = match self.config.proc_macro_srv() { 272 self.proc_macro_client = match self.config.proc_macro_srv() {
221 None => None, 273 None => None,
@@ -257,15 +309,28 @@ impl GlobalState {
257 res 309 res
258 }; 310 };
259 for ws in workspaces.iter() { 311 for ws in workspaces.iter() {
260 crate_graph.extend(ws.to_crate_graph(self.proc_macro_client.as_ref(), &mut load)); 312 crate_graph.extend(ws.to_crate_graph(
313 workspace_build_data.as_ref(),
314 self.proc_macro_client.as_ref(),
315 &mut load,
316 ));
261 } 317 }
262 318
263 crate_graph 319 crate_graph
264 }; 320 };
265 change.set_crate_graph(crate_graph); 321 change.set_crate_graph(crate_graph);
266 322
323 if self.config.load_out_dirs_from_check() && workspace_build_data.is_none() {
324 let mut collector = BuildDataCollector::default();
325 for ws in &workspaces {
326 ws.collect_build_data_configs(&mut collector);
327 }
328 self.fetch_build_data_request(collector)
329 }
330
267 self.source_root_config = project_folders.source_root_config; 331 self.source_root_config = project_folders.source_root_config;
268 self.workspaces = Arc::new(workspaces); 332 self.workspaces = Arc::new(workspaces);
333 self.workspace_build_data = workspace_build_data;
269 334
270 self.analysis_host.apply_change(change); 335 self.analysis_host.apply_change(change);
271 self.process_changes(); 336 self.process_changes();
@@ -323,12 +388,13 @@ impl ProjectFolders {
323 pub(crate) fn new( 388 pub(crate) fn new(
324 workspaces: &[ProjectWorkspace], 389 workspaces: &[ProjectWorkspace],
325 global_excludes: &[AbsPathBuf], 390 global_excludes: &[AbsPathBuf],
391 build_data: Option<&BuildDataResult>,
326 ) -> ProjectFolders { 392 ) -> ProjectFolders {
327 let mut res = ProjectFolders::default(); 393 let mut res = ProjectFolders::default();
328 let mut fsc = FileSetConfig::builder(); 394 let mut fsc = FileSetConfig::builder();
329 let mut local_filesets = vec![]; 395 let mut local_filesets = vec![];
330 396
331 for root in workspaces.iter().flat_map(|it| it.to_roots()) { 397 for root in workspaces.iter().flat_map(|it| it.to_roots(build_data)) {
332 let file_set_roots: Vec<VfsPath> = 398 let file_set_roots: Vec<VfsPath> =
333 root.include.iter().cloned().map(VfsPath::from).collect(); 399 root.include.iter().cloned().map(VfsPath::from).collect();
334 400
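
Putting the reload.rs pieces together: when OUT_DIR loading is enabled, `switch_workspaces` effectively runs twice, first without build data (queuing a fetch via `fetch_build_data_request`), then again once the collector finishes. A very rough, hypothetical sketch of that two-pass flow, with a toy `State` standing in for `GlobalState`:

```rust
#[derive(Default)]
struct State {
    load_out_dirs_from_check: bool,
    has_build_data: bool,
    fetch_scheduled: bool,
}

impl State {
    // Loose stand-in for GlobalState::switch_workspaces in the diff.
    fn switch_workspaces(&mut self, build_data: Option<()>) {
        if build_data.is_some() {
            self.has_build_data = true;
            self.fetch_scheduled = false;
        }
        // First pass: the crate graph is built without OUT_DIR/cfg info, and a
        // build-data fetch is queued instead.
        if self.load_out_dirs_from_check && !self.has_build_data {
            self.fetch_scheduled = true;
        }
    }
}

fn main() {
    let mut state = State { load_out_dirs_from_check: true, ..State::default() };

    state.switch_workspaces(None); // pass 1: workspaces loaded, no build data yet
    assert!(state.fetch_scheduled && !state.has_build_data);

    state.switch_workspaces(Some(())); // pass 2: the collector finished
    assert!(state.has_build_data && !state.fetch_scheduled);
}
```

This is also why the status notification gains the `readyPartial` value below: the server is usable after pass 1, but proc macros and OUT_DIR-dependent code only resolve after pass 2.
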
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index 78d86f060..d7f287894 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
1<!--- 1<!---
2lsp_ext.rs hash: 91f2c62457e0a20f 2lsp_ext.rs hash: 7609fd6d7b4ab231
3 3
4If you need to change the above hash to make the test pass, please check if you 4If you need to change the above hash to make the test pass, please check if you
5need to adjust this doc as well and ping this issue: 5need to adjust this doc as well and ping this issue:
@@ -423,7 +423,7 @@ Reloads project information (that is, re-executes `cargo metadata`).
423 423
424```typescript 424```typescript
425interface StatusParams { 425interface StatusParams {
426 status: "loading" | "ready" | "invalid" | "needsReload", 426 status: "loading" | "readyPartial" | "ready" | "invalid" | "needsReload",
427} 427}
428``` 428```
429 429
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index e7585184b..c07583cfa 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -74,6 +74,12 @@ export class Ctx {
74 this.statusBar.command = undefined; 74 this.statusBar.command = undefined;
75 this.statusBar.color = undefined; 75 this.statusBar.color = undefined;
76 break; 76 break;
77 case "readyPartial":
78 this.statusBar.text = "rust-analyzer";
79 this.statusBar.tooltip = "Ready (Partial)";
80 this.statusBar.command = undefined;
81 this.statusBar.color = undefined;
82 break;
77 case "ready": 83 case "ready":
78 this.statusBar.text = "rust-analyzer"; 84 this.statusBar.text = "rust-analyzer";
79 this.statusBar.tooltip = "Ready"; 85 this.statusBar.tooltip = "Ready";
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index d21a3db86..2de1e427d 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -10,7 +10,7 @@ export interface AnalyzerStatusParams {
10export const analyzerStatus = new lc.RequestType<AnalyzerStatusParams, string, void>("rust-analyzer/analyzerStatus"); 10export const analyzerStatus = new lc.RequestType<AnalyzerStatusParams, string, void>("rust-analyzer/analyzerStatus");
11export const memoryUsage = new lc.RequestType0<string, void>("rust-analyzer/memoryUsage"); 11export const memoryUsage = new lc.RequestType0<string, void>("rust-analyzer/memoryUsage");
12 12
13export type Status = "loading" | "ready" | "invalid" | "needsReload"; 13export type Status = "loading" | "ready" | "readyPartial" | "invalid" | "needsReload";
14export interface StatusParams { 14export interface StatusParams {
15 status: Status; 15 status: Status;
16} 16}