45 files changed, 1629 insertions, 1391 deletions
diff --git a/Cargo.lock b/Cargo.lock
index b2b624212..044314a8f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1395,6 +1395,7 @@ dependencies = [ | |||
1395 | "ra_project_model", | 1395 | "ra_project_model", |
1396 | "ra_syntax", | 1396 | "ra_syntax", |
1397 | "ra_text_edit", | 1397 | "ra_text_edit", |
1398 | "ra_toolchain", | ||
1398 | "ra_tt", | 1399 | "ra_tt", |
1399 | "rand", | 1400 | "rand", |
1400 | "rustc-hash", | 1401 | "rustc-hash", |
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 9e8205ae7..92ec4f92e 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -7,7 +7,7 @@ use std::{ | |||
7 | io::{self, BufReader}, | 7 | io::{self, BufReader}, |
8 | path::PathBuf, | 8 | path::PathBuf, |
9 | process::{Command, Stdio}, | 9 | process::{Command, Stdio}, |
10 | time::Instant, | 10 | time::Duration, |
11 | }; | 11 | }; |
12 | 12 | ||
13 | use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; | 13 | use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; |
@@ -74,9 +74,6 @@ impl FlycheckHandle { | |||
74 | 74 | ||
75 | #[derive(Debug)] | 75 | #[derive(Debug)] |
76 | pub enum Message { | 76 | pub enum Message { |
77 | /// Request a clearing of all cached diagnostics from the check watcher | ||
78 | ClearDiagnostics, | ||
79 | |||
80 | /// Request adding a diagnostic with fixes included to a file | 77 | /// Request adding a diagnostic with fixes included to a file |
81 | AddDiagnostic { workspace_root: PathBuf, diagnostic: Diagnostic }, | 78 | AddDiagnostic { workspace_root: PathBuf, diagnostic: Diagnostic }, |
82 | 79 | ||
@@ -86,9 +83,10 @@ pub enum Message { | |||
86 | 83 | ||
87 | #[derive(Debug)] | 84 | #[derive(Debug)] |
88 | pub enum Progress { | 85 | pub enum Progress { |
89 | Being, | 86 | DidStart, |
90 | DidCheckCrate(String), | 87 | DidCheckCrate(String), |
91 | End, | 88 | DidFinish, |
89 | DidCancel, | ||
92 | } | 90 | } |
93 | 91 | ||
94 | struct Restart; | 92 | struct Restart; |
@@ -97,19 +95,18 @@ struct FlycheckActor { | |||
97 | sender: Box<dyn Fn(Message) + Send>, | 95 | sender: Box<dyn Fn(Message) + Send>, |
98 | config: FlycheckConfig, | 96 | config: FlycheckConfig, |
99 | workspace_root: PathBuf, | 97 | workspace_root: PathBuf, |
100 | last_update_req: Option<Instant>, | ||
101 | /// WatchThread exists to wrap around the communication needed to be able to | 98 | /// WatchThread exists to wrap around the communication needed to be able to |
102 | /// run `cargo check` without blocking. Currently the Rust standard library | 99 | /// run `cargo check` without blocking. Currently the Rust standard library |
103 | /// doesn't provide a way to read sub-process output without blocking, so we | 100 | /// doesn't provide a way to read sub-process output without blocking, so we |
104 | /// have to wrap sub-processes output handling in a thread and pass messages | 101 | /// have to wrap sub-processes output handling in a thread and pass messages |
105 | /// back over a channel. | 102 | /// back over a channel. |
106 | // XXX: drop order is significant | 103 | // XXX: drop order is significant |
107 | check_process: Option<(Receiver<CheckEvent>, jod_thread::JoinHandle)>, | 104 | check_process: Option<(Receiver<cargo_metadata::Message>, jod_thread::JoinHandle)>, |
108 | } | 105 | } |
109 | 106 | ||
110 | enum Event { | 107 | enum Event { |
111 | Restart(Restart), | 108 | Restart(Restart), |
112 | CheckEvent(Option<CheckEvent>), | 109 | CheckEvent(Option<cargo_metadata::Message>), |
113 | } | 110 | } |
114 | 111 | ||
115 | impl FlycheckActor { | 112 | impl FlycheckActor { |
@@ -118,78 +115,58 @@ impl FlycheckActor { | |||
118 | config: FlycheckConfig, | 115 | config: FlycheckConfig, |
119 | workspace_root: PathBuf, | 116 | workspace_root: PathBuf, |
120 | ) -> FlycheckActor { | 117 | ) -> FlycheckActor { |
121 | FlycheckActor { sender, config, workspace_root, last_update_req: None, check_process: None } | 118 | FlycheckActor { sender, config, workspace_root, check_process: None } |
119 | } | ||
120 | fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> { | ||
121 | let check_chan = self.check_process.as_ref().map(|(chan, _thread)| chan); | ||
122 | select! { | ||
123 | recv(inbox) -> msg => msg.ok().map(Event::Restart), | ||
124 | recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), | ||
125 | } | ||
122 | } | 126 | } |
123 | |||
124 | fn run(&mut self, inbox: Receiver<Restart>) { | 127 | fn run(&mut self, inbox: Receiver<Restart>) { |
125 | // If we rerun the thread, we need to discard the previous check results first | ||
126 | self.send(Message::ClearDiagnostics); | ||
127 | self.send(Message::Progress(Progress::End)); | ||
128 | |||
129 | while let Some(event) = self.next_event(&inbox) { | 128 | while let Some(event) = self.next_event(&inbox) { |
130 | match event { | 129 | match event { |
131 | Event::Restart(Restart) => self.last_update_req = Some(Instant::now()), | 130 | Event::Restart(Restart) => { |
131 | while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {} | ||
132 | self.cancel_check_process(); | ||
133 | self.check_process = Some(self.start_check_process()); | ||
134 | self.send(Message::Progress(Progress::DidStart)); | ||
135 | } | ||
132 | Event::CheckEvent(None) => { | 136 | Event::CheckEvent(None) => { |
133 | // Watcher finished, replace it with a never channel to | 137 | // Watcher finished, replace it with a never channel to |
134 | // avoid busy-waiting. | 138 | // avoid busy-waiting. |
135 | self.check_process = None; | 139 | assert!(self.check_process.take().is_some()); |
140 | self.send(Message::Progress(Progress::DidFinish)); | ||
136 | } | 141 | } |
137 | Event::CheckEvent(Some(event)) => match event { | 142 | Event::CheckEvent(Some(message)) => match message { |
138 | CheckEvent::Begin => { | 143 | cargo_metadata::Message::CompilerArtifact(msg) => { |
139 | self.send(Message::Progress(Progress::Being)); | ||
140 | } | ||
141 | |||
142 | CheckEvent::End => { | ||
143 | self.send(Message::Progress(Progress::End)); | ||
144 | } | ||
145 | |||
146 | CheckEvent::Msg(cargo_metadata::Message::CompilerArtifact(msg)) => { | ||
147 | self.send(Message::Progress(Progress::DidCheckCrate(msg.target.name))); | 144 | self.send(Message::Progress(Progress::DidCheckCrate(msg.target.name))); |
148 | } | 145 | } |
149 | 146 | ||
150 | CheckEvent::Msg(cargo_metadata::Message::CompilerMessage(msg)) => { | 147 | cargo_metadata::Message::CompilerMessage(msg) => { |
151 | self.send(Message::AddDiagnostic { | 148 | self.send(Message::AddDiagnostic { |
152 | workspace_root: self.workspace_root.clone(), | 149 | workspace_root: self.workspace_root.clone(), |
153 | diagnostic: msg.message, | 150 | diagnostic: msg.message, |
154 | }); | 151 | }); |
155 | } | 152 | } |
156 | 153 | ||
157 | CheckEvent::Msg(cargo_metadata::Message::BuildScriptExecuted(_)) | 154 | cargo_metadata::Message::BuildScriptExecuted(_) |
158 | | CheckEvent::Msg(cargo_metadata::Message::BuildFinished(_)) | 155 | | cargo_metadata::Message::BuildFinished(_) |
159 | | CheckEvent::Msg(cargo_metadata::Message::TextLine(_)) | 156 | | cargo_metadata::Message::TextLine(_) |
160 | | CheckEvent::Msg(cargo_metadata::Message::Unknown) => {} | 157 | | cargo_metadata::Message::Unknown => {} |
161 | }, | 158 | }, |
162 | } | 159 | } |
163 | if self.should_recheck() { | ||
164 | self.last_update_req = None; | ||
165 | self.send(Message::ClearDiagnostics); | ||
166 | self.restart_check_process(); | ||
167 | } | ||
168 | } | ||
169 | } | ||
170 | |||
171 | fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> { | ||
172 | let check_chan = self.check_process.as_ref().map(|(chan, _thread)| chan); | ||
173 | select! { | ||
174 | recv(inbox) -> msg => msg.ok().map(Event::Restart), | ||
175 | recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), | ||
176 | } | 160 | } |
161 | // If we rerun the thread, we need to discard the previous check results first | ||
162 | self.cancel_check_process(); | ||
177 | } | 163 | } |
178 | 164 | fn cancel_check_process(&mut self) { | |
179 | fn should_recheck(&mut self) -> bool { | 165 | if self.check_process.take().is_some() { |
180 | if let Some(_last_update_req) = &self.last_update_req { | 166 | self.send(Message::Progress(Progress::DidCancel)); |
181 | // We currently only request an update on save, as we need up to | ||
182 | // date source on disk for cargo check to do it's magic, so we | ||
183 | // don't really need to debounce the requests at this point. | ||
184 | return true; | ||
185 | } | 167 | } |
186 | false | ||
187 | } | 168 | } |
188 | 169 | fn start_check_process(&self) -> (Receiver<cargo_metadata::Message>, jod_thread::JoinHandle) { | |
189 | fn restart_check_process(&mut self) { | ||
190 | // First, clear and cancel the old thread | ||
191 | self.check_process = None; | ||
192 | |||
193 | let mut cmd = match &self.config { | 170 | let mut cmd = match &self.config { |
194 | FlycheckConfig::CargoCommand { | 171 | FlycheckConfig::CargoCommand { |
195 | command, | 172 | command, |
@@ -226,8 +203,6 @@ impl FlycheckActor { | |||
226 | let thread = jod_thread::spawn(move || { | 203 | let thread = jod_thread::spawn(move || { |
227 | // If we trigger an error here, we will do so in the loop instead, | 204 | // If we trigger an error here, we will do so in the loop instead, |
228 | // which will break out of the loop, and continue the shutdown | 205 | // which will break out of the loop, and continue the shutdown |
229 | let _ = message_send.send(CheckEvent::Begin); | ||
230 | |||
231 | let res = run_cargo(cmd, &mut |message| { | 206 | let res = run_cargo(cmd, &mut |message| { |
232 | // Skip certain kinds of messages to only spend time on what's useful | 207 | // Skip certain kinds of messages to only spend time on what's useful |
233 | match &message { | 208 | match &message { |
@@ -240,7 +215,7 @@ impl FlycheckActor { | |||
240 | } | 215 | } |
241 | 216 | ||
242 | // if the send channel was closed, we want to shutdown | 217 | // if the send channel was closed, we want to shutdown |
243 | message_send.send(CheckEvent::Msg(message)).is_ok() | 218 | message_send.send(message).is_ok() |
244 | }); | 219 | }); |
245 | 220 | ||
246 | if let Err(err) = res { | 221 | if let Err(err) = res { |
@@ -248,12 +223,8 @@ impl FlycheckActor { | |||
248 | // to display user-caused misconfiguration errors instead of just logging them here | 223 | // to display user-caused misconfiguration errors instead of just logging them here |
249 | log::error!("Cargo watcher failed {:?}", err); | 224 | log::error!("Cargo watcher failed {:?}", err); |
250 | } | 225 | } |
251 | |||
252 | // We can ignore any error here, as we are already in the progress | ||
253 | // of shutting down. | ||
254 | let _ = message_send.send(CheckEvent::End); | ||
255 | }); | 226 | }); |
256 | self.check_process = Some((message_recv, thread)) | 227 | (message_recv, thread) |
257 | } | 228 | } |
258 | 229 | ||
259 | fn send(&self, check_task: Message) { | 230 | fn send(&self, check_task: Message) { |
@@ -261,12 +232,6 @@ impl FlycheckActor { | |||
261 | } | 232 | } |
262 | } | 233 | } |
263 | 234 | ||
264 | enum CheckEvent { | ||
265 | Begin, | ||
266 | Msg(cargo_metadata::Message), | ||
267 | End, | ||
268 | } | ||
269 | |||
270 | fn run_cargo( | 235 | fn run_cargo( |
271 | mut command: Command, | 236 | mut command: Command, |
272 | on_message: &mut dyn FnMut(cargo_metadata::Message) -> bool, | 237 | on_message: &mut dyn FnMut(cargo_metadata::Message) -> bool, |
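Note: the rewritten actor above replaces the Instant-based should_recheck bookkeeping with two moves: on a Restart request it drains any further restarts queued within 50 ms before cancelling the old check and spawning a fresh cargo process, and cancel_check_process reports Progress::DidCancel whenever an in-flight check is dropped. Below is a minimal, self-contained sketch of that debounce step using crossbeam-channel; drain_restarts is a hypothetical helper name, not rust-analyzer API.

// Sketch only: coalesce a burst of Restart requests into one recheck by
// draining the inbox until it stays quiet for 50 ms (mirrors the
// `recv_timeout(Duration::from_millis(50))` loop in the diff).
use std::time::Duration;
use crossbeam_channel::{unbounded, Receiver};

struct Restart;

fn drain_restarts(inbox: &Receiver<Restart>) {
    while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {}
}

fn main() {
    let (tx, rx) = unbounded();
    for _ in 0..3 {
        tx.send(Restart).unwrap();
    }
    drain_restarts(&rx);
    assert!(rx.is_empty()); // all queued restarts collapsed into this one pass
}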
diff --git a/crates/ra_assists/src/handlers/change_return_type_to_result.rs b/crates/ra_assists/src/handlers/change_return_type_to_result.rs
index c6baa0a57..855baf187 100644
--- a/crates/ra_assists/src/handlers/change_return_type_to_result.rs
+++ b/crates/ra_assists/src/handlers/change_return_type_to_result.rs
@@ -4,6 +4,7 @@ use ra_syntax::{ | |||
4 | }; | 4 | }; |
5 | 5 | ||
6 | use crate::{AssistContext, AssistId, Assists}; | 6 | use crate::{AssistContext, AssistId, Assists}; |
7 | use test_utils::mark; | ||
7 | 8 | ||
8 | // Assist: change_return_type_to_result | 9 | // Assist: change_return_type_to_result |
9 | // | 10 | // |
@@ -22,8 +23,13 @@ pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContex | |||
22 | let fn_def = ret_type.syntax().parent().and_then(ast::FnDef::cast)?; | 23 | let fn_def = ret_type.syntax().parent().and_then(ast::FnDef::cast)?; |
23 | 24 | ||
24 | let type_ref = &ret_type.type_ref()?; | 25 | let type_ref = &ret_type.type_ref()?; |
25 | if type_ref.syntax().text().to_string().starts_with("Result<") { | 26 | let ret_type_str = type_ref.syntax().text().to_string(); |
26 | return None; | 27 | let first_part_ret_type = ret_type_str.splitn(2, '<').next(); |
28 | if let Some(ret_type_first_part) = first_part_ret_type { | ||
29 | if ret_type_first_part.ends_with("Result") { | ||
30 | mark::hit!(change_return_type_to_result_simple_return_type_already_result); | ||
31 | return None; | ||
32 | } | ||
27 | } | 33 | } |
28 | 34 | ||
29 | let block_expr = &fn_def.body()?; | 35 | let block_expr = &fn_def.body()?; |
@@ -297,6 +303,29 @@ mod tests { | |||
297 | } | 303 | } |
298 | 304 | ||
299 | #[test] | 305 | #[test] |
306 | fn change_return_type_to_result_simple_return_type_already_result_std() { | ||
307 | check_assist_not_applicable( | ||
308 | change_return_type_to_result, | ||
309 | r#"fn foo() -> std::result::Result<i32<|>, String> { | ||
310 | let test = "test"; | ||
311 | return 42i32; | ||
312 | }"#, | ||
313 | ); | ||
314 | } | ||
315 | |||
316 | #[test] | ||
317 | fn change_return_type_to_result_simple_return_type_already_result() { | ||
318 | mark::check!(change_return_type_to_result_simple_return_type_already_result); | ||
319 | check_assist_not_applicable( | ||
320 | change_return_type_to_result, | ||
321 | r#"fn foo() -> Result<i32<|>, String> { | ||
322 | let test = "test"; | ||
323 | return 42i32; | ||
324 | }"#, | ||
325 | ); | ||
326 | } | ||
327 | |||
328 | #[test] | ||
300 | fn change_return_type_to_result_simple_with_cursor() { | 329 | fn change_return_type_to_result_simple_with_cursor() { |
301 | check_assist( | 330 | check_assist( |
302 | change_return_type_to_result, | 331 | change_return_type_to_result, |
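Note: the new check above no longer requires the return type to start with the literal text "Result<"; it splits the printed type on the first '<' and asks whether that prefix ends with "Result", so std::result::Result<...> (and other Result paths) also make the assist bail out. A standalone sketch of the same string heuristic follows; is_already_result is a hypothetical name, not the assist's API.

fn is_already_result(ret_type: &str) -> bool {
    // Take everything before the first '<' and check whether it names some `Result`.
    ret_type
        .splitn(2, '<')
        .next()
        .map_or(false, |prefix| prefix.ends_with("Result"))
}

fn main() {
    assert!(is_already_result("Result<i32, String>"));
    assert!(is_already_result("std::result::Result<i32, String>"));
    assert!(!is_already_result("i32"));
}

Like the diff, this is purely textual, so any type whose path happens to end in "Result" is also treated as already being a Result.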
diff --git a/crates/ra_assists/src/handlers/introduce_variable.rs b/crates/ra_assists/src/handlers/extract_variable.rs
index 88b62278f..c4150d2bb 100644
--- a/crates/ra_assists/src/handlers/introduce_variable.rs
+++ b/crates/ra_assists/src/handlers/extract_variable.rs
@@ -11,7 +11,7 @@ use test_utils::mark; | |||
11 | 11 | ||
12 | use crate::{AssistContext, AssistId, Assists}; | 12 | use crate::{AssistContext, AssistId, Assists}; |
13 | 13 | ||
14 | // Assist: introduce_variable | 14 | // Assist: extract_variable |
15 | // | 15 | // |
16 | // Extracts subexpression into a variable. | 16 | // Extracts subexpression into a variable. |
17 | // | 17 | // |
@@ -27,13 +27,13 @@ use crate::{AssistContext, AssistId, Assists}; | |||
27 | // var_name * 4; | 27 | // var_name * 4; |
28 | // } | 28 | // } |
29 | // ``` | 29 | // ``` |
30 | pub(crate) fn introduce_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 30 | pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
31 | if ctx.frange.range.is_empty() { | 31 | if ctx.frange.range.is_empty() { |
32 | return None; | 32 | return None; |
33 | } | 33 | } |
34 | let node = ctx.covering_element(); | 34 | let node = ctx.covering_element(); |
35 | if node.kind() == COMMENT { | 35 | if node.kind() == COMMENT { |
36 | mark::hit!(introduce_var_in_comment_is_not_applicable); | 36 | mark::hit!(extract_var_in_comment_is_not_applicable); |
37 | return None; | 37 | return None; |
38 | } | 38 | } |
39 | let expr = node.ancestors().find_map(valid_target_expr)?; | 39 | let expr = node.ancestors().find_map(valid_target_expr)?; |
@@ -43,13 +43,27 @@ pub(crate) fn introduce_variable(acc: &mut Assists, ctx: &AssistContext) -> Opti | |||
43 | return None; | 43 | return None; |
44 | } | 44 | } |
45 | let target = expr.syntax().text_range(); | 45 | let target = expr.syntax().text_range(); |
46 | acc.add(AssistId("introduce_variable"), "Extract into variable", target, move |edit| { | 46 | acc.add(AssistId("extract_variable"), "Extract into variable", target, move |edit| { |
47 | let field_shorthand = match expr.syntax().parent().and_then(ast::RecordField::cast) { | ||
48 | Some(field) => field.name_ref(), | ||
49 | None => None, | ||
50 | }; | ||
51 | |||
47 | let mut buf = String::new(); | 52 | let mut buf = String::new(); |
48 | 53 | ||
54 | let var_name = match &field_shorthand { | ||
55 | Some(it) => it.to_string(), | ||
56 | None => "var_name".to_string(), | ||
57 | }; | ||
58 | let expr_range = match &field_shorthand { | ||
59 | Some(it) => it.syntax().text_range().cover(expr.syntax().text_range()), | ||
60 | None => expr.syntax().text_range(), | ||
61 | }; | ||
62 | |||
49 | if wrap_in_block { | 63 | if wrap_in_block { |
50 | buf.push_str("{ let var_name = "); | 64 | format_to!(buf, "{{ let {} = ", var_name); |
51 | } else { | 65 | } else { |
52 | buf.push_str("let var_name = "); | 66 | format_to!(buf, "let {} = ", var_name); |
53 | }; | 67 | }; |
54 | format_to!(buf, "{}", expr.syntax()); | 68 | format_to!(buf, "{}", expr.syntax()); |
55 | 69 | ||
@@ -60,17 +74,17 @@ pub(crate) fn introduce_variable(acc: &mut Assists, ctx: &AssistContext) -> Opti | |||
60 | false | 74 | false |
61 | }; | 75 | }; |
62 | if is_full_stmt { | 76 | if is_full_stmt { |
63 | mark::hit!(test_introduce_var_expr_stmt); | 77 | mark::hit!(test_extract_var_expr_stmt); |
64 | if full_stmt.unwrap().semicolon_token().is_none() { | 78 | if full_stmt.unwrap().semicolon_token().is_none() { |
65 | buf.push_str(";"); | 79 | buf.push_str(";"); |
66 | } | 80 | } |
67 | let offset = expr.syntax().text_range(); | ||
68 | match ctx.config.snippet_cap { | 81 | match ctx.config.snippet_cap { |
69 | Some(cap) => { | 82 | Some(cap) => { |
70 | let snip = buf.replace("let var_name", "let $0var_name"); | 83 | let snip = |
71 | edit.replace_snippet(cap, offset, snip) | 84 | buf.replace(&format!("let {}", var_name), &format!("let $0{}", var_name)); |
85 | edit.replace_snippet(cap, expr_range, snip) | ||
72 | } | 86 | } |
73 | None => edit.replace(offset, buf), | 87 | None => edit.replace(expr_range, buf), |
74 | } | 88 | } |
75 | return; | 89 | return; |
76 | } | 90 | } |
@@ -88,11 +102,12 @@ pub(crate) fn introduce_variable(acc: &mut Assists, ctx: &AssistContext) -> Opti | |||
88 | buf.push_str(text); | 102 | buf.push_str(text); |
89 | } | 103 | } |
90 | 104 | ||
91 | edit.replace(expr.syntax().text_range(), "var_name".to_string()); | 105 | edit.replace(expr_range, var_name.clone()); |
92 | let offset = anchor_stmt.text_range().start(); | 106 | let offset = anchor_stmt.text_range().start(); |
93 | match ctx.config.snippet_cap { | 107 | match ctx.config.snippet_cap { |
94 | Some(cap) => { | 108 | Some(cap) => { |
95 | let snip = buf.replace("let var_name", "let $0var_name"); | 109 | let snip = |
110 | buf.replace(&format!("let {}", var_name), &format!("let $0{}", var_name)); | ||
96 | edit.insert_snippet(cap, offset, snip) | 111 | edit.insert_snippet(cap, offset, snip) |
97 | } | 112 | } |
98 | None => edit.insert(offset, buf), | 113 | None => edit.insert(offset, buf), |
@@ -118,7 +133,7 @@ fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> { | |||
118 | } | 133 | } |
119 | } | 134 | } |
120 | 135 | ||
121 | /// Returns the syntax node which will follow the freshly introduced var | 136 | /// Returns the syntax node which will follow the freshly extractd var |
122 | /// and a boolean indicating whether we have to wrap it within a { } block | 137 | /// and a boolean indicating whether we have to wrap it within a { } block |
123 | /// to produce correct code. | 138 | /// to produce correct code. |
124 | /// It can be a statement, the last in a block expression or a wanna be block | 139 | /// It can be a statement, the last in a block expression or a wanna be block |
@@ -127,7 +142,7 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> { | |||
127 | expr.syntax().ancestors().find_map(|node| { | 142 | expr.syntax().ancestors().find_map(|node| { |
128 | if let Some(expr) = node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr()) { | 143 | if let Some(expr) = node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr()) { |
129 | if expr.syntax() == &node { | 144 | if expr.syntax() == &node { |
130 | mark::hit!(test_introduce_var_last_expr); | 145 | mark::hit!(test_extract_var_last_expr); |
131 | return Some((node, false)); | 146 | return Some((node, false)); |
132 | } | 147 | } |
133 | } | 148 | } |
@@ -155,9 +170,9 @@ mod tests { | |||
155 | use super::*; | 170 | use super::*; |
156 | 171 | ||
157 | #[test] | 172 | #[test] |
158 | fn test_introduce_var_simple() { | 173 | fn test_extract_var_simple() { |
159 | check_assist( | 174 | check_assist( |
160 | introduce_variable, | 175 | extract_variable, |
161 | r#" | 176 | r#" |
162 | fn foo() { | 177 | fn foo() { |
163 | foo(<|>1 + 1<|>); | 178 | foo(<|>1 + 1<|>); |
@@ -171,16 +186,16 @@ fn foo() { | |||
171 | } | 186 | } |
172 | 187 | ||
173 | #[test] | 188 | #[test] |
174 | fn introduce_var_in_comment_is_not_applicable() { | 189 | fn extract_var_in_comment_is_not_applicable() { |
175 | mark::check!(introduce_var_in_comment_is_not_applicable); | 190 | mark::check!(extract_var_in_comment_is_not_applicable); |
176 | check_assist_not_applicable(introduce_variable, "fn main() { 1 + /* <|>comment<|> */ 1; }"); | 191 | check_assist_not_applicable(extract_variable, "fn main() { 1 + /* <|>comment<|> */ 1; }"); |
177 | } | 192 | } |
178 | 193 | ||
179 | #[test] | 194 | #[test] |
180 | fn test_introduce_var_expr_stmt() { | 195 | fn test_extract_var_expr_stmt() { |
181 | mark::check!(test_introduce_var_expr_stmt); | 196 | mark::check!(test_extract_var_expr_stmt); |
182 | check_assist( | 197 | check_assist( |
183 | introduce_variable, | 198 | extract_variable, |
184 | r#" | 199 | r#" |
185 | fn foo() { | 200 | fn foo() { |
186 | <|>1 + 1<|>; | 201 | <|>1 + 1<|>; |
@@ -191,7 +206,7 @@ fn foo() { | |||
191 | }"#, | 206 | }"#, |
192 | ); | 207 | ); |
193 | check_assist( | 208 | check_assist( |
194 | introduce_variable, | 209 | extract_variable, |
195 | " | 210 | " |
196 | fn foo() { | 211 | fn foo() { |
197 | <|>{ let x = 0; x }<|> | 212 | <|>{ let x = 0; x }<|> |
@@ -206,9 +221,9 @@ fn foo() { | |||
206 | } | 221 | } |
207 | 222 | ||
208 | #[test] | 223 | #[test] |
209 | fn test_introduce_var_part_of_expr_stmt() { | 224 | fn test_extract_var_part_of_expr_stmt() { |
210 | check_assist( | 225 | check_assist( |
211 | introduce_variable, | 226 | extract_variable, |
212 | " | 227 | " |
213 | fn foo() { | 228 | fn foo() { |
214 | <|>1<|> + 1; | 229 | <|>1<|> + 1; |
@@ -222,10 +237,10 @@ fn foo() { | |||
222 | } | 237 | } |
223 | 238 | ||
224 | #[test] | 239 | #[test] |
225 | fn test_introduce_var_last_expr() { | 240 | fn test_extract_var_last_expr() { |
226 | mark::check!(test_introduce_var_last_expr); | 241 | mark::check!(test_extract_var_last_expr); |
227 | check_assist( | 242 | check_assist( |
228 | introduce_variable, | 243 | extract_variable, |
229 | r#" | 244 | r#" |
230 | fn foo() { | 245 | fn foo() { |
231 | bar(<|>1 + 1<|>) | 246 | bar(<|>1 + 1<|>) |
@@ -239,7 +254,7 @@ fn foo() { | |||
239 | "#, | 254 | "#, |
240 | ); | 255 | ); |
241 | check_assist( | 256 | check_assist( |
242 | introduce_variable, | 257 | extract_variable, |
243 | r#" | 258 | r#" |
244 | fn foo() { | 259 | fn foo() { |
245 | <|>bar(1 + 1)<|> | 260 | <|>bar(1 + 1)<|> |
@@ -255,9 +270,9 @@ fn foo() { | |||
255 | } | 270 | } |
256 | 271 | ||
257 | #[test] | 272 | #[test] |
258 | fn test_introduce_var_in_match_arm_no_block() { | 273 | fn test_extract_var_in_match_arm_no_block() { |
259 | check_assist( | 274 | check_assist( |
260 | introduce_variable, | 275 | extract_variable, |
261 | " | 276 | " |
262 | fn main() { | 277 | fn main() { |
263 | let x = true; | 278 | let x = true; |
@@ -280,9 +295,9 @@ fn main() { | |||
280 | } | 295 | } |
281 | 296 | ||
282 | #[test] | 297 | #[test] |
283 | fn test_introduce_var_in_match_arm_with_block() { | 298 | fn test_extract_var_in_match_arm_with_block() { |
284 | check_assist( | 299 | check_assist( |
285 | introduce_variable, | 300 | extract_variable, |
286 | " | 301 | " |
287 | fn main() { | 302 | fn main() { |
288 | let x = true; | 303 | let x = true; |
@@ -312,9 +327,9 @@ fn main() { | |||
312 | } | 327 | } |
313 | 328 | ||
314 | #[test] | 329 | #[test] |
315 | fn test_introduce_var_in_closure_no_block() { | 330 | fn test_extract_var_in_closure_no_block() { |
316 | check_assist( | 331 | check_assist( |
317 | introduce_variable, | 332 | extract_variable, |
318 | " | 333 | " |
319 | fn main() { | 334 | fn main() { |
320 | let lambda = |x: u32| <|>x * 2<|>; | 335 | let lambda = |x: u32| <|>x * 2<|>; |
@@ -329,9 +344,9 @@ fn main() { | |||
329 | } | 344 | } |
330 | 345 | ||
331 | #[test] | 346 | #[test] |
332 | fn test_introduce_var_in_closure_with_block() { | 347 | fn test_extract_var_in_closure_with_block() { |
333 | check_assist( | 348 | check_assist( |
334 | introduce_variable, | 349 | extract_variable, |
335 | " | 350 | " |
336 | fn main() { | 351 | fn main() { |
337 | let lambda = |x: u32| { <|>x * 2<|> }; | 352 | let lambda = |x: u32| { <|>x * 2<|> }; |
@@ -346,9 +361,9 @@ fn main() { | |||
346 | } | 361 | } |
347 | 362 | ||
348 | #[test] | 363 | #[test] |
349 | fn test_introduce_var_path_simple() { | 364 | fn test_extract_var_path_simple() { |
350 | check_assist( | 365 | check_assist( |
351 | introduce_variable, | 366 | extract_variable, |
352 | " | 367 | " |
353 | fn main() { | 368 | fn main() { |
354 | let o = <|>Some(true)<|>; | 369 | let o = <|>Some(true)<|>; |
@@ -364,9 +379,9 @@ fn main() { | |||
364 | } | 379 | } |
365 | 380 | ||
366 | #[test] | 381 | #[test] |
367 | fn test_introduce_var_path_method() { | 382 | fn test_extract_var_path_method() { |
368 | check_assist( | 383 | check_assist( |
369 | introduce_variable, | 384 | extract_variable, |
370 | " | 385 | " |
371 | fn main() { | 386 | fn main() { |
372 | let v = <|>bar.foo()<|>; | 387 | let v = <|>bar.foo()<|>; |
@@ -382,9 +397,9 @@ fn main() { | |||
382 | } | 397 | } |
383 | 398 | ||
384 | #[test] | 399 | #[test] |
385 | fn test_introduce_var_return() { | 400 | fn test_extract_var_return() { |
386 | check_assist( | 401 | check_assist( |
387 | introduce_variable, | 402 | extract_variable, |
388 | " | 403 | " |
389 | fn foo() -> u32 { | 404 | fn foo() -> u32 { |
390 | <|>return 2 + 2<|>; | 405 | <|>return 2 + 2<|>; |
@@ -400,9 +415,9 @@ fn foo() -> u32 { | |||
400 | } | 415 | } |
401 | 416 | ||
402 | #[test] | 417 | #[test] |
403 | fn test_introduce_var_does_not_add_extra_whitespace() { | 418 | fn test_extract_var_does_not_add_extra_whitespace() { |
404 | check_assist( | 419 | check_assist( |
405 | introduce_variable, | 420 | extract_variable, |
406 | " | 421 | " |
407 | fn foo() -> u32 { | 422 | fn foo() -> u32 { |
408 | 423 | ||
@@ -421,7 +436,7 @@ fn foo() -> u32 { | |||
421 | ); | 436 | ); |
422 | 437 | ||
423 | check_assist( | 438 | check_assist( |
424 | introduce_variable, | 439 | extract_variable, |
425 | " | 440 | " |
426 | fn foo() -> u32 { | 441 | fn foo() -> u32 { |
427 | 442 | ||
@@ -438,7 +453,7 @@ fn foo() -> u32 { | |||
438 | ); | 453 | ); |
439 | 454 | ||
440 | check_assist( | 455 | check_assist( |
441 | introduce_variable, | 456 | extract_variable, |
442 | " | 457 | " |
443 | fn foo() -> u32 { | 458 | fn foo() -> u32 { |
444 | let foo = 1; | 459 | let foo = 1; |
@@ -464,9 +479,9 @@ fn foo() -> u32 { | |||
464 | } | 479 | } |
465 | 480 | ||
466 | #[test] | 481 | #[test] |
467 | fn test_introduce_var_break() { | 482 | fn test_extract_var_break() { |
468 | check_assist( | 483 | check_assist( |
469 | introduce_variable, | 484 | extract_variable, |
470 | " | 485 | " |
471 | fn main() { | 486 | fn main() { |
472 | let result = loop { | 487 | let result = loop { |
@@ -486,9 +501,9 @@ fn main() { | |||
486 | } | 501 | } |
487 | 502 | ||
488 | #[test] | 503 | #[test] |
489 | fn test_introduce_var_for_cast() { | 504 | fn test_extract_var_for_cast() { |
490 | check_assist( | 505 | check_assist( |
491 | introduce_variable, | 506 | extract_variable, |
492 | " | 507 | " |
493 | fn main() { | 508 | fn main() { |
494 | let v = <|>0f32 as u32<|>; | 509 | let v = <|>0f32 as u32<|>; |
@@ -504,22 +519,48 @@ fn main() { | |||
504 | } | 519 | } |
505 | 520 | ||
506 | #[test] | 521 | #[test] |
507 | fn test_introduce_var_for_return_not_applicable() { | 522 | fn extract_var_field_shorthand() { |
508 | check_assist_not_applicable(introduce_variable, "fn foo() { <|>return<|>; } "); | 523 | check_assist( |
524 | extract_variable, | ||
525 | r#" | ||
526 | struct S { | ||
527 | foo: i32 | ||
528 | } | ||
529 | |||
530 | fn main() { | ||
531 | S { foo: <|>1 + 1<|> } | ||
532 | } | ||
533 | "#, | ||
534 | r#" | ||
535 | struct S { | ||
536 | foo: i32 | ||
537 | } | ||
538 | |||
539 | fn main() { | ||
540 | let $0foo = 1 + 1; | ||
541 | S { foo } | ||
542 | } | ||
543 | "#, | ||
544 | ) | ||
545 | } | ||
546 | |||
547 | #[test] | ||
548 | fn test_extract_var_for_return_not_applicable() { | ||
549 | check_assist_not_applicable(extract_variable, "fn foo() { <|>return<|>; } "); | ||
509 | } | 550 | } |
510 | 551 | ||
511 | #[test] | 552 | #[test] |
512 | fn test_introduce_var_for_break_not_applicable() { | 553 | fn test_extract_var_for_break_not_applicable() { |
513 | check_assist_not_applicable(introduce_variable, "fn main() { loop { <|>break<|>; }; }"); | 554 | check_assist_not_applicable(extract_variable, "fn main() { loop { <|>break<|>; }; }"); |
514 | } | 555 | } |
515 | 556 | ||
516 | // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic | 557 | // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic |
517 | #[test] | 558 | #[test] |
518 | fn introduce_var_target() { | 559 | fn extract_var_target() { |
519 | check_assist_target(introduce_variable, "fn foo() -> u32 { <|>return 2 + 2<|>; }", "2 + 2"); | 560 | check_assist_target(extract_variable, "fn foo() -> u32 { <|>return 2 + 2<|>; }", "2 + 2"); |
520 | 561 | ||
521 | check_assist_target( | 562 | check_assist_target( |
522 | introduce_variable, | 563 | extract_variable, |
523 | " | 564 | " |
524 | fn main() { | 565 | fn main() { |
525 | let x = true; | 566 | let x = true; |
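Note: beyond the rename, the handler now special-cases record-field shorthand: when the selected expression is the value of a record field, the new variable takes the field's name and the replacement range covers the whole `name: expr` pair, so the result uses the shorthand form. An illustrative before/after in plain Rust, paraphrasing the extract_var_field_shorthand test above (S and foo are just example names):

struct S {
    foo: i32,
}

// Before the assist: the selection covers the field value `1 + 1`.
fn before() -> S {
    S { foo: 1 + 1 }
}

// After the assist: the variable inherits the field name, and the field
// collapses to shorthand instead of producing `foo: foo`.
fn after() -> S {
    let foo = 1 + 1;
    S { foo }
}

fn main() {
    before();
    after();
}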
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index 185428bd5..1745f44a5 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -116,6 +116,7 @@ mod handlers { | |||
116 | mod change_visibility; | 116 | mod change_visibility; |
117 | mod early_return; | 117 | mod early_return; |
118 | mod extract_struct_from_enum_variant; | 118 | mod extract_struct_from_enum_variant; |
119 | mod extract_variable; | ||
119 | mod fill_match_arms; | 120 | mod fill_match_arms; |
120 | mod fix_visibility; | 121 | mod fix_visibility; |
121 | mod flip_binexpr; | 122 | mod flip_binexpr; |
@@ -123,7 +124,6 @@ mod handlers { | |||
123 | mod flip_trait_bound; | 124 | mod flip_trait_bound; |
124 | mod inline_local_variable; | 125 | mod inline_local_variable; |
125 | mod introduce_named_lifetime; | 126 | mod introduce_named_lifetime; |
126 | mod introduce_variable; | ||
127 | mod invert_if; | 127 | mod invert_if; |
128 | mod merge_imports; | 128 | mod merge_imports; |
129 | mod merge_match_arms; | 129 | mod merge_match_arms; |
@@ -157,6 +157,7 @@ mod handlers { | |||
157 | change_visibility::change_visibility, | 157 | change_visibility::change_visibility, |
158 | early_return::convert_to_guarded_return, | 158 | early_return::convert_to_guarded_return, |
159 | extract_struct_from_enum_variant::extract_struct_from_enum_variant, | 159 | extract_struct_from_enum_variant::extract_struct_from_enum_variant, |
160 | extract_variable::extract_variable, | ||
160 | fill_match_arms::fill_match_arms, | 161 | fill_match_arms::fill_match_arms, |
161 | fix_visibility::fix_visibility, | 162 | fix_visibility::fix_visibility, |
162 | flip_binexpr::flip_binexpr, | 163 | flip_binexpr::flip_binexpr, |
@@ -164,7 +165,6 @@ mod handlers { | |||
164 | flip_trait_bound::flip_trait_bound, | 165 | flip_trait_bound::flip_trait_bound, |
165 | inline_local_variable::inline_local_variable, | 166 | inline_local_variable::inline_local_variable, |
166 | introduce_named_lifetime::introduce_named_lifetime, | 167 | introduce_named_lifetime::introduce_named_lifetime, |
167 | introduce_variable::introduce_variable, | ||
168 | invert_if::invert_if, | 168 | invert_if::invert_if, |
169 | merge_imports::merge_imports, | 169 | merge_imports::merge_imports, |
170 | merge_match_arms::merge_match_arms, | 170 | merge_match_arms::merge_match_arms, |
diff --git a/crates/ra_assists/src/tests/generated.rs b/crates/ra_assists/src/tests/generated.rs
index 40a223727..31ea888c5 100644
--- a/crates/ra_assists/src/tests/generated.rs
+++ b/crates/ra_assists/src/tests/generated.rs
@@ -353,6 +353,24 @@ enum A { One(One) } | |||
353 | } | 353 | } |
354 | 354 | ||
355 | #[test] | 355 | #[test] |
356 | fn doctest_extract_variable() { | ||
357 | check_doc_test( | ||
358 | "extract_variable", | ||
359 | r#####" | ||
360 | fn main() { | ||
361 | <|>(1 + 2)<|> * 4; | ||
362 | } | ||
363 | "#####, | ||
364 | r#####" | ||
365 | fn main() { | ||
366 | let $0var_name = (1 + 2); | ||
367 | var_name * 4; | ||
368 | } | ||
369 | "#####, | ||
370 | ) | ||
371 | } | ||
372 | |||
373 | #[test] | ||
356 | fn doctest_fill_match_arms() { | 374 | fn doctest_fill_match_arms() { |
357 | check_doc_test( | 375 | check_doc_test( |
358 | "fill_match_arms", | 376 | "fill_match_arms", |
@@ -492,24 +510,6 @@ impl<'a> Cursor<'a> { | |||
492 | } | 510 | } |
493 | 511 | ||
494 | #[test] | 512 | #[test] |
495 | fn doctest_introduce_variable() { | ||
496 | check_doc_test( | ||
497 | "introduce_variable", | ||
498 | r#####" | ||
499 | fn main() { | ||
500 | <|>(1 + 2)<|> * 4; | ||
501 | } | ||
502 | "#####, | ||
503 | r#####" | ||
504 | fn main() { | ||
505 | let $0var_name = (1 + 2); | ||
506 | var_name * 4; | ||
507 | } | ||
508 | "#####, | ||
509 | ) | ||
510 | } | ||
511 | |||
512 | #[test] | ||
513 | fn doctest_invert_if() { | 513 | fn doctest_invert_if() { |
514 | check_doc_test( | 514 | check_doc_test( |
515 | "invert_if", | 515 | "invert_if", |
diff --git a/crates/ra_hir_def/src/adt.rs b/crates/ra_hir_def/src/adt.rs
index 2bc34d449..4994a2125 100644
--- a/crates/ra_hir_def/src/adt.rs
+++ b/crates/ra_hir_def/src/adt.rs
@@ -8,12 +8,12 @@ use hir_expand::{ | |||
8 | InFile, | 8 | InFile, |
9 | }; | 9 | }; |
10 | use ra_arena::{map::ArenaMap, Arena}; | 10 | use ra_arena::{map::ArenaMap, Arena}; |
11 | use ra_prof::profile; | ||
12 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner, VisibilityOwner}; | 11 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner, VisibilityOwner}; |
13 | 12 | ||
14 | use crate::{ | 13 | use crate::{ |
15 | body::{CfgExpander, LowerCtx}, | 14 | body::{CfgExpander, LowerCtx}, |
16 | db::DefDatabase, | 15 | db::DefDatabase, |
16 | item_tree::{Field, Fields, ItemTree}, | ||
17 | src::HasChildSource, | 17 | src::HasChildSource, |
18 | src::HasSource, | 18 | src::HasSource, |
19 | trace::Trace, | 19 | trace::Trace, |
@@ -22,6 +22,7 @@ use crate::{ | |||
22 | EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, | 22 | EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, |
23 | VariantId, | 23 | VariantId, |
24 | }; | 24 | }; |
25 | use ra_cfg::CfgOptions; | ||
25 | 26 | ||
26 | /// Note that we use `StructData` for unions as well! | 27 | /// Note that we use `StructData` for unions as well! |
27 | #[derive(Debug, Clone, PartialEq, Eq)] | 28 | #[derive(Debug, Clone, PartialEq, Eq)] |
@@ -59,39 +60,48 @@ pub struct FieldData { | |||
59 | 60 | ||
60 | impl StructData { | 61 | impl StructData { |
61 | pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> { | 62 | pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> { |
62 | let src = id.lookup(db).source(db); | 63 | let loc = id.lookup(db); |
64 | let item_tree = db.item_tree(loc.id.file_id); | ||
65 | let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); | ||
63 | 66 | ||
64 | let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); | 67 | let strukt = &item_tree[loc.id.value]; |
65 | let variant_data = | 68 | let variant_data = lower_fields(&item_tree, &cfg_options, &strukt.fields); |
66 | VariantData::new(db, src.map(|s| s.kind()), id.lookup(db).container.module(db)); | 69 | |
67 | let variant_data = Arc::new(variant_data); | 70 | Arc::new(StructData { name: strukt.name.clone(), variant_data: Arc::new(variant_data) }) |
68 | Arc::new(StructData { name, variant_data }) | ||
69 | } | 71 | } |
70 | pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> { | 72 | pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> { |
71 | let src = id.lookup(db).source(db); | 73 | let loc = id.lookup(db); |
72 | let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); | 74 | let item_tree = db.item_tree(loc.id.file_id); |
73 | let variant_data = VariantData::new( | 75 | let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); |
74 | db, | 76 | |
75 | src.map(|s| { | 77 | let union = &item_tree[loc.id.value]; |
76 | s.record_field_def_list() | 78 | let variant_data = lower_fields(&item_tree, &cfg_options, &union.fields); |
77 | .map(ast::StructKind::Record) | 79 | |
78 | .unwrap_or(ast::StructKind::Unit) | 80 | Arc::new(StructData { name: union.name.clone(), variant_data: Arc::new(variant_data) }) |
79 | }), | ||
80 | id.lookup(db).container.module(db), | ||
81 | ); | ||
82 | let variant_data = Arc::new(variant_data); | ||
83 | Arc::new(StructData { name, variant_data }) | ||
84 | } | 81 | } |
85 | } | 82 | } |
86 | 83 | ||
87 | impl EnumData { | 84 | impl EnumData { |
88 | pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> { | 85 | pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> { |
89 | let _p = profile("enum_data_query"); | 86 | let loc = e.lookup(db); |
90 | let src = e.lookup(db).source(db); | 87 | let item_tree = db.item_tree(loc.id.file_id); |
91 | let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); | 88 | let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone(); |
92 | let mut trace = Trace::new_for_arena(); | 89 | |
93 | lower_enum(db, &mut trace, &src, e.lookup(db).container.module(db)); | 90 | let enum_ = &item_tree[loc.id.value]; |
94 | Arc::new(EnumData { name, variants: trace.into_arena() }) | 91 | let mut variants = Arena::new(); |
92 | for var_id in enum_.variants.clone() { | ||
93 | if item_tree.attrs(var_id.into()).is_cfg_enabled(&cfg_options) { | ||
94 | let var = &item_tree[var_id]; | ||
95 | let var_data = lower_fields(&item_tree, &cfg_options, &var.fields); | ||
96 | |||
97 | variants.alloc(EnumVariantData { | ||
98 | name: var.name.clone(), | ||
99 | variant_data: Arc::new(var_data), | ||
100 | }); | ||
101 | } | ||
102 | } | ||
103 | |||
104 | Arc::new(EnumData { name: enum_.name.clone(), variants }) | ||
95 | } | 105 | } |
96 | 106 | ||
97 | pub fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> { | 107 | pub fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> { |
@@ -251,3 +261,35 @@ fn lower_struct( | |||
251 | ast::StructKind::Unit => StructKind::Unit, | 261 | ast::StructKind::Unit => StructKind::Unit, |
252 | } | 262 | } |
253 | } | 263 | } |
264 | |||
265 | fn lower_fields(item_tree: &ItemTree, cfg_options: &CfgOptions, fields: &Fields) -> VariantData { | ||
266 | match fields { | ||
267 | Fields::Record(flds) => { | ||
268 | let mut arena = Arena::new(); | ||
269 | for field_id in flds.clone() { | ||
270 | if item_tree.attrs(field_id.into()).is_cfg_enabled(cfg_options) { | ||
271 | arena.alloc(lower_field(item_tree, &item_tree[field_id])); | ||
272 | } | ||
273 | } | ||
274 | VariantData::Record(arena) | ||
275 | } | ||
276 | Fields::Tuple(flds) => { | ||
277 | let mut arena = Arena::new(); | ||
278 | for field_id in flds.clone() { | ||
279 | if item_tree.attrs(field_id.into()).is_cfg_enabled(cfg_options) { | ||
280 | arena.alloc(lower_field(item_tree, &item_tree[field_id])); | ||
281 | } | ||
282 | } | ||
283 | VariantData::Tuple(arena) | ||
284 | } | ||
285 | Fields::Unit => VariantData::Unit, | ||
286 | } | ||
287 | } | ||
288 | |||
289 | fn lower_field(item_tree: &ItemTree, field: &Field) -> FieldData { | ||
290 | FieldData { | ||
291 | name: field.name.clone(), | ||
292 | type_ref: field.type_ref.clone(), | ||
293 | visibility: item_tree[field.visibility].clone(), | ||
294 | } | ||
295 | } | ||
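Note: struct_data_query, union_data_query and enum_data_query now read their shape from the per-file ItemTree instead of re-walking the source, and lower_fields plus the enum-variant loop skip anything whose attributes fail is_cfg_enabled for the crate's CfgOptions. In user terms, a field or variant behind a disabled cfg simply does not appear in the lowered data. A plain-Rust illustration; the feature name is an example, not one rust-analyzer uses.

struct Config {
    path: String,
    #[cfg(feature = "net")]
    proxy: Option<String>, // skipped by lower_fields when `feature = "net"` is off
}

enum Backend {
    Local,
    #[cfg(feature = "net")]
    Remote { url: String }, // skipped by enum_data_query when the cfg is off
}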
diff --git a/crates/ra_hir_def/src/attr.rs b/crates/ra_hir_def/src/attr.rs
index 197737ffc..e228e2145 100644
--- a/crates/ra_hir_def/src/attr.rs
+++ b/crates/ra_hir_def/src/attr.rs
@@ -208,5 +208,5 @@ where | |||
208 | fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase) -> Attrs { | 208 | fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase) -> Attrs { |
209 | let tree = db.item_tree(id.file_id); | 209 | let tree = db.item_tree(id.file_id); |
210 | let mod_item = N::id_to_mod_item(id.value); | 210 | let mod_item = N::id_to_mod_item(id.value); |
211 | tree.attrs(mod_item).clone() | 211 | tree.attrs(mod_item.into()).clone() |
212 | } | 212 | } |
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index 3ced648e5..a7e2e0982 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -5,7 +5,7 @@ use either::Either; | |||
5 | use hir_expand::{ | 5 | use hir_expand::{ |
6 | hygiene::Hygiene, | 6 | hygiene::Hygiene, |
7 | name::{name, AsName, Name}, | 7 | name::{name, AsName, Name}, |
8 | AstId, HirFileId, MacroDefId, MacroDefKind, | 8 | HirFileId, MacroDefId, MacroDefKind, |
9 | }; | 9 | }; |
10 | use ra_arena::Arena; | 10 | use ra_arena::Arena; |
11 | use ra_syntax::{ | 11 | use ra_syntax::{ |
@@ -27,7 +27,7 @@ use crate::{ | |||
27 | LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, | 27 | LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, |
28 | }, | 28 | }, |
29 | item_scope::BuiltinShadowMode, | 29 | item_scope::BuiltinShadowMode, |
30 | item_tree::{FileItemTreeId, ItemTree, ItemTreeNode}, | 30 | item_tree::{ItemTree, ItemTreeId, ItemTreeNode}, |
31 | path::{GenericArgs, Path}, | 31 | path::{GenericArgs, Path}, |
32 | type_ref::{Mutability, Rawness, TypeRef}, | 32 | type_ref::{Mutability, Rawness, TypeRef}, |
33 | AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId, | 33 | AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId, |
@@ -37,7 +37,7 @@ use crate::{ | |||
37 | use super::{ExprSource, PatSource}; | 37 | use super::{ExprSource, PatSource}; |
38 | use ast::AstChildren; | 38 | use ast::AstChildren; |
39 | use rustc_hash::FxHashMap; | 39 | use rustc_hash::FxHashMap; |
40 | use std::sync::Arc; | 40 | use std::{any::type_name, sync::Arc}; |
41 | 41 | ||
42 | pub(crate) struct LowerCtx { | 42 | pub(crate) struct LowerCtx { |
43 | hygiene: Hygiene, | 43 | hygiene: Hygiene, |
@@ -561,17 +561,30 @@ impl ExprCollector<'_> { | |||
561 | } | 561 | } |
562 | } | 562 | } |
563 | 563 | ||
564 | fn find_inner_item<S: ItemTreeNode>(&self, id: AstId<ast::ModuleItem>) -> FileItemTreeId<S> { | 564 | fn find_inner_item<N: ItemTreeNode>(&self, ast: &N::Source) -> Option<ItemTreeId<N>> { |
565 | let id = self.expander.ast_id(ast); | ||
565 | let tree = &self.item_trees[&id.file_id]; | 566 | let tree = &self.item_trees[&id.file_id]; |
566 | 567 | ||
567 | // FIXME: This probably breaks with `use` items, since they produce multiple item tree nodes | 568 | // FIXME: This probably breaks with `use` items, since they produce multiple item tree nodes |
568 | 569 | ||
569 | // Root file (non-macro). | 570 | // Root file (non-macro). |
570 | tree.all_inner_items() | 571 | let item_tree_id = tree |
572 | .all_inner_items() | ||
571 | .chain(tree.top_level_items().iter().copied()) | 573 | .chain(tree.top_level_items().iter().copied()) |
572 | .filter_map(|mod_item| mod_item.downcast::<S>()) | 574 | .filter_map(|mod_item| mod_item.downcast::<N>()) |
573 | .find(|tree_id| tree[*tree_id].ast_id().upcast() == id.value) | 575 | .find(|tree_id| tree[*tree_id].ast_id().upcast() == id.value.upcast()) |
574 | .unwrap_or_else(|| panic!("couldn't find inner item for {:?}", id)) | 576 | .or_else(|| { |
577 | log::debug!( | ||
578 | "couldn't find inner {} item for {:?} (AST: `{}` - {:?})", | ||
579 | type_name::<N>(), | ||
580 | id, | ||
581 | ast.syntax(), | ||
582 | ast.syntax(), | ||
583 | ); | ||
584 | None | ||
585 | })?; | ||
586 | |||
587 | Some(ItemTreeId::new(id.file_id, item_tree_id)) | ||
575 | } | 588 | } |
576 | 589 | ||
577 | fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId { | 590 | fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId { |
@@ -611,82 +624,45 @@ impl ExprCollector<'_> { | |||
611 | .filter_map(|item| { | 624 | .filter_map(|item| { |
612 | let (def, name): (ModuleDefId, Option<ast::Name>) = match item { | 625 | let (def, name): (ModuleDefId, Option<ast::Name>) = match item { |
613 | ast::ModuleItem::FnDef(def) => { | 626 | ast::ModuleItem::FnDef(def) => { |
614 | let ast_id = self.expander.ast_id(&def); | 627 | let id = self.find_inner_item(&def)?; |
615 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | ||
616 | ( | 628 | ( |
617 | FunctionLoc { container: container.into(), id: ast_id.with_value(id) } | 629 | FunctionLoc { container: container.into(), id }.intern(self.db).into(), |
618 | .intern(self.db) | ||
619 | .into(), | ||
620 | def.name(), | 630 | def.name(), |
621 | ) | 631 | ) |
622 | } | 632 | } |
623 | ast::ModuleItem::TypeAliasDef(def) => { | 633 | ast::ModuleItem::TypeAliasDef(def) => { |
624 | let ast_id = self.expander.ast_id(&def); | 634 | let id = self.find_inner_item(&def)?; |
625 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | ||
626 | ( | 635 | ( |
627 | TypeAliasLoc { container: container.into(), id: ast_id.with_value(id) } | 636 | TypeAliasLoc { container: container.into(), id }.intern(self.db).into(), |
628 | .intern(self.db) | ||
629 | .into(), | ||
630 | def.name(), | 637 | def.name(), |
631 | ) | 638 | ) |
632 | } | 639 | } |
633 | ast::ModuleItem::ConstDef(def) => { | 640 | ast::ModuleItem::ConstDef(def) => { |
634 | let ast_id = self.expander.ast_id(&def); | 641 | let id = self.find_inner_item(&def)?; |
635 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | ||
636 | ( | 642 | ( |
637 | ConstLoc { container: container.into(), id: ast_id.with_value(id) } | 643 | ConstLoc { container: container.into(), id }.intern(self.db).into(), |
638 | .intern(self.db) | ||
639 | .into(), | ||
640 | def.name(), | 644 | def.name(), |
641 | ) | 645 | ) |
642 | } | 646 | } |
643 | ast::ModuleItem::StaticDef(def) => { | 647 | ast::ModuleItem::StaticDef(def) => { |
644 | let ast_id = self.expander.ast_id(&def); | 648 | let id = self.find_inner_item(&def)?; |
645 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | 649 | (StaticLoc { container, id }.intern(self.db).into(), def.name()) |
646 | ( | ||
647 | StaticLoc { container, id: ast_id.with_value(id) } | ||
648 | .intern(self.db) | ||
649 | .into(), | ||
650 | def.name(), | ||
651 | ) | ||
652 | } | 650 | } |
653 | ast::ModuleItem::StructDef(def) => { | 651 | ast::ModuleItem::StructDef(def) => { |
654 | let ast_id = self.expander.ast_id(&def); | 652 | let id = self.find_inner_item(&def)?; |
655 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | 653 | (StructLoc { container, id }.intern(self.db).into(), def.name()) |
656 | ( | ||
657 | StructLoc { container, id: ast_id.with_value(id) } | ||
658 | .intern(self.db) | ||
659 | .into(), | ||
660 | def.name(), | ||
661 | ) | ||
662 | } | 654 | } |
663 | ast::ModuleItem::EnumDef(def) => { | 655 | ast::ModuleItem::EnumDef(def) => { |
664 | let ast_id = self.expander.ast_id(&def); | 656 | let id = self.find_inner_item(&def)?; |
665 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | 657 | (EnumLoc { container, id }.intern(self.db).into(), def.name()) |
666 | ( | ||
667 | EnumLoc { container, id: ast_id.with_value(id) }.intern(self.db).into(), | ||
668 | def.name(), | ||
669 | ) | ||
670 | } | 658 | } |
671 | ast::ModuleItem::UnionDef(def) => { | 659 | ast::ModuleItem::UnionDef(def) => { |
672 | let ast_id = self.expander.ast_id(&def); | 660 | let id = self.find_inner_item(&def)?; |
673 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | 661 | (UnionLoc { container, id }.intern(self.db).into(), def.name()) |
674 | ( | ||
675 | UnionLoc { container, id: ast_id.with_value(id) } | ||
676 | .intern(self.db) | ||
677 | .into(), | ||
678 | def.name(), | ||
679 | ) | ||
680 | } | 662 | } |
681 | ast::ModuleItem::TraitDef(def) => { | 663 | ast::ModuleItem::TraitDef(def) => { |
682 | let ast_id = self.expander.ast_id(&def); | 664 | let id = self.find_inner_item(&def)?; |
683 | let id = self.find_inner_item(ast_id.map(|id| id.upcast())); | 665 | (TraitLoc { container, id }.intern(self.db).into(), def.name()) |
684 | ( | ||
685 | TraitLoc { container, id: ast_id.with_value(id) } | ||
686 | .intern(self.db) | ||
687 | .into(), | ||
688 | def.name(), | ||
689 | ) | ||
690 | } | 666 | } |
691 | ast::ModuleItem::ExternBlock(_) => return None, // FIXME: collect from extern blocks | 667 | ast::ModuleItem::ExternBlock(_) => return None, // FIXME: collect from extern blocks |
692 | ast::ModuleItem::ImplDef(_) | 668 | ast::ModuleItem::ImplDef(_) |
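Note: find_inner_item used to panic when the item tree had no node for an inner item, which can happen for syntactically broken items (see the new broken_inner_item test below); it now logs at debug level and returns None, and each call site propagates that with `?`, so collection drops only the offending item. A minimal sketch of that panic-to-Option pattern, assuming the log crate and with purely illustrative names:

fn find_item(items: &[(&str, u32)], name: &str) -> Option<u32> {
    let found = items.iter().find(|(n, _)| *n == name).map(|(_, id)| *id);
    if found.is_none() {
        // Was a panic before; now just a debug log plus `None`.
        log::debug!("couldn't find inner item for {:?}", name);
    }
    found
}

fn collect_item(items: &[(&str, u32)], name: &str) -> Option<u32> {
    let id = find_item(items, name)?; // `?` skips only this item, not the whole body
    Some(id)
}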
diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs
index 81397b063..99e876683 100644
--- a/crates/ra_hir_def/src/body/scope.rs
+++ b/crates/ra_hir_def/src/body/scope.rs
@@ -337,6 +337,19 @@ fn foo() { | |||
337 | ); | 337 | ); |
338 | } | 338 | } |
339 | 339 | ||
340 | #[test] | ||
341 | fn broken_inner_item() { | ||
342 | do_check( | ||
343 | r" | ||
344 | fn foo() { | ||
345 | trait {} | ||
346 | <|> | ||
347 | } | ||
348 | ", | ||
349 | &[], | ||
350 | ); | ||
351 | } | ||
352 | |||
340 | fn do_check_local_name(ra_fixture: &str, expected_offset: u32) { | 353 | fn do_check_local_name(ra_fixture: &str, expected_offset: u32) { |
341 | let (db, position) = TestDB::with_position(ra_fixture); | 354 | let (db, position) = TestDB::with_position(ra_fixture); |
342 | let file_id = position.file_id; | 355 | let file_id = position.file_id; |
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs
index f9e5701db..282ade2a3 100644
--- a/crates/ra_hir_def/src/data.rs
+++ b/crates/ra_hir_def/src/data.rs
@@ -40,7 +40,7 @@ impl FunctionData { | |||
40 | name: func.name.clone(), | 40 | name: func.name.clone(), |
41 | params: func.params.to_vec(), | 41 | params: func.params.to_vec(), |
42 | ret_type: func.ret_type.clone(), | 42 | ret_type: func.ret_type.clone(), |
43 | attrs: item_tree.attrs(loc.id.value.into()).clone(), | 43 | attrs: item_tree.attrs(ModItem::from(loc.id.value).into()).clone(), |
44 | has_self_param: func.has_self_param, | 44 | has_self_param: func.has_self_param, |
45 | is_unsafe: func.is_unsafe, | 45 | is_unsafe: func.is_unsafe, |
46 | visibility: item_tree[func.visibility].clone(), | 46 | visibility: item_tree[func.visibility].clone(), |
@@ -224,7 +224,7 @@ fn collect_items( | |||
224 | match item { | 224 | match item { |
225 | AssocItem::Function(id) => { | 225 | AssocItem::Function(id) => { |
226 | let item = &item_tree[id]; | 226 | let item = &item_tree[id]; |
227 | let attrs = item_tree.attrs(id.into()); | 227 | let attrs = item_tree.attrs(ModItem::from(id).into()); |
228 | if !attrs.is_cfg_enabled(&cfg_options) { | 228 | if !attrs.is_cfg_enabled(&cfg_options) { |
229 | continue; | 229 | continue; |
230 | } | 230 | } |
diff --git a/crates/ra_hir_def/src/item_tree.rs b/crates/ra_hir_def/src/item_tree.rs
index d7bc64e6c..3e603bd55 100644
--- a/crates/ra_hir_def/src/item_tree.rs
+++ b/crates/ra_hir_def/src/item_tree.rs
@@ -5,6 +5,7 @@ mod lower; | |||
5 | mod tests; | 5 | mod tests; |
6 | 6 | ||
7 | use std::{ | 7 | use std::{ |
8 | any::type_name, | ||
8 | fmt::{self, Debug}, | 9 | fmt::{self, Debug}, |
9 | hash::{Hash, Hasher}, | 10 | hash::{Hash, Hasher}, |
10 | marker::PhantomData, | 11 | marker::PhantomData, |
@@ -178,8 +179,8 @@ impl ItemTree { | |||
178 | self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&Attrs::EMPTY) | 179 | self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&Attrs::EMPTY) |
179 | } | 180 | } |
180 | 181 | ||
181 | pub fn attrs(&self, of: ModItem) -> &Attrs { | 182 | pub fn attrs(&self, of: AttrOwner) -> &Attrs { |
182 | self.attrs.get(&AttrOwner::ModItem(of)).unwrap_or(&Attrs::EMPTY) | 183 | self.attrs.get(&of).unwrap_or(&Attrs::EMPTY) |
183 | } | 184 | } |
184 | 185 | ||
185 | /// Returns the lowered inner items that `ast` corresponds to. | 186 | /// Returns the lowered inner items that `ast` corresponds to. |
@@ -282,15 +283,32 @@ struct ItemTreeData { | |||
282 | } | 283 | } |
283 | 284 | ||
284 | #[derive(Debug, Eq, PartialEq, Hash)] | 285 | #[derive(Debug, Eq, PartialEq, Hash)] |
285 | enum AttrOwner { | 286 | pub enum AttrOwner { |
286 | /// Attributes on an item. | 287 | /// Attributes on an item. |
287 | ModItem(ModItem), | 288 | ModItem(ModItem), |
288 | /// Inner attributes of the source file. | 289 | /// Inner attributes of the source file. |
289 | TopLevel, | 290 | TopLevel, |
291 | |||
292 | Variant(Idx<Variant>), | ||
293 | Field(Idx<Field>), | ||
290 | // FIXME: Store variant and field attrs, and stop reparsing them in `attrs_query`. | 294 | // FIXME: Store variant and field attrs, and stop reparsing them in `attrs_query`. |
291 | } | 295 | } |
292 | 296 | ||
293 | /// Trait implemented by all nodes in the item tree. | 297 | macro_rules! from_attrs { |
298 | ( $( $var:ident($t:ty) ),+ ) => { | ||
299 | $( | ||
300 | impl From<$t> for AttrOwner { | ||
301 | fn from(t: $t) -> AttrOwner { | ||
302 | AttrOwner::$var(t) | ||
303 | } | ||
304 | } | ||
305 | )+ | ||
306 | }; | ||
307 | } | ||
308 | |||
309 | from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>)); | ||
310 | |||
311 | /// Trait implemented by all item nodes in the item tree. | ||
294 | pub trait ItemTreeNode: Clone { | 312 | pub trait ItemTreeNode: Clone { |
295 | type Source: AstNode + Into<ast::ModuleItem>; | 313 | type Source: AstNode + Into<ast::ModuleItem>; |
296 | 314 | ||
@@ -523,7 +541,7 @@ pub struct Enum { | |||
523 | pub name: Name, | 541 | pub name: Name, |
524 | pub visibility: RawVisibilityId, | 542 | pub visibility: RawVisibilityId, |
525 | pub generic_params: GenericParamsId, | 543 | pub generic_params: GenericParamsId, |
526 | pub variants: Range<Idx<Variant>>, | 544 | pub variants: IdRange<Variant>, |
527 | pub ast_id: FileAstId<ast::EnumDef>, | 545 | pub ast_id: FileAstId<ast::EnumDef>, |
528 | } | 546 | } |
529 | 547 | ||
@@ -681,10 +699,48 @@ pub struct Variant { | |||
681 | pub fields: Fields, | 699 | pub fields: Fields, |
682 | } | 700 | } |
683 | 701 | ||
702 | pub struct IdRange<T> { | ||
703 | range: Range<u32>, | ||
704 | _p: PhantomData<T>, | ||
705 | } | ||
706 | |||
707 | impl<T> IdRange<T> { | ||
708 | fn new(range: Range<Idx<T>>) -> Self { | ||
709 | Self { range: range.start.into_raw().into()..range.end.into_raw().into(), _p: PhantomData } | ||
710 | } | ||
711 | } | ||
712 | |||
713 | impl<T> Iterator for IdRange<T> { | ||
714 | type Item = Idx<T>; | ||
715 | fn next(&mut self) -> Option<Self::Item> { | ||
716 | self.range.next().map(|raw| Idx::from_raw(raw.into())) | ||
717 | } | ||
718 | } | ||
719 | |||
720 | impl<T> fmt::Debug for IdRange<T> { | ||
721 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
722 | f.debug_tuple(&format!("IdRange::<{}>", type_name::<T>())).field(&self.range).finish() | ||
723 | } | ||
724 | } | ||
725 | |||
726 | impl<T> Clone for IdRange<T> { | ||
727 | fn clone(&self) -> Self { | ||
728 | Self { range: self.range.clone(), _p: PhantomData } | ||
729 | } | ||
730 | } | ||
731 | |||
732 | impl<T> PartialEq for IdRange<T> { | ||
733 | fn eq(&self, other: &Self) -> bool { | ||
734 | self.range == other.range | ||
735 | } | ||
736 | } | ||
737 | |||
738 | impl<T> Eq for IdRange<T> {} | ||
739 | |||
684 | #[derive(Debug, Clone, PartialEq, Eq)] | 740 | #[derive(Debug, Clone, PartialEq, Eq)] |
685 | pub enum Fields { | 741 | pub enum Fields { |
686 | Record(Range<Idx<Field>>), | 742 | Record(IdRange<Field>), |
687 | Tuple(Range<Idx<Field>>), | 743 | Tuple(IdRange<Field>), |
688 | Unit, | 744 | Unit, |
689 | } | 745 | } |
690 | 746 | ||
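The `IdRange<T>` type above replaces `Range<Idx<T>>` so a struct's fields or an enum's variants can be addressed by a typed, iterable index range without forcing `Clone`/`PartialEq` bounds onto `T` (hence the manual impls instead of derives). A minimal standalone sketch of the pattern, using a simplified stand-in for the arena's `Idx`; names here are illustrative, not the crate's real API:

    use std::{marker::PhantomData, ops::Range};

    // Simplified stand-in for the arena's Idx<T>: a raw u32 tagged with a type.
    struct Idx<T>(u32, PhantomData<T>);

    impl<T> Idx<T> {
        fn from_raw(raw: u32) -> Self { Idx(raw, PhantomData) }
        fn into_raw(self) -> u32 { self.0 }
    }

    // A half-open range of typed indices that can be iterated directly.
    struct IdRange<T> {
        range: Range<u32>,
        _p: PhantomData<T>,
    }

    impl<T> IdRange<T> {
        fn new(range: Range<Idx<T>>) -> Self {
            Self { range: range.start.into_raw()..range.end.into_raw(), _p: PhantomData }
        }
    }

    impl<T> Iterator for IdRange<T> {
        type Item = Idx<T>;
        fn next(&mut self) -> Option<Idx<T>> {
            self.range.next().map(Idx::from_raw)
        }
    }

    struct Field; // marker type, mirrors item_tree::Field

    fn main() {
        let fields = IdRange::<Field>::new(Idx::from_raw(1)..Idx::from_raw(4));
        // Yields Idx::<Field>(1), (2), (3).
        assert_eq!(fields.map(|idx| idx.into_raw()).collect::<Vec<_>>(), vec![1, 2, 3]);
    }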
diff --git a/crates/ra_hir_def/src/item_tree/lower.rs b/crates/ra_hir_def/src/item_tree/lower.rs index f10ad25f7..5149dd141 100644 --- a/crates/ra_hir_def/src/item_tree/lower.rs +++ b/crates/ra_hir_def/src/item_tree/lower.rs | |||
@@ -126,15 +126,15 @@ impl Ctx { | |||
126 | 126 | ||
127 | if !attrs.is_empty() { | 127 | if !attrs.is_empty() { |
128 | for item in items.iter().flat_map(|items| &items.0) { | 128 | for item in items.iter().flat_map(|items| &items.0) { |
129 | self.add_attrs(*item, attrs.clone()); | 129 | self.add_attrs((*item).into(), attrs.clone()); |
130 | } | 130 | } |
131 | } | 131 | } |
132 | 132 | ||
133 | items | 133 | items |
134 | } | 134 | } |
135 | 135 | ||
136 | fn add_attrs(&mut self, item: ModItem, attrs: Attrs) { | 136 | fn add_attrs(&mut self, item: AttrOwner, attrs: Attrs) { |
137 | match self.tree.attrs.entry(AttrOwner::ModItem(item)) { | 137 | match self.tree.attrs.entry(item) { |
138 | Entry::Occupied(mut entry) => { | 138 | Entry::Occupied(mut entry) => { |
139 | *entry.get_mut() = entry.get().merge(attrs); | 139 | *entry.get_mut() = entry.get().merge(attrs); |
140 | } | 140 | } |
@@ -196,15 +196,16 @@ impl Ctx { | |||
196 | } | 196 | } |
197 | } | 197 | } |
198 | 198 | ||
199 | fn lower_record_fields(&mut self, fields: &ast::RecordFieldDefList) -> Range<Idx<Field>> { | 199 | fn lower_record_fields(&mut self, fields: &ast::RecordFieldDefList) -> IdRange<Field> { |
200 | let start = self.next_field_idx(); | 200 | let start = self.next_field_idx(); |
201 | for field in fields.fields() { | 201 | for field in fields.fields() { |
202 | if let Some(data) = self.lower_record_field(&field) { | 202 | if let Some(data) = self.lower_record_field(&field) { |
203 | self.data().fields.alloc(data); | 203 | let idx = self.data().fields.alloc(data); |
204 | self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene)); | ||
204 | } | 205 | } |
205 | } | 206 | } |
206 | let end = self.next_field_idx(); | 207 | let end = self.next_field_idx(); |
207 | start..end | 208 | IdRange::new(start..end) |
208 | } | 209 | } |
209 | 210 | ||
210 | fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> { | 211 | fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> { |
@@ -215,15 +216,16 @@ impl Ctx { | |||
215 | Some(res) | 216 | Some(res) |
216 | } | 217 | } |
217 | 218 | ||
218 | fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldDefList) -> Range<Idx<Field>> { | 219 | fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldDefList) -> IdRange<Field> { |
219 | let start = self.next_field_idx(); | 220 | let start = self.next_field_idx(); |
220 | for (i, field) in fields.fields().enumerate() { | 221 | for (i, field) in fields.fields().enumerate() { |
221 | if let Some(data) = self.lower_tuple_field(i, &field) { | 222 | if let Some(data) = self.lower_tuple_field(i, &field) { |
222 | self.data().fields.alloc(data); | 223 | let idx = self.data().fields.alloc(data); |
224 | self.add_attrs(idx.into(), Attrs::new(&field, &self.hygiene)); | ||
223 | } | 225 | } |
224 | } | 226 | } |
225 | let end = self.next_field_idx(); | 227 | let end = self.next_field_idx(); |
226 | start..end | 228 | IdRange::new(start..end) |
227 | } | 229 | } |
228 | 230 | ||
229 | fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleFieldDef) -> Option<Field> { | 231 | fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleFieldDef) -> Option<Field> { |
@@ -242,7 +244,7 @@ impl Ctx { | |||
242 | Some(record_field_def_list) => { | 244 | Some(record_field_def_list) => { |
243 | self.lower_fields(&StructKind::Record(record_field_def_list)) | 245 | self.lower_fields(&StructKind::Record(record_field_def_list)) |
244 | } | 246 | } |
245 | None => Fields::Record(self.next_field_idx()..self.next_field_idx()), | 247 | None => Fields::Record(IdRange::new(self.next_field_idx()..self.next_field_idx())), |
246 | }; | 248 | }; |
247 | let ast_id = self.source_ast_id_map.ast_id(union); | 249 | let ast_id = self.source_ast_id_map.ast_id(union); |
248 | let res = Union { name, visibility, generic_params, fields, ast_id }; | 250 | let res = Union { name, visibility, generic_params, fields, ast_id }; |
@@ -255,22 +257,23 @@ impl Ctx { | |||
255 | let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_); | 257 | let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_); |
256 | let variants = match &enum_.variant_list() { | 258 | let variants = match &enum_.variant_list() { |
257 | Some(variant_list) => self.lower_variants(variant_list), | 259 | Some(variant_list) => self.lower_variants(variant_list), |
258 | None => self.next_variant_idx()..self.next_variant_idx(), | 260 | None => IdRange::new(self.next_variant_idx()..self.next_variant_idx()), |
259 | }; | 261 | }; |
260 | let ast_id = self.source_ast_id_map.ast_id(enum_); | 262 | let ast_id = self.source_ast_id_map.ast_id(enum_); |
261 | let res = Enum { name, visibility, generic_params, variants, ast_id }; | 263 | let res = Enum { name, visibility, generic_params, variants, ast_id }; |
262 | Some(id(self.data().enums.alloc(res))) | 264 | Some(id(self.data().enums.alloc(res))) |
263 | } | 265 | } |
264 | 266 | ||
265 | fn lower_variants(&mut self, variants: &ast::EnumVariantList) -> Range<Idx<Variant>> { | 267 | fn lower_variants(&mut self, variants: &ast::EnumVariantList) -> IdRange<Variant> { |
266 | let start = self.next_variant_idx(); | 268 | let start = self.next_variant_idx(); |
267 | for variant in variants.variants() { | 269 | for variant in variants.variants() { |
268 | if let Some(data) = self.lower_variant(&variant) { | 270 | if let Some(data) = self.lower_variant(&variant) { |
269 | self.data().variants.alloc(data); | 271 | let idx = self.data().variants.alloc(data); |
272 | self.add_attrs(idx.into(), Attrs::new(&variant, &self.hygiene)); | ||
270 | } | 273 | } |
271 | } | 274 | } |
272 | let end = self.next_variant_idx(); | 275 | let end = self.next_variant_idx(); |
273 | start..end | 276 | IdRange::new(start..end) |
274 | } | 277 | } |
275 | 278 | ||
276 | fn lower_variant(&mut self, variant: &ast::EnumVariant) -> Option<Variant> { | 279 | fn lower_variant(&mut self, variant: &ast::EnumVariant) -> Option<Variant> { |
@@ -419,7 +422,7 @@ impl Ctx { | |||
419 | let attrs = Attrs::new(&item, &this.hygiene); | 422 | let attrs = Attrs::new(&item, &this.hygiene); |
420 | this.collect_inner_items(item.syntax()); | 423 | this.collect_inner_items(item.syntax()); |
421 | this.lower_assoc_item(&item).map(|item| { | 424 | this.lower_assoc_item(&item).map(|item| { |
422 | this.add_attrs(item.into(), attrs); | 425 | this.add_attrs(ModItem::from(item).into(), attrs); |
423 | item | 426 | item |
424 | }) | 427 | }) |
425 | }) | 428 | }) |
@@ -453,7 +456,7 @@ impl Ctx { | |||
453 | self.collect_inner_items(item.syntax()); | 456 | self.collect_inner_items(item.syntax()); |
454 | let assoc = self.lower_assoc_item(&item)?; | 457 | let assoc = self.lower_assoc_item(&item)?; |
455 | let attrs = Attrs::new(&item, &self.hygiene); | 458 | let attrs = Attrs::new(&item, &self.hygiene); |
456 | self.add_attrs(assoc.into(), attrs); | 459 | self.add_attrs(ModItem::from(assoc).into(), attrs); |
457 | Some(assoc) | 460 | Some(assoc) |
458 | }) | 461 | }) |
459 | .collect(); | 462 | .collect(); |
@@ -539,7 +542,7 @@ impl Ctx { | |||
539 | .filter_map(|item| { | 542 | .filter_map(|item| { |
540 | self.collect_inner_items(item.syntax()); | 543 | self.collect_inner_items(item.syntax()); |
541 | let attrs = Attrs::new(&item, &self.hygiene); | 544 | let attrs = Attrs::new(&item, &self.hygiene); |
542 | let id = match item { | 545 | let id: ModItem = match item { |
543 | ast::ExternItem::FnDef(ast) => { | 546 | ast::ExternItem::FnDef(ast) => { |
544 | let func = self.lower_function(&ast)?; | 547 | let func = self.lower_function(&ast)?; |
545 | func.into() | 548 | func.into() |
@@ -549,7 +552,7 @@ impl Ctx { | |||
549 | statik.into() | 552 | statik.into() |
550 | } | 553 | } |
551 | }; | 554 | }; |
552 | self.add_attrs(id, attrs); | 555 | self.add_attrs(id.into(), attrs); |
553 | Some(id) | 556 | Some(id) |
554 | }) | 557 | }) |
555 | .collect() | 558 | .collect() |
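The `self.add_attrs(idx.into(), …)` call sites above lean on the `From` impls generated by `from_attrs!`: the freshly allocated `Idx<Field>` or `Idx<Variant>` converts straight into the `AttrOwner` key. A hedged, self-contained sketch of that shape (types heavily simplified; not the crate's real definitions):

    use std::collections::HashMap;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct Idx(u32); // stand-in for the arena's Idx<Field> / Idx<Variant>

    #[derive(Debug, PartialEq, Eq, Hash)]
    enum AttrOwner {
        ModItem(u32), // stand-in for the real ModItem
        Field(Idx),
    }

    // Same shape as the from_attrs! macro: one From impl per AttrOwner variant,
    // so call sites can hand over any owner with a plain `.into()`.
    macro_rules! from_attrs {
        ( $( $var:ident($t:ty) ),+ ) => {
            $(
                impl From<$t> for AttrOwner {
                    fn from(t: $t) -> AttrOwner {
                        AttrOwner::$var(t)
                    }
                }
            )+
        };
    }

    from_attrs!(ModItem(u32), Field(Idx));

    fn main() {
        let mut attrs: HashMap<AttrOwner, &str> = HashMap::new();
        // Mirrors lowering: allocate a field, then key its attributes by the typed owner.
        let idx = Idx(0);
        attrs.insert(idx.into(), "#[cfg(test)]");
        assert_eq!(attrs.get(&AttrOwner::Field(Idx(0))), Some(&"#[cfg(test)]"));
    }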
diff --git a/crates/ra_hir_def/src/item_tree/tests.rs b/crates/ra_hir_def/src/item_tree/tests.rs index dc035d809..08559fb92 100644 --- a/crates/ra_hir_def/src/item_tree/tests.rs +++ b/crates/ra_hir_def/src/item_tree/tests.rs | |||
@@ -92,7 +92,7 @@ fn print_item_tree(ra_fixture: &str) -> String { | |||
92 | } | 92 | } |
93 | 93 | ||
94 | fn fmt_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) { | 94 | fn fmt_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) { |
95 | let attrs = tree.attrs(item); | 95 | let attrs = tree.attrs(item.into()); |
96 | if !attrs.is_empty() { | 96 | if !attrs.is_empty() { |
97 | format_to!(out, "#[{:?}]\n", attrs); | 97 | format_to!(out, "#[{:?}]\n", attrs); |
98 | } | 98 | } |
@@ -237,13 +237,13 @@ Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generi | |||
237 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }] | 237 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }] |
238 | Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(3), kind: Unit } | 238 | Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(3), kind: Unit } |
239 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }] | 239 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }] |
240 | Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(Idx::<Field>(0)..Idx::<Field>(1)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(4), kind: Tuple } | 240 | Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::<ra_hir_def::item_tree::Field>(0..1)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(4), kind: Tuple } |
241 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }] | 241 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }] |
242 | Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(Idx::<Field>(1)..Idx::<Field>(2)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(5), kind: Record } | 242 | Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(1..2)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::StructDef>(5), kind: Record } |
243 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }] | 243 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }] |
244 | Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: Idx::<Variant>(0)..Idx::<Variant>(1), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::EnumDef>(6) } | 244 | Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::<ra_hir_def::item_tree::Variant>(0..1), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::EnumDef>(6) } |
245 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("un"))] }, input: None }]) }] | 245 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("un"))] }, input: None }]) }] |
246 | Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(Idx::<Field>(3)..Idx::<Field>(4)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::UnionDef>(7) } | 246 | Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(3..4)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::UnionDef>(7) } |
247 | "###); | 247 | "###); |
248 | } | 248 | } |
249 | 249 | ||
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs index 5a0eba437..a35ac1024 100644 --- a/crates/ra_hir_def/src/nameres/collector.rs +++ b/crates/ra_hir_def/src/nameres/collector.rs | |||
@@ -764,7 +764,7 @@ impl ModCollector<'_, '_> { | |||
764 | // `#[macro_use] extern crate` is hoisted to imports macros before collecting | 764 | // `#[macro_use] extern crate` is hoisted to imports macros before collecting |
765 | // any other items. | 765 | // any other items. |
766 | for item in items { | 766 | for item in items { |
767 | if self.is_cfg_enabled(self.item_tree.attrs(*item)) { | 767 | if self.is_cfg_enabled(self.item_tree.attrs((*item).into())) { |
768 | if let ModItem::ExternCrate(id) = item { | 768 | if let ModItem::ExternCrate(id) = item { |
769 | let import = self.item_tree[*id].clone(); | 769 | let import = self.item_tree[*id].clone(); |
770 | if import.is_macro_use { | 770 | if import.is_macro_use { |
@@ -775,7 +775,7 @@ impl ModCollector<'_, '_> { | |||
775 | } | 775 | } |
776 | 776 | ||
777 | for &item in items { | 777 | for &item in items { |
778 | let attrs = self.item_tree.attrs(item); | 778 | let attrs = self.item_tree.attrs(item.into()); |
779 | if self.is_cfg_enabled(attrs) { | 779 | if self.is_cfg_enabled(attrs) { |
780 | let module = | 780 | let module = |
781 | ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id }; | 781 | ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id }; |
diff --git a/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs b/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs index e9a5e4cba..753684201 100644 --- a/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs +++ b/crates/ra_hir_def/src/nameres/tests/mod_resolution.rs | |||
@@ -335,6 +335,22 @@ fn module_resolution_relative_path_2() { | |||
335 | } | 335 | } |
336 | 336 | ||
337 | #[test] | 337 | #[test] |
338 | fn module_resolution_relative_path_outside_root() { | ||
339 | let map = def_map( | ||
340 | r###" | ||
341 | //- /main.rs | ||
342 | |||
343 | #[path="../../../../../outside.rs"] | ||
344 | mod foo; | ||
345 | "###, | ||
346 | ); | ||
347 | |||
348 | assert_snapshot!(map, @r###" | ||
349 | ⋮crate | ||
350 | "###); | ||
351 | } | ||
352 | |||
353 | #[test] | ||
338 | fn module_resolution_explicit_path_mod_rs_2() { | 354 | fn module_resolution_explicit_path_mod_rs_2() { |
339 | let map = def_map( | 355 | let map = def_map( |
340 | r###" | 356 | r###" |
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs index 23de2bd6b..4dd4f9802 100644 --- a/crates/ra_hir_ty/src/infer/pat.rs +++ b/crates/ra_hir_ty/src/infer/pat.rs | |||
@@ -184,7 +184,7 @@ impl<'a> InferenceContext<'a> { | |||
184 | self.write_pat_ty(pat, bound_ty); | 184 | self.write_pat_ty(pat, bound_ty); |
185 | return inner_ty; | 185 | return inner_ty; |
186 | } | 186 | } |
187 | Pat::Slice { prefix, slice: _slice, suffix } => { | 187 | Pat::Slice { prefix, slice, suffix } => { |
188 | let (container_ty, elem_ty) = match &expected { | 188 | let (container_ty, elem_ty) = match &expected { |
189 | ty_app!(TypeCtor::Array, st) => (TypeCtor::Array, st.as_single().clone()), | 189 | ty_app!(TypeCtor::Array, st) => (TypeCtor::Array, st.as_single().clone()), |
190 | ty_app!(TypeCtor::Slice, st) => (TypeCtor::Slice, st.as_single().clone()), | 190 | ty_app!(TypeCtor::Slice, st) => (TypeCtor::Slice, st.as_single().clone()), |
@@ -195,7 +195,12 @@ impl<'a> InferenceContext<'a> { | |||
195 | self.infer_pat(*pat_id, &elem_ty, default_bm); | 195 | self.infer_pat(*pat_id, &elem_ty, default_bm); |
196 | } | 196 | } |
197 | 197 | ||
198 | Ty::apply_one(container_ty, elem_ty) | 198 | let pat_ty = Ty::apply_one(container_ty, elem_ty); |
199 | if let Some(slice_pat_id) = slice { | ||
200 | self.infer_pat(*slice_pat_id, &pat_ty, default_bm); | ||
201 | } | ||
202 | |||
203 | pat_ty | ||
199 | } | 204 | } |
200 | Pat::Wild => expected.clone(), | 205 | Pat::Wild => expected.clone(), |
201 | Pat::Range { start, end } => { | 206 | Pat::Range { start, end } => { |
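The extra `infer_pat` call above gives the `slice` sub-pattern (the `rest @ ..` part) the container's own type, so a `tail @ ..` binding on a `&[i32]` slice is inferred as `&[i32]` instead of being left unknown. A small user-level example of the kind of code this now types correctly (plain Rust, independent of rust-analyzer internals):

    fn sum_tail(params: &[i32]) -> i32 {
        match params {
            // `head` is inferred as &i32; `tail` is now inferred as &[i32].
            [head, tail @ ..] => *head + tail.iter().sum::<i32>(),
            [] => 0,
        }
    }

    fn main() {
        assert_eq!(sum_tail(&[1, 2, 3]), 6);
    }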
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs index 8b59a8bd6..c19519cf1 100644 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ b/crates/ra_hir_ty/src/method_resolution.rs | |||
@@ -281,6 +281,7 @@ pub fn iterate_method_candidates<T>( | |||
281 | name, | 281 | name, |
282 | mode, | 282 | mode, |
283 | &mut |ty, item| { | 283 | &mut |ty, item| { |
284 | assert!(slot.is_none()); | ||
284 | slot = callback(ty, item); | 285 | slot = callback(ty, item); |
285 | slot.is_some() | 286 | slot.is_some() |
286 | }, | 287 | }, |
@@ -288,7 +289,7 @@ pub fn iterate_method_candidates<T>( | |||
288 | slot | 289 | slot |
289 | } | 290 | } |
290 | 291 | ||
291 | pub fn iterate_method_candidates_impl( | 292 | fn iterate_method_candidates_impl( |
292 | ty: &Canonical<Ty>, | 293 | ty: &Canonical<Ty>, |
293 | db: &dyn HirDatabase, | 294 | db: &dyn HirDatabase, |
294 | env: Arc<TraitEnvironment>, | 295 | env: Arc<TraitEnvironment>, |
diff --git a/crates/ra_hir_ty/src/tests/patterns.rs b/crates/ra_hir_ty/src/tests/patterns.rs index e5ef241ca..f937426bd 100644 --- a/crates/ra_hir_ty/src/tests/patterns.rs +++ b/crates/ra_hir_ty/src/tests/patterns.rs | |||
@@ -627,3 +627,28 @@ fn test() { | |||
627 | "### | 627 | "### |
628 | ); | 628 | ); |
629 | } | 629 | } |
630 | |||
631 | #[test] | ||
632 | fn slice_tail_pattern() { | ||
633 | assert_snapshot!( | ||
634 | infer(r#" | ||
635 | fn foo(params: &[i32]) { | ||
636 | match params { | ||
637 | [head, tail @ ..] => { | ||
638 | } | ||
639 | } | ||
640 | } | ||
641 | "#), | ||
642 | @r###" | ||
643 | 7..13 'params': &[i32] | ||
644 | 23..92 '{ ... } }': () | ||
645 | 29..90 'match ... }': () | ||
646 | 35..41 'params': &[i32] | ||
647 | 52..69 '[head,... @ ..]': [i32] | ||
648 | 53..57 'head': &i32 | ||
649 | 59..68 'tail @ ..': &[i32] | ||
650 | 66..68 '..': [i32] | ||
651 | 73..84 '{ }': () | ||
652 | "### | ||
653 | ); | ||
654 | } | ||
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs index eedaa27ba..aa37326df 100644 --- a/crates/ra_hir_ty/src/tests/regression.rs +++ b/crates/ra_hir_ty/src/tests/regression.rs | |||
@@ -500,6 +500,8 @@ fn foo(params: &[usize]) { | |||
500 | 31..78 'match ... }': () | 500 | 31..78 'match ... }': () |
501 | 37..43 'params': &[usize] | 501 | 37..43 'params': &[usize] |
502 | 54..66 '[ps @ .., _]': [usize] | 502 | 54..66 '[ps @ .., _]': [usize] |
503 | 55..62 'ps @ ..': &[usize] | ||
504 | 60..62 '..': [usize] | ||
503 | 64..65 '_': usize | 505 | 64..65 '_': usize |
504 | 70..72 '{}': () | 506 | 70..72 '{}': () |
505 | "### | 507 | "### |
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs index 4f59b0a23..d797f2cc9 100644 --- a/crates/ra_parser/src/parser.rs +++ b/crates/ra_parser/src/parser.rs | |||
@@ -127,17 +127,24 @@ impl<'t> Parser<'t> { | |||
127 | 127 | ||
128 | fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool { | 128 | fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool { |
129 | let t1 = self.token_source.lookahead_nth(n); | 129 | let t1 = self.token_source.lookahead_nth(n); |
130 | if t1.kind != k1 || !t1.is_jointed_to_next { | ||
131 | return false; | ||
132 | } | ||
130 | let t2 = self.token_source.lookahead_nth(n + 1); | 133 | let t2 = self.token_source.lookahead_nth(n + 1); |
131 | t1.kind == k1 && t1.is_jointed_to_next && t2.kind == k2 | 134 | t2.kind == k2 |
132 | } | 135 | } |
133 | 136 | ||
134 | fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool { | 137 | fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool { |
135 | let t1 = self.token_source.lookahead_nth(n); | 138 | let t1 = self.token_source.lookahead_nth(n); |
139 | if t1.kind != k1 || !t1.is_jointed_to_next { | ||
140 | return false; | ||
141 | } | ||
136 | let t2 = self.token_source.lookahead_nth(n + 1); | 142 | let t2 = self.token_source.lookahead_nth(n + 1); |
143 | if t2.kind != k2 || !t2.is_jointed_to_next { | ||
144 | return false; | ||
145 | } | ||
137 | let t3 = self.token_source.lookahead_nth(n + 2); | 146 | let t3 = self.token_source.lookahead_nth(n + 2); |
138 | (t1.kind == k1 && t1.is_jointed_to_next) | 147 | t3.kind == k3 |
139 | && (t2.kind == k2 && t2.is_jointed_to_next) | ||
140 | && t3.kind == k3 | ||
141 | } | 148 | } |
142 | 149 | ||
143 | /// Checks if the current token is in `kinds`. | 150 | /// Checks if the current token is in `kinds`. |
diff --git a/crates/ra_toolchain/src/lib.rs b/crates/ra_toolchain/src/lib.rs index 3d2865e09..9916e52c4 100644 --- a/crates/ra_toolchain/src/lib.rs +++ b/crates/ra_toolchain/src/lib.rs | |||
@@ -15,6 +15,10 @@ pub fn rustup() -> PathBuf { | |||
15 | get_path_for_executable("rustup") | 15 | get_path_for_executable("rustup") |
16 | } | 16 | } |
17 | 17 | ||
18 | pub fn rustfmt() -> PathBuf { | ||
19 | get_path_for_executable("rustfmt") | ||
20 | } | ||
21 | |||
18 | /// Return a `PathBuf` to use for the given executable. | 22 | /// Return a `PathBuf` to use for the given executable. |
19 | /// | 23 | /// |
20 | /// E.g., `get_path_for_executable("cargo")` may return just `cargo` if that | 24 | /// E.g., `get_path_for_executable("cargo")` may return just `cargo` if that |
@@ -42,22 +46,23 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { | |||
42 | path.push(".cargo"); | 46 | path.push(".cargo"); |
43 | path.push("bin"); | 47 | path.push("bin"); |
44 | path.push(executable_name); | 48 | path.push(executable_name); |
45 | if path.is_file() { | 49 | if let Some(path) = probe(path) { |
46 | return path; | 50 | return path; |
47 | } | 51 | } |
48 | } | 52 | } |
53 | |||
49 | executable_name.into() | 54 | executable_name.into() |
50 | } | 55 | } |
51 | 56 | ||
52 | fn lookup_in_path(exec: &str) -> bool { | 57 | fn lookup_in_path(exec: &str) -> bool { |
53 | let paths = env::var_os("PATH").unwrap_or_default(); | 58 | let paths = env::var_os("PATH").unwrap_or_default(); |
54 | let mut candidates = env::split_paths(&paths).flat_map(|path| { | 59 | env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some() |
55 | let candidate = path.join(&exec); | 60 | } |
56 | let with_exe = match env::consts::EXE_EXTENSION { | 61 | |
57 | "" => None, | 62 | fn probe(path: PathBuf) -> Option<PathBuf> { |
58 | it => Some(candidate.with_extension(it)), | 63 | let with_extension = match env::consts::EXE_EXTENSION { |
59 | }; | 64 | "" => None, |
60 | iter::once(candidate).chain(with_exe) | 65 | it => Some(path.with_extension(it)), |
61 | }); | 66 | }; |
62 | candidates.any(|it| it.is_file()) | 67 | iter::once(path).chain(with_extension).find(|it| it.is_file()) |
63 | } | 68 | } |
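The new `probe` helper checks both the bare path and the variant with the platform's `EXE_EXTENSION` appended (`.exe` on Windows, nothing on Unix), and `lookup_in_path`/`get_path_for_executable` both reuse it. A hedged standalone sketch of the same idea (simplified; not the crate's exact code):

    use std::{env, iter, path::PathBuf};

    // First existing candidate wins: the path as given, or the path with the
    // platform's executable extension appended.
    fn probe(path: PathBuf) -> Option<PathBuf> {
        let with_extension = match env::consts::EXE_EXTENSION {
            "" => None,
            ext => Some(path.with_extension(ext)),
        };
        iter::once(path).chain(with_extension).find(|it| it.is_file())
    }

    fn main() {
        // Mirrors lookup_in_path: walk PATH and probe each candidate in turn.
        let paths = env::var_os("PATH").unwrap_or_default();
        let rustfmt = env::split_paths(&paths).map(|p| p.join("rustfmt")).find_map(probe);
        println!("rustfmt on PATH: {:?}", rustfmt);
    }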
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 08c67ddd0..122a1605f 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -41,6 +41,7 @@ ra_text_edit = { path = "../ra_text_edit" } | |||
41 | vfs = { path = "../vfs" } | 41 | vfs = { path = "../vfs" } |
42 | vfs-notify = { path = "../vfs-notify" } | 42 | vfs-notify = { path = "../vfs-notify" } |
43 | ra_cfg = { path = "../ra_cfg"} | 43 | ra_cfg = { path = "../ra_cfg"} |
44 | ra_toolchain = { path = "../ra_toolchain" } | ||
44 | 45 | ||
45 | # This should only be used in CLI | 46 | # This should only be used in CLI |
46 | ra_db = { path = "../ra_db" } | 47 | ra_db = { path = "../ra_db" } |
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index c5cf5ff27..d8677c231 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs | |||
@@ -9,7 +9,7 @@ use ra_ide::{AnalysisChange, AnalysisHost}; | |||
9 | use ra_project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; | 9 | use ra_project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; |
10 | use vfs::{loader::Handle, AbsPath}; | 10 | use vfs::{loader::Handle, AbsPath}; |
11 | 11 | ||
12 | use crate::global_state::{ProjectFolders, SourceRootConfig}; | 12 | use crate::reload::{ProjectFolders, SourceRootConfig}; |
13 | 13 | ||
14 | pub fn load_cargo( | 14 | pub fn load_cargo( |
15 | root: &Path, | 15 | root: &Path, |
@@ -54,7 +54,7 @@ pub fn load_cargo( | |||
54 | Ok((host, vfs)) | 54 | Ok((host, vfs)) |
55 | } | 55 | } |
56 | 56 | ||
57 | pub(crate) fn load( | 57 | fn load( |
58 | crate_graph: CrateGraph, | 58 | crate_graph: CrateGraph, |
59 | source_root_config: SourceRootConfig, | 59 | source_root_config: SourceRootConfig, |
60 | vfs: &mut vfs::Vfs, | 60 | vfs: &mut vfs::Vfs, |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 435bbbb6b..6b17ce18b 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -30,7 +30,7 @@ pub struct Config { | |||
30 | 30 | ||
31 | pub cargo: CargoConfig, | 31 | pub cargo: CargoConfig, |
32 | pub rustfmt: RustfmtConfig, | 32 | pub rustfmt: RustfmtConfig, |
33 | pub check: Option<FlycheckConfig>, | 33 | pub flycheck: Option<FlycheckConfig>, |
34 | 34 | ||
35 | pub inlay_hints: InlayHintsConfig, | 35 | pub inlay_hints: InlayHintsConfig, |
36 | pub completion: CompletionConfig, | 36 | pub completion: CompletionConfig, |
@@ -147,7 +147,7 @@ impl Config { | |||
147 | 147 | ||
148 | cargo: CargoConfig::default(), | 148 | cargo: CargoConfig::default(), |
149 | rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() }, | 149 | rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() }, |
150 | check: Some(FlycheckConfig::CargoCommand { | 150 | flycheck: Some(FlycheckConfig::CargoCommand { |
151 | command: "check".to_string(), | 151 | command: "check".to_string(), |
152 | all_targets: true, | 152 | all_targets: true, |
153 | all_features: false, | 153 | all_features: false, |
@@ -227,14 +227,14 @@ impl Config { | |||
227 | 227 | ||
228 | if let Some(false) = get(value, "/checkOnSave/enable") { | 228 | if let Some(false) = get(value, "/checkOnSave/enable") { |
229 | // check is disabled | 229 | // check is disabled |
230 | self.check = None; | 230 | self.flycheck = None; |
231 | } else { | 231 | } else { |
232 | // check is enabled | 232 | // check is enabled |
233 | match get::<Vec<String>>(value, "/checkOnSave/overrideCommand") { | 233 | match get::<Vec<String>>(value, "/checkOnSave/overrideCommand") { |
234 | // first see if the user has completely overridden the command | 234 | // first see if the user has completely overridden the command |
235 | Some(mut args) if !args.is_empty() => { | 235 | Some(mut args) if !args.is_empty() => { |
236 | let command = args.remove(0); | 236 | let command = args.remove(0); |
237 | self.check = Some(FlycheckConfig::CustomCommand { | 237 | self.flycheck = Some(FlycheckConfig::CustomCommand { |
238 | command, | 238 | command, |
239 | args, | 239 | args, |
240 | }); | 240 | }); |
@@ -242,7 +242,7 @@ impl Config { | |||
242 | // otherwise configure command customizations | 242 | // otherwise configure command customizations |
243 | _ => { | 243 | _ => { |
244 | if let Some(FlycheckConfig::CargoCommand { command, extra_args, all_targets, all_features, features }) | 244 | if let Some(FlycheckConfig::CargoCommand { command, extra_args, all_targets, all_features, features }) |
245 | = &mut self.check | 245 | = &mut self.flycheck |
246 | { | 246 | { |
247 | set(value, "/checkOnSave/extraArgs", extra_args); | 247 | set(value, "/checkOnSave/extraArgs", extra_args); |
248 | set(value, "/checkOnSave/command", command); | 248 | set(value, "/checkOnSave/command", command); |
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 290609e7f..1cf50b677 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs | |||
@@ -1,14 +1,14 @@ | |||
1 | //! Book keeping to keep diagnostics easily in sync with the client. | 1 | //! Book keeping to keep diagnostics easily in sync with the client. |
2 | pub(crate) mod to_proto; | 2 | pub(crate) mod to_proto; |
3 | 3 | ||
4 | use std::{collections::HashMap, sync::Arc}; | 4 | use std::{collections::HashMap, mem, sync::Arc}; |
5 | 5 | ||
6 | use lsp_types::{Diagnostic, Range}; | ||
7 | use ra_ide::FileId; | 6 | use ra_ide::FileId; |
7 | use rustc_hash::FxHashSet; | ||
8 | 8 | ||
9 | use crate::lsp_ext; | 9 | use crate::lsp_ext; |
10 | 10 | ||
11 | pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; | 11 | pub(crate) type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; |
12 | 12 | ||
13 | #[derive(Debug, Default, Clone)] | 13 | #[derive(Debug, Default, Clone)] |
14 | pub struct DiagnosticsConfig { | 14 | pub struct DiagnosticsConfig { |
@@ -17,35 +17,29 @@ pub struct DiagnosticsConfig { | |||
17 | } | 17 | } |
18 | 18 | ||
19 | #[derive(Debug, Default, Clone)] | 19 | #[derive(Debug, Default, Clone)] |
20 | pub struct DiagnosticCollection { | 20 | pub(crate) struct DiagnosticCollection { |
21 | pub native: HashMap<FileId, Vec<Diagnostic>>, | 21 | pub(crate) native: HashMap<FileId, Vec<lsp_types::Diagnostic>>, |
22 | pub check: HashMap<FileId, Vec<Diagnostic>>, | 22 | pub(crate) check: HashMap<FileId, Vec<lsp_types::Diagnostic>>, |
23 | pub check_fixes: CheckFixes, | 23 | pub(crate) check_fixes: CheckFixes, |
24 | changes: FxHashSet<FileId>, | ||
24 | } | 25 | } |
25 | 26 | ||
26 | #[derive(Debug, Clone)] | 27 | #[derive(Debug, Clone)] |
27 | pub struct Fix { | 28 | pub(crate) struct Fix { |
28 | pub range: Range, | 29 | pub(crate) range: lsp_types::Range, |
29 | pub action: lsp_ext::CodeAction, | 30 | pub(crate) action: lsp_ext::CodeAction, |
30 | } | ||
31 | |||
32 | #[derive(Debug)] | ||
33 | pub enum DiagnosticTask { | ||
34 | ClearCheck, | ||
35 | AddCheck(FileId, Diagnostic, Vec<lsp_ext::CodeAction>), | ||
36 | SetNative(FileId, Vec<Diagnostic>), | ||
37 | } | 31 | } |
38 | 32 | ||
39 | impl DiagnosticCollection { | 33 | impl DiagnosticCollection { |
40 | pub fn clear_check(&mut self) -> Vec<FileId> { | 34 | pub(crate) fn clear_check(&mut self) { |
41 | Arc::make_mut(&mut self.check_fixes).clear(); | 35 | Arc::make_mut(&mut self.check_fixes).clear(); |
42 | self.check.drain().map(|(key, _value)| key).collect() | 36 | self.changes.extend(self.check.drain().map(|(key, _value)| key)) |
43 | } | 37 | } |
44 | 38 | ||
45 | pub fn add_check_diagnostic( | 39 | pub(crate) fn add_check_diagnostic( |
46 | &mut self, | 40 | &mut self, |
47 | file_id: FileId, | 41 | file_id: FileId, |
48 | diagnostic: Diagnostic, | 42 | diagnostic: lsp_types::Diagnostic, |
49 | fixes: Vec<lsp_ext::CodeAction>, | 43 | fixes: Vec<lsp_ext::CodeAction>, |
50 | ) { | 44 | ) { |
51 | let diagnostics = self.check.entry(file_id).or_default(); | 45 | let diagnostics = self.check.entry(file_id).or_default(); |
@@ -61,34 +55,36 @@ impl DiagnosticCollection { | |||
61 | .or_default() | 55 | .or_default() |
62 | .extend(fixes.into_iter().map(|action| Fix { range: diagnostic.range, action })); | 56 | .extend(fixes.into_iter().map(|action| Fix { range: diagnostic.range, action })); |
63 | diagnostics.push(diagnostic); | 57 | diagnostics.push(diagnostic); |
58 | self.changes.insert(file_id); | ||
64 | } | 59 | } |
65 | 60 | ||
66 | pub fn set_native_diagnostics(&mut self, file_id: FileId, diagnostics: Vec<Diagnostic>) { | 61 | pub(crate) fn set_native_diagnostics( |
62 | &mut self, | ||
63 | file_id: FileId, | ||
64 | diagnostics: Vec<lsp_types::Diagnostic>, | ||
65 | ) { | ||
67 | self.native.insert(file_id, diagnostics); | 66 | self.native.insert(file_id, diagnostics); |
67 | self.changes.insert(file_id); | ||
68 | } | 68 | } |
69 | 69 | ||
70 | pub fn diagnostics_for(&self, file_id: FileId) -> impl Iterator<Item = &Diagnostic> { | 70 | pub(crate) fn diagnostics_for( |
71 | &self, | ||
72 | file_id: FileId, | ||
73 | ) -> impl Iterator<Item = &lsp_types::Diagnostic> { | ||
71 | let native = self.native.get(&file_id).into_iter().flatten(); | 74 | let native = self.native.get(&file_id).into_iter().flatten(); |
72 | let check = self.check.get(&file_id).into_iter().flatten(); | 75 | let check = self.check.get(&file_id).into_iter().flatten(); |
73 | native.chain(check) | 76 | native.chain(check) |
74 | } | 77 | } |
75 | 78 | ||
76 | pub fn handle_task(&mut self, task: DiagnosticTask) -> Vec<FileId> { | 79 | pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> { |
77 | match task { | 80 | if self.changes.is_empty() { |
78 | DiagnosticTask::ClearCheck => self.clear_check(), | 81 | return None; |
79 | DiagnosticTask::AddCheck(file_id, diagnostic, fixes) => { | ||
80 | self.add_check_diagnostic(file_id, diagnostic, fixes); | ||
81 | vec![file_id] | ||
82 | } | ||
83 | DiagnosticTask::SetNative(file_id, diagnostics) => { | ||
84 | self.set_native_diagnostics(file_id, diagnostics); | ||
85 | vec![file_id] | ||
86 | } | ||
87 | } | 82 | } |
83 | Some(mem::take(&mut self.changes)) | ||
88 | } | 84 | } |
89 | } | 85 | } |
90 | 86 | ||
91 | fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool { | 87 | fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagnostic) -> bool { |
92 | left.source == right.source | 88 | left.source == right.source |
93 | && left.severity == right.severity | 89 | && left.severity == right.severity |
94 | && left.range == right.range | 90 | && left.range == right.range |
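Instead of returning per-call lists of affected files, `DiagnosticCollection` now accumulates a `changes` set and the consumer drains it once per turn with `take_changes`. A hedged sketch of that consumption pattern (types reduced to bare IDs; not the actual main-loop code):

    use std::collections::HashSet;
    use std::mem;

    #[derive(Default)]
    struct DiagnosticCollection {
        changes: HashSet<u32>, // FileIds whose diagnostics changed since the last turn
    }

    impl DiagnosticCollection {
        fn set_native_diagnostics(&mut self, file_id: u32 /*, diagnostics: ... */) {
            // ...store the diagnostics...
            self.changes.insert(file_id);
        }

        // Hand back the accumulated dirty set, leaving it empty for the next turn.
        fn take_changes(&mut self) -> Option<HashSet<u32>> {
            if self.changes.is_empty() {
                return None;
            }
            Some(mem::take(&mut self.changes))
        }
    }

    fn main() {
        let mut diagnostics = DiagnosticCollection::default();
        diagnostics.set_native_diagnostics(1);
        diagnostics.set_native_diagnostics(2);

        // Once per main-loop turn: publish only files whose diagnostics changed.
        if let Some(changed) = diagnostics.take_changes() {
            for file_id in changed {
                println!("publishDiagnostics for file {}", file_id);
            }
        }
        assert!(diagnostics.take_changes().is_none());
    }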
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index f379f5ed0..3eed118a9 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs | |||
@@ -167,9 +167,9 @@ fn map_rust_child_diagnostic( | |||
167 | 167 | ||
168 | #[derive(Debug)] | 168 | #[derive(Debug)] |
169 | pub(crate) struct MappedRustDiagnostic { | 169 | pub(crate) struct MappedRustDiagnostic { |
170 | pub location: Location, | 170 | pub(crate) location: Location, |
171 | pub diagnostic: Diagnostic, | 171 | pub(crate) diagnostic: Diagnostic, |
172 | pub fixes: Vec<lsp_ext::CodeAction>, | 172 | pub(crate) fixes: Vec<lsp_ext::CodeAction>, |
173 | } | 173 | } |
174 | 174 | ||
175 | /// Converts a Rust root diagnostic to LSP form | 175 | /// Converts a Rust root diagnostic to LSP form |
diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs new file mode 100644 index 000000000..891fdb96d --- /dev/null +++ b/crates/rust-analyzer/src/dispatch.rs | |||
@@ -0,0 +1,170 @@ | |||
1 | //! A visitor for downcasting arbitrary requests (JSON) into a specific type. | ||
2 | use std::panic; | ||
3 | |||
4 | use serde::{de::DeserializeOwned, Serialize}; | ||
5 | |||
6 | use crate::{ | ||
7 | global_state::{GlobalState, GlobalStateSnapshot}, | ||
8 | lsp_utils::is_canceled, | ||
9 | main_loop::Task, | ||
10 | LspError, Result, | ||
11 | }; | ||
12 | |||
13 | pub(crate) struct RequestDispatcher<'a> { | ||
14 | pub(crate) req: Option<lsp_server::Request>, | ||
15 | pub(crate) global_state: &'a mut GlobalState, | ||
16 | } | ||
17 | |||
18 | impl<'a> RequestDispatcher<'a> { | ||
19 | /// Dispatches the request onto the current thread | ||
20 | pub(crate) fn on_sync<R>( | ||
21 | &mut self, | ||
22 | f: fn(&mut GlobalState, R::Params) -> Result<R::Result>, | ||
23 | ) -> Result<&mut Self> | ||
24 | where | ||
25 | R: lsp_types::request::Request + 'static, | ||
26 | R::Params: DeserializeOwned + panic::UnwindSafe + 'static, | ||
27 | R::Result: Serialize + 'static, | ||
28 | { | ||
29 | let (id, params) = match self.parse::<R>() { | ||
30 | Some(it) => it, | ||
31 | None => { | ||
32 | return Ok(self); | ||
33 | } | ||
34 | }; | ||
35 | let world = panic::AssertUnwindSafe(&mut *self.global_state); | ||
36 | let response = panic::catch_unwind(move || { | ||
37 | let result = f(world.0, params); | ||
38 | result_to_response::<R>(id, result) | ||
39 | }) | ||
40 | .map_err(|_| format!("sync task {:?} panicked", R::METHOD))?; | ||
41 | self.global_state.respond(response); | ||
42 | Ok(self) | ||
43 | } | ||
44 | |||
45 | /// Dispatches the request onto the thread pool | ||
46 | pub(crate) fn on<R>( | ||
47 | &mut self, | ||
48 | f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, | ||
49 | ) -> Result<&mut Self> | ||
50 | where | ||
51 | R: lsp_types::request::Request + 'static, | ||
52 | R::Params: DeserializeOwned + Send + 'static, | ||
53 | R::Result: Serialize + 'static, | ||
54 | { | ||
55 | let (id, params) = match self.parse::<R>() { | ||
56 | Some(it) => it, | ||
57 | None => { | ||
58 | return Ok(self); | ||
59 | } | ||
60 | }; | ||
61 | |||
62 | self.global_state.task_pool.handle.spawn({ | ||
63 | let world = self.global_state.snapshot(); | ||
64 | move || { | ||
65 | let result = f(world, params); | ||
66 | Task::Response(result_to_response::<R>(id, result)) | ||
67 | } | ||
68 | }); | ||
69 | |||
70 | Ok(self) | ||
71 | } | ||
72 | |||
73 | pub(crate) fn finish(&mut self) { | ||
74 | if let Some(req) = self.req.take() { | ||
75 | log::error!("unknown request: {:?}", req); | ||
76 | let response = lsp_server::Response::new_err( | ||
77 | req.id, | ||
78 | lsp_server::ErrorCode::MethodNotFound as i32, | ||
79 | "unknown request".to_string(), | ||
80 | ); | ||
81 | self.global_state.respond(response) | ||
82 | } | ||
83 | } | ||
84 | |||
85 | fn parse<R>(&mut self) -> Option<(lsp_server::RequestId, R::Params)> | ||
86 | where | ||
87 | R: lsp_types::request::Request + 'static, | ||
88 | R::Params: DeserializeOwned + 'static, | ||
89 | { | ||
90 | let req = self.req.take()?; | ||
91 | let (id, params) = match req.extract::<R::Params>(R::METHOD) { | ||
92 | Ok(it) => it, | ||
93 | Err(req) => { | ||
94 | self.req = Some(req); | ||
95 | return None; | ||
96 | } | ||
97 | }; | ||
98 | Some((id, params)) | ||
99 | } | ||
100 | } | ||
101 | |||
102 | fn result_to_response<R>( | ||
103 | id: lsp_server::RequestId, | ||
104 | result: Result<R::Result>, | ||
105 | ) -> lsp_server::Response | ||
106 | where | ||
107 | R: lsp_types::request::Request + 'static, | ||
108 | R::Params: DeserializeOwned + 'static, | ||
109 | R::Result: Serialize + 'static, | ||
110 | { | ||
111 | match result { | ||
112 | Ok(resp) => lsp_server::Response::new_ok(id, &resp), | ||
113 | Err(e) => match e.downcast::<LspError>() { | ||
114 | Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message), | ||
115 | Err(e) => { | ||
116 | if is_canceled(&*e) { | ||
117 | lsp_server::Response::new_err( | ||
118 | id, | ||
119 | lsp_server::ErrorCode::ContentModified as i32, | ||
120 | "content modified".to_string(), | ||
121 | ) | ||
122 | } else { | ||
123 | lsp_server::Response::new_err( | ||
124 | id, | ||
125 | lsp_server::ErrorCode::InternalError as i32, | ||
126 | e.to_string(), | ||
127 | ) | ||
128 | } | ||
129 | } | ||
130 | }, | ||
131 | } | ||
132 | } | ||
133 | |||
134 | pub(crate) struct NotificationDispatcher<'a> { | ||
135 | pub(crate) not: Option<lsp_server::Notification>, | ||
136 | pub(crate) global_state: &'a mut GlobalState, | ||
137 | } | ||
138 | |||
139 | impl<'a> NotificationDispatcher<'a> { | ||
140 | pub(crate) fn on<N>( | ||
141 | &mut self, | ||
142 | f: fn(&mut GlobalState, N::Params) -> Result<()>, | ||
143 | ) -> Result<&mut Self> | ||
144 | where | ||
145 | N: lsp_types::notification::Notification + 'static, | ||
146 | N::Params: DeserializeOwned + Send + 'static, | ||
147 | { | ||
148 | let not = match self.not.take() { | ||
149 | Some(it) => it, | ||
150 | None => return Ok(self), | ||
151 | }; | ||
152 | let params = match not.extract::<N::Params>(N::METHOD) { | ||
153 | Ok(it) => it, | ||
154 | Err(not) => { | ||
155 | self.not = Some(not); | ||
156 | return Ok(self); | ||
157 | } | ||
158 | }; | ||
159 | f(self.global_state, params)?; | ||
160 | Ok(self) | ||
161 | } | ||
162 | |||
163 | pub(crate) fn finish(&mut self) { | ||
164 | if let Some(not) = &self.not { | ||
165 | if !not.method.starts_with("$/") { | ||
166 | log::error!("unhandled notification: {:?}", not); | ||
167 | } | ||
168 | } | ||
169 | } | ||
170 | } | ||
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 446207e9e..b8aa1e5b5 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs | |||
@@ -3,49 +3,29 @@ | |||
3 | //! | 3 | //! |
4 | //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. | 4 | //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. |
5 | 5 | ||
6 | use std::{convert::TryFrom, sync::Arc}; | 6 | use std::{sync::Arc, time::Instant}; |
7 | 7 | ||
8 | use crossbeam_channel::{unbounded, Receiver}; | 8 | use crossbeam_channel::{unbounded, Receiver, Sender}; |
9 | use flycheck::{FlycheckConfig, FlycheckHandle}; | 9 | use flycheck::FlycheckHandle; |
10 | use lsp_types::Url; | 10 | use lsp_types::Url; |
11 | use parking_lot::RwLock; | 11 | use parking_lot::RwLock; |
12 | use ra_db::{CrateId, SourceRoot, VfsPath}; | 12 | use ra_db::{CrateId, VfsPath}; |
13 | use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId}; | 13 | use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId}; |
14 | use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; | 14 | use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; |
15 | use stdx::format_to; | 15 | use rustc_hash::{FxHashMap, FxHashSet}; |
16 | use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf}; | ||
17 | 16 | ||
18 | use crate::{ | 17 | use crate::{ |
19 | config::{Config, FilesWatcher}, | 18 | config::Config, |
20 | diagnostics::{CheckFixes, DiagnosticCollection}, | 19 | diagnostics::{CheckFixes, DiagnosticCollection}, |
21 | from_proto, | 20 | from_proto, |
22 | line_endings::LineEndings, | 21 | line_endings::LineEndings, |
23 | main_loop::ReqQueue, | 22 | main_loop::Task, |
23 | reload::SourceRootConfig, | ||
24 | request_metrics::{LatestRequests, RequestMetrics}, | 24 | request_metrics::{LatestRequests, RequestMetrics}, |
25 | thread_pool::TaskPool, | ||
25 | to_proto::url_from_abs_path, | 26 | to_proto::url_from_abs_path, |
26 | Result, | 27 | Result, |
27 | }; | 28 | }; |
28 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
29 | |||
30 | fn create_flycheck( | ||
31 | workspaces: &[ProjectWorkspace], | ||
32 | config: &FlycheckConfig, | ||
33 | ) -> Option<(FlycheckHandle, Receiver<flycheck::Message>)> { | ||
34 | // FIXME: Figure out the multi-workspace situation | ||
35 | workspaces.iter().find_map(move |w| match w { | ||
36 | ProjectWorkspace::Cargo { cargo, .. } => { | ||
37 | let (sender, receiver) = unbounded(); | ||
38 | let sender = Box::new(move |msg| sender.send(msg).unwrap()); | ||
39 | let cargo_project_root = cargo.workspace_root().to_path_buf(); | ||
40 | let flycheck = FlycheckHandle::spawn(sender, config.clone(), cargo_project_root.into()); | ||
41 | Some((flycheck, receiver)) | ||
42 | } | ||
43 | ProjectWorkspace::Json { .. } => { | ||
44 | log::warn!("Cargo check watching only supported for cargo workspaces, disabling"); | ||
45 | None | ||
46 | } | ||
47 | }) | ||
48 | } | ||
49 | 29 | ||
50 | #[derive(Eq, PartialEq)] | 30 | #[derive(Eq, PartialEq)] |
51 | pub(crate) enum Status { | 31 | pub(crate) enum Status { |
@@ -59,26 +39,38 @@ impl Default for Status { | |||
59 | } | 39 | } |
60 | } | 40 | } |
61 | 41 | ||
42 | // Enforces drop order | ||
43 | pub(crate) struct Handle<H, C> { | ||
44 | pub(crate) handle: H, | ||
45 | pub(crate) receiver: C, | ||
46 | } | ||
47 | |||
48 | pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response); | ||
49 | pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>; | ||
50 | |||
62 | /// `GlobalState` is the primary mutable state of the language server | 51 | /// `GlobalState` is the primary mutable state of the language server |
63 | /// | 52 | /// |
64 | /// The most interesting components are `vfs`, which stores a consistent | 53 | /// The most interesting components are `vfs`, which stores a consistent |
65 | /// snapshot of the file systems, and `analysis_host`, which stores our | 54 | /// snapshot of the file systems, and `analysis_host`, which stores our |
66 | /// incremental salsa database. | 55 | /// incremental salsa database. |
56 | /// | ||
56 | /// Note that this struct has more than one impl in various modules! | ||
67 | pub(crate) struct GlobalState { | 58 | pub(crate) struct GlobalState { |
59 | sender: Sender<lsp_server::Message>, | ||
60 | req_queue: ReqQueue, | ||
61 | pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>, | ||
62 | pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>, | ||
63 | pub(crate) flycheck: Option<Handle<FlycheckHandle, Receiver<flycheck::Message>>>, | ||
68 | pub(crate) config: Config, | 64 | pub(crate) config: Config, |
69 | pub(crate) analysis_host: AnalysisHost, | 65 | pub(crate) analysis_host: AnalysisHost, |
70 | pub(crate) loader: Box<dyn vfs::loader::Handle>, | ||
71 | pub(crate) task_receiver: Receiver<vfs::loader::Message>, | ||
72 | pub(crate) flycheck: Option<(FlycheckHandle, Receiver<flycheck::Message>)>, | ||
73 | pub(crate) diagnostics: DiagnosticCollection, | 66 | pub(crate) diagnostics: DiagnosticCollection, |
74 | pub(crate) mem_docs: FxHashSet<VfsPath>, | 67 | pub(crate) mem_docs: FxHashSet<VfsPath>, |
75 | pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, | 68 | pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, |
76 | pub(crate) status: Status, | 69 | pub(crate) status: Status, |
77 | pub(crate) req_queue: ReqQueue, | 70 | pub(crate) source_root_config: SourceRootConfig, |
71 | pub(crate) proc_macro_client: ProcMacroClient, | ||
72 | pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>, | ||
78 | latest_requests: Arc<RwLock<LatestRequests>>, | 73 | latest_requests: Arc<RwLock<LatestRequests>>, |
79 | source_root_config: SourceRootConfig, | ||
80 | _proc_macro_client: ProcMacroClient, | ||
81 | workspaces: Arc<Vec<ProjectWorkspace>>, | ||
82 | } | 74 | } |
83 | 75 | ||
84 | /// An immutable snapshot of the world's state at a point in time. | 76 | /// An immutable snapshot of the world's state at a point in time. |
@@ -88,99 +80,43 @@ pub(crate) struct GlobalStateSnapshot { | |||
88 | pub(crate) check_fixes: CheckFixes, | 80 | pub(crate) check_fixes: CheckFixes, |
89 | pub(crate) latest_requests: Arc<RwLock<LatestRequests>>, | 81 | pub(crate) latest_requests: Arc<RwLock<LatestRequests>>, |
90 | vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, | 82 | vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, |
91 | workspaces: Arc<Vec<ProjectWorkspace>>, | 83 | pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>, |
92 | } | 84 | } |
93 | 85 | ||
94 | impl GlobalState { | 86 | impl GlobalState { |
95 | pub(crate) fn new( | 87 | pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState { |
96 | workspaces: Vec<ProjectWorkspace>, | 88 | let loader = { |
97 | lru_capacity: Option<usize>, | 89 | let (sender, receiver) = unbounded::<vfs::loader::Message>(); |
98 | config: Config, | 90 | let handle: vfs_notify::NotifyHandle = |
99 | req_queue: ReqQueue, | 91 | vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); |
100 | ) -> GlobalState { | 92 | let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>; |
101 | let mut change = AnalysisChange::new(); | 93 | Handle { handle, receiver } |
102 | |||
103 | let project_folders = ProjectFolders::new(&workspaces); | ||
104 | |||
105 | let (task_sender, task_receiver) = unbounded::<vfs::loader::Message>(); | ||
106 | let mut vfs = vfs::Vfs::default(); | ||
107 | |||
108 | let proc_macro_client = match &config.proc_macro_srv { | ||
109 | None => ProcMacroClient::dummy(), | ||
110 | Some((path, args)) => match ProcMacroClient::extern_process(path.into(), args) { | ||
111 | Ok(it) => it, | ||
112 | Err(err) => { | ||
113 | log::error!( | ||
114 | "Failed to run ra_proc_macro_srv from path {}, error: {:?}", | ||
115 | path.display(), | ||
116 | err | ||
117 | ); | ||
118 | ProcMacroClient::dummy() | ||
119 | } | ||
120 | }, | ||
121 | }; | 94 | }; |
122 | 95 | ||
123 | let mut loader = { | 96 | let task_pool = { |
124 | let loader = vfs_notify::NotifyHandle::spawn(Box::new(move |msg| { | 97 | let (sender, receiver) = unbounded(); |
125 | task_sender.send(msg).unwrap() | 98 | let handle = TaskPool::new(sender); |
126 | })); | 99 | Handle { handle, receiver } |
127 | Box::new(loader) | ||
128 | }; | ||
129 | let watch = match config.files.watcher { | ||
130 | FilesWatcher::Client => vec![], | ||
131 | FilesWatcher::Notify => project_folders.watch, | ||
132 | }; | ||
133 | loader.set_config(vfs::loader::Config { load: project_folders.load, watch }); | ||
134 | |||
135 | // Create crate graph from all the workspaces | ||
136 | let mut crate_graph = CrateGraph::default(); | ||
137 | let mut load = |path: &AbsPath| { | ||
138 | let contents = loader.load_sync(path); | ||
139 | let path = vfs::VfsPath::from(path.to_path_buf()); | ||
140 | vfs.set_file_contents(path.clone(), contents); | ||
141 | vfs.file_id(&path) | ||
142 | }; | 100 | }; |
143 | for ws in workspaces.iter() { | ||
144 | crate_graph.extend(ws.to_crate_graph( | ||
145 | config.cargo.target.as_deref(), | ||
146 | &proc_macro_client, | ||
147 | &mut load, | ||
148 | )); | ||
149 | } | ||
150 | change.set_crate_graph(crate_graph); | ||
151 | 101 | ||
152 | let flycheck = config.check.as_ref().and_then(|c| create_flycheck(&workspaces, c)); | 102 | let analysis_host = AnalysisHost::new(config.lru_capacity); |
153 | 103 | GlobalState { | |
154 | let mut analysis_host = AnalysisHost::new(lru_capacity); | 104 | sender, |
155 | analysis_host.apply_change(change); | 105 | req_queue: ReqQueue::default(), |
156 | let mut res = GlobalState { | 106 | task_pool, |
107 | loader, | ||
108 | flycheck: None, | ||
157 | config, | 109 | config, |
158 | analysis_host, | 110 | analysis_host, |
159 | loader, | ||
160 | task_receiver, | ||
161 | flycheck, | ||
162 | diagnostics: Default::default(), | 111 | diagnostics: Default::default(), |
163 | mem_docs: FxHashSet::default(), | 112 | mem_docs: FxHashSet::default(), |
164 | vfs: Arc::new(RwLock::new((vfs, FxHashMap::default()))), | 113 | vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))), |
165 | status: Status::default(), | 114 | status: Status::default(), |
166 | req_queue, | 115 | source_root_config: SourceRootConfig::default(), |
116 | proc_macro_client: ProcMacroClient::dummy(), | ||
117 | workspaces: Arc::new(Vec::new()), | ||
167 | latest_requests: Default::default(), | 118 | latest_requests: Default::default(), |
168 | source_root_config: project_folders.source_root_config, | ||
169 | _proc_macro_client: proc_macro_client, | ||
170 | workspaces: Arc::new(workspaces), | ||
171 | }; | ||
172 | res.process_changes(); | ||
173 | res | ||
174 | } | ||
175 | |||
176 | pub(crate) fn update_configuration(&mut self, config: Config) { | ||
177 | self.analysis_host.update_lru_capacity(config.lru_capacity); | ||
178 | if config.check != self.config.check { | ||
179 | self.flycheck = | ||
180 | config.check.as_ref().and_then(|it| create_flycheck(&self.workspaces, it)); | ||
181 | } | 119 | } |
182 | |||
183 | self.config = config; | ||
184 | } | 120 | } |
185 | 121 | ||
186 | pub(crate) fn process_changes(&mut self) -> bool { | 122 | pub(crate) fn process_changes(&mut self) -> bool { |
@@ -232,26 +168,66 @@ impl GlobalState { | |||
232 | } | 168 | } |
233 | } | 169 | } |
234 | 170 | ||
235 | pub(crate) fn maybe_collect_garbage(&mut self) { | 171 | pub(crate) fn send_request<R: lsp_types::request::Request>( |
236 | self.analysis_host.maybe_collect_garbage() | 172 | &mut self, |
173 | params: R::Params, | ||
174 | handler: ReqHandler, | ||
175 | ) { | ||
176 | let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler); | ||
177 | self.send(request.into()); | ||
178 | } | ||
179 | pub(crate) fn complete_request(&mut self, response: lsp_server::Response) { | ||
180 | let handler = self.req_queue.outgoing.complete(response.id.clone()); | ||
181 | handler(self, response) | ||
182 | } | ||
183 | |||
184 | pub(crate) fn send_notification<N: lsp_types::notification::Notification>( | ||
185 | &mut self, | ||
186 | params: N::Params, | ||
187 | ) { | ||
188 | let not = lsp_server::Notification::new(N::METHOD.to_string(), params); | ||
189 | self.send(not.into()); | ||
190 | } | ||
191 | |||
192 | pub(crate) fn register_request( | ||
193 | &mut self, | ||
194 | request: &lsp_server::Request, | ||
195 | request_received: Instant, | ||
196 | ) { | ||
197 | self.req_queue | ||
198 | .incoming | ||
199 | .register(request.id.clone(), (request.method.clone(), request_received)); | ||
200 | } | ||
201 | pub(crate) fn respond(&mut self, response: lsp_server::Response) { | ||
202 | if let Some((method, start)) = self.req_queue.incoming.complete(response.id.clone()) { | ||
203 | let duration = start.elapsed(); | ||
204 | log::info!("handled req#{} in {:?}", response.id, duration); | ||
205 | let metrics = | ||
206 | RequestMetrics { id: response.id.clone(), method: method.to_string(), duration }; | ||
207 | self.latest_requests.write().record(metrics); | ||
208 | self.send(response.into()); | ||
209 | } | ||
210 | } | ||
211 | pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) { | ||
212 | if let Some(response) = self.req_queue.incoming.cancel(request_id) { | ||
213 | self.send(response.into()); | ||
214 | } | ||
237 | } | 215 | } |
238 | 216 | ||
239 | pub(crate) fn collect_garbage(&mut self) { | 217 | fn send(&mut self, message: lsp_server::Message) { |
240 | self.analysis_host.collect_garbage() | 218 | self.sender.send(message).unwrap() |
241 | } | 219 | } |
220 | } | ||
242 | 221 | ||
243 | pub(crate) fn complete_request(&mut self, request: RequestMetrics) { | 222 | impl Drop for GlobalState { |
244 | self.latest_requests.write().record(request) | 223 | fn drop(&mut self) { |
224 | self.analysis_host.request_cancellation() | ||
245 | } | 225 | } |
246 | } | 226 | } |
247 | 227 | ||
248 | impl GlobalStateSnapshot { | 228 | impl GlobalStateSnapshot { |
249 | pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> { | 229 | pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> { |
250 | let path = from_proto::abs_path(url)?; | 230 | url_to_file_id(&self.vfs.read().0, url) |
251 | let path = path.into(); | ||
252 | let res = | ||
253 | self.vfs.read().0.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?; | ||
254 | Ok(res) | ||
255 | } | 231 | } |
256 | 232 | ||
257 | pub(crate) fn file_id_to_url(&self, id: FileId) -> Url { | 233 | pub(crate) fn file_id_to_url(&self, id: FileId) -> Url { |
@@ -265,7 +241,7 @@ impl GlobalStateSnapshot { | |||
265 | pub(crate) fn anchored_path(&self, file_id: FileId, path: &str) -> Url { | 241 | pub(crate) fn anchored_path(&self, file_id: FileId, path: &str) -> Url { |
266 | let mut base = self.vfs.read().0.file_path(file_id); | 242 | let mut base = self.vfs.read().0.file_path(file_id); |
267 | base.pop(); | 243 | base.pop(); |
268 | let path = base.join(path); | 244 | let path = base.join(path).unwrap(); |
269 | let path = path.as_path().unwrap(); | 245 | let path = path.as_path().unwrap(); |
270 | url_from_abs_path(&path) | 246 | url_from_abs_path(&path) |
271 | } | 247 | } |
@@ -284,26 +260,6 @@ impl GlobalStateSnapshot { | |||
284 | ProjectWorkspace::Json { .. } => None, | 260 | ProjectWorkspace::Json { .. } => None, |
285 | }) | 261 | }) |
286 | } | 262 | } |
287 | |||
288 | pub(crate) fn status(&self) -> String { | ||
289 | let mut buf = String::new(); | ||
290 | if self.workspaces.is_empty() { | ||
291 | buf.push_str("no workspaces\n") | ||
292 | } else { | ||
293 | buf.push_str("workspaces:\n"); | ||
294 | for w in self.workspaces.iter() { | ||
295 | format_to!(buf, "{} packages loaded\n", w.n_packages()); | ||
296 | } | ||
297 | } | ||
298 | buf.push_str("\nanalysis:\n"); | ||
299 | buf.push_str( | ||
300 | &self | ||
301 | .analysis | ||
302 | .status() | ||
303 | .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()), | ||
304 | ); | ||
305 | buf | ||
306 | } | ||
307 | } | 263 | } |
308 | 264 | ||
309 | pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { | 265 | pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { |
@@ -312,77 +268,8 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { | |||
312 | url_from_abs_path(&path) | 268 | url_from_abs_path(&path) |
313 | } | 269 | } |
314 | 270 | ||
315 | #[derive(Default)] | 271 | pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> { |
316 | pub(crate) struct ProjectFolders { | 272 | let path = from_proto::vfs_path(url)?; |
317 | pub(crate) load: Vec<vfs::loader::Entry>, | 273 | let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?; |
318 | pub(crate) watch: Vec<usize>, | 274 | Ok(res) |
319 | pub(crate) source_root_config: SourceRootConfig, | ||
320 | } | ||
321 | |||
322 | impl ProjectFolders { | ||
323 | pub(crate) fn new(workspaces: &[ProjectWorkspace]) -> ProjectFolders { | ||
324 | let mut res = ProjectFolders::default(); | ||
325 | let mut fsc = FileSetConfig::builder(); | ||
326 | let mut local_filesets = vec![]; | ||
327 | |||
328 | for root in workspaces.iter().flat_map(|it| it.to_roots()) { | ||
329 | let path = root.path().to_owned(); | ||
330 | |||
331 | let mut file_set_roots: Vec<VfsPath> = vec![]; | ||
332 | |||
333 | let entry = if root.is_member() { | ||
334 | vfs::loader::Entry::local_cargo_package(path.to_path_buf()) | ||
335 | } else { | ||
336 | vfs::loader::Entry::cargo_package_dependency(path.to_path_buf()) | ||
337 | }; | ||
338 | res.load.push(entry); | ||
339 | if root.is_member() { | ||
340 | res.watch.push(res.load.len() - 1); | ||
341 | } | ||
342 | |||
343 | if let Some(out_dir) = root.out_dir() { | ||
344 | let out_dir = AbsPathBuf::try_from(out_dir.to_path_buf()).unwrap(); | ||
345 | res.load.push(vfs::loader::Entry::rs_files_recursively(out_dir.clone())); | ||
346 | if root.is_member() { | ||
347 | res.watch.push(res.load.len() - 1); | ||
348 | } | ||
349 | file_set_roots.push(out_dir.into()); | ||
350 | } | ||
351 | file_set_roots.push(path.to_path_buf().into()); | ||
352 | |||
353 | if root.is_member() { | ||
354 | local_filesets.push(fsc.len()); | ||
355 | } | ||
356 | fsc.add_file_set(file_set_roots) | ||
357 | } | ||
358 | |||
359 | let fsc = fsc.build(); | ||
360 | res.source_root_config = SourceRootConfig { fsc, local_filesets }; | ||
361 | |||
362 | res | ||
363 | } | ||
364 | } | ||
365 | |||
366 | #[derive(Default, Debug)] | ||
367 | pub(crate) struct SourceRootConfig { | ||
368 | pub(crate) fsc: FileSetConfig, | ||
369 | pub(crate) local_filesets: Vec<usize>, | ||
370 | } | ||
371 | |||
372 | impl SourceRootConfig { | ||
373 | pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> { | ||
374 | self.fsc | ||
375 | .partition(vfs) | ||
376 | .into_iter() | ||
377 | .enumerate() | ||
378 | .map(|(idx, file_set)| { | ||
379 | let is_local = self.local_filesets.contains(&idx); | ||
380 | if is_local { | ||
381 | SourceRoot::new_local(file_set) | ||
382 | } else { | ||
383 | SourceRoot::new_library(file_set) | ||
384 | } | ||
385 | }) | ||
386 | .collect() | ||
387 | } | ||
388 | } | 275 | } |
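Note: the ProjectFolders and SourceRootConfig code removed here moves to the new reload.rs. The core idea of SourceRootConfig::partition is that file sets are produced in a fixed order and an index list marks which of them are local (workspace members). A simplified, std-only sketch of that partitioning, with stand-in types instead of the real ra_db/vfs ones:

    #[derive(Debug)]
    enum SourceRoot {
        Local(Vec<String>),
        Library(Vec<String>),
    }

    fn partition(file_sets: Vec<Vec<String>>, local_filesets: &[usize]) -> Vec<SourceRoot> {
        file_sets
            .into_iter()
            .enumerate()
            .map(|(idx, files)| {
                if local_filesets.contains(&idx) {
                    SourceRoot::Local(files)
                } else {
                    SourceRoot::Library(files)
                }
            })
            .collect()
    }

    fn main() {
        // Index 0 is a workspace member; index 1 is a registry dependency.
        let roots = partition(
            vec![vec!["src/lib.rs".into()], vec!["registry/serde/src/lib.rs".into()]],
            &[0],
        );
        println!("{:?}", roots);
    }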
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index b2ff9a157..38e3c3324 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -31,7 +31,6 @@ use stdx::{format_to, split_delim}; | |||
31 | use crate::{ | 31 | use crate::{ |
32 | cargo_target_spec::CargoTargetSpec, | 32 | cargo_target_spec::CargoTargetSpec, |
33 | config::RustfmtConfig, | 33 | config::RustfmtConfig, |
34 | diagnostics::DiagnosticTask, | ||
35 | from_json, from_proto, | 34 | from_json, from_proto, |
36 | global_state::GlobalStateSnapshot, | 35 | global_state::GlobalStateSnapshot, |
37 | lsp_ext::{self, InlayHint, InlayHintsParams}, | 36 | lsp_ext::{self, InlayHint, InlayHintsParams}, |
@@ -40,7 +39,20 @@ use crate::{ | |||
40 | 39 | ||
41 | pub(crate) fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result<String> { | 40 | pub(crate) fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result<String> { |
42 | let _p = profile("handle_analyzer_status"); | 41 | let _p = profile("handle_analyzer_status"); |
43 | let mut buf = snap.status(); | 42 | |
43 | let mut buf = String::new(); | ||
44 | if snap.workspaces.is_empty() { | ||
45 | buf.push_str("no workspaces\n") | ||
46 | } else { | ||
47 | buf.push_str("workspaces:\n"); | ||
48 | for w in snap.workspaces.iter() { | ||
49 | format_to!(buf, "{} packages loaded\n", w.n_packages()); | ||
50 | } | ||
51 | } | ||
52 | buf.push_str("\nanalysis:\n"); | ||
53 | buf.push_str( | ||
54 | &snap.analysis.status().unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()), | ||
55 | ); | ||
44 | format_to!(buf, "\n\nrequests:\n"); | 56 | format_to!(buf, "\n\nrequests:\n"); |
45 | let requests = snap.latest_requests.read(); | 57 | let requests = snap.latest_requests.read(); |
46 | for (is_last, r) in requests.iter() { | 58 | for (is_last, r) in requests.iter() { |
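Note: the status text is assembled with stdx's format_to! macro. A plain-std equivalent uses write!/writeln! against a String; the workspace data here is invented purely for illustration.

    use std::fmt::Write;

    fn status(package_counts: &[usize]) -> String {
        let mut buf = String::new();
        if package_counts.is_empty() {
            buf.push_str("no workspaces\n");
        } else {
            buf.push_str("workspaces:\n");
            for n in package_counts {
                // Writing into a String cannot fail, so the Result is ignored.
                let _ = writeln!(buf, "{} packages loaded", n);
            }
        }
        buf
    }

    fn main() {
        print!("{}", status(&[120, 7]));
    }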
@@ -638,7 +650,7 @@ pub(crate) fn handle_formatting( | |||
638 | 650 | ||
639 | let mut rustfmt = match &snap.config.rustfmt { | 651 | let mut rustfmt = match &snap.config.rustfmt { |
640 | RustfmtConfig::Rustfmt { extra_args } => { | 652 | RustfmtConfig::Rustfmt { extra_args } => { |
641 | let mut cmd = process::Command::new("rustfmt"); | 653 | let mut cmd = process::Command::new(ra_toolchain::rustfmt()); |
642 | cmd.args(extra_args); | 654 | cmd.args(extra_args); |
643 | if let Some(&crate_id) = crate_ids.first() { | 655 | if let Some(&crate_id) = crate_ids.first() { |
644 | // Assume all crates are in the same edition | 656 | // Assume all crates are in the same edition |
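Note: rustfmt is now located via ra_toolchain::rustfmt() instead of relying on a bare "rustfmt" in PATH. The sketch below only illustrates the general discovery idea (prefer an explicit $CARGO_HOME/bin/<tool>, otherwise fall back to the bare name so PATH lookup applies); it is an assumption about the approach, not the crate's actual logic.

    use std::path::PathBuf;
    use std::process::Command;

    fn toolchain_tool(name: &str) -> PathBuf {
        // On Windows the file name would also need an .exe suffix.
        if let Some(cargo_home) = std::env::var_os("CARGO_HOME") {
            let candidate = PathBuf::from(cargo_home).join("bin").join(name);
            if candidate.exists() {
                return candidate;
            }
        }
        PathBuf::from(name)
    }

    fn main() {
        let rustfmt = toolchain_tool("rustfmt");
        // Spawning is the same either way; only the program path differs.
        let status = Command::new(&rustfmt).arg("--version").status();
        println!("{:?} -> {:?}", rustfmt, status);
    }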
@@ -950,7 +962,7 @@ pub(crate) fn handle_ssr( | |||
950 | pub(crate) fn publish_diagnostics( | 962 | pub(crate) fn publish_diagnostics( |
951 | snap: &GlobalStateSnapshot, | 963 | snap: &GlobalStateSnapshot, |
952 | file_id: FileId, | 964 | file_id: FileId, |
953 | ) -> Result<DiagnosticTask> { | 965 | ) -> Result<Vec<Diagnostic>> { |
954 | let _p = profile("publish_diagnostics"); | 966 | let _p = profile("publish_diagnostics"); |
955 | let line_index = snap.analysis.file_line_index(file_id)?; | 967 | let line_index = snap.analysis.file_line_index(file_id)?; |
956 | let diagnostics: Vec<Diagnostic> = snap | 968 | let diagnostics: Vec<Diagnostic> = snap |
@@ -967,7 +979,7 @@ pub(crate) fn publish_diagnostics( | |||
967 | tags: None, | 979 | tags: None, |
968 | }) | 980 | }) |
969 | .collect(); | 981 | .collect(); |
970 | Ok(DiagnosticTask::SetNative(file_id, diagnostics)) | 982 | Ok(diagnostics) |
971 | } | 983 | } |
972 | 984 | ||
973 | pub(crate) fn handle_inlay_hints( | 985 | pub(crate) fn handle_inlay_hints( |
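Note: publish_diagnostics now returns plain data instead of a DiagnosticTask, which lets the main loop batch per-file results into one Task::Diagnostics message (see the main_loop.rs hunk further down). A sketch of that shape, with simplified stand-ins for FileId and lsp_types::Diagnostic:

    type FileId = u32;
    type Diagnostic = String;

    fn publish_diagnostics(file_id: FileId) -> Result<Vec<Diagnostic>, String> {
        // The real handler queries the analysis snapshot; this stands in with one fake finding.
        Ok(vec![format!("unused variable in file {}", file_id)])
    }

    enum Task {
        Diagnostics(Vec<(FileId, Vec<Diagnostic>)>),
    }

    fn main() {
        let subscriptions: Vec<FileId> = vec![1, 2];
        let per_file: Vec<(FileId, Vec<Diagnostic>)> = subscriptions
            .into_iter()
            .filter_map(|file_id| publish_diagnostics(file_id).ok().map(|d| (file_id, d)))
            .collect();
        match Task::Diagnostics(per_file) {
            Task::Diagnostics(files) => println!("{} file(s) with diagnostics", files.len()),
        }
    }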
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 794286672..407944d85 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs | |||
@@ -18,7 +18,9 @@ macro_rules! eprintln { | |||
18 | } | 18 | } |
19 | 19 | ||
20 | mod global_state; | 20 | mod global_state; |
21 | mod reload; | ||
21 | mod main_loop; | 22 | mod main_loop; |
23 | mod dispatch; | ||
22 | mod handlers; | 24 | mod handlers; |
23 | mod caps; | 25 | mod caps; |
24 | mod cargo_target_spec; | 26 | mod cargo_target_spec; |
@@ -30,13 +32,14 @@ mod diagnostics; | |||
30 | mod line_endings; | 32 | mod line_endings; |
31 | mod request_metrics; | 33 | mod request_metrics; |
32 | mod lsp_utils; | 34 | mod lsp_utils; |
35 | mod thread_pool; | ||
33 | pub mod lsp_ext; | 36 | pub mod lsp_ext; |
34 | pub mod config; | 37 | pub mod config; |
35 | 38 | ||
36 | use serde::de::DeserializeOwned; | 39 | use serde::de::DeserializeOwned; |
37 | 40 | ||
38 | pub type Result<T, E = Box<dyn std::error::Error + Send + Sync>> = std::result::Result<T, E>; | 41 | pub type Result<T, E = Box<dyn std::error::Error + Send + Sync>> = std::result::Result<T, E>; |
39 | pub use crate::{caps::server_capabilities, lsp_utils::show_message, main_loop::main_loop}; | 42 | pub use crate::{caps::server_capabilities, main_loop::main_loop}; |
40 | use std::fmt; | 43 | use std::fmt; |
41 | 44 | ||
42 | pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> { | 45 | pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> { |
diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index 14adb8ae7..0bc3ff115 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs | |||
@@ -1,23 +1,11 @@ | |||
1 | //! Utilities for LSP-related boilerplate code. | 1 | //! Utilities for LSP-related boilerplate code. |
2 | use std::{error::Error, ops::Range}; | 2 | use std::{error::Error, ops::Range}; |
3 | 3 | ||
4 | use crate::from_proto; | 4 | use lsp_server::Notification; |
5 | use crossbeam_channel::Sender; | ||
6 | use lsp_server::{Message, Notification}; | ||
7 | use ra_db::Canceled; | 5 | use ra_db::Canceled; |
8 | use ra_ide::LineIndex; | 6 | use ra_ide::LineIndex; |
9 | use serde::{de::DeserializeOwned, Serialize}; | ||
10 | 7 | ||
11 | pub fn show_message( | 8 | use crate::{from_proto, global_state::GlobalState}; |
12 | typ: lsp_types::MessageType, | ||
13 | message: impl Into<String>, | ||
14 | sender: &Sender<Message>, | ||
15 | ) { | ||
16 | let message = message.into(); | ||
17 | let params = lsp_types::ShowMessageParams { typ, message }; | ||
18 | let not = notification_new::<lsp_types::notification::ShowMessage>(params); | ||
19 | sender.send(not.into()).unwrap(); | ||
20 | } | ||
21 | 9 | ||
22 | pub(crate) fn is_canceled(e: &(dyn Error + 'static)) -> bool { | 10 | pub(crate) fn is_canceled(e: &(dyn Error + 'static)) -> bool { |
23 | e.downcast_ref::<Canceled>().is_some() | 11 | e.downcast_ref::<Canceled>().is_some() |
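Note: is_canceled, kept unchanged in this hunk, relies on Error::downcast_ref to spot a cancellation buried behind a boxed error. A self-contained, std-only sketch of that pattern with a stand-in Canceled type:

    use std::error::Error;
    use std::fmt;

    #[derive(Debug)]
    struct Canceled;

    impl fmt::Display for Canceled {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "operation canceled")
        }
    }

    impl Error for Canceled {}

    fn is_canceled(e: &(dyn Error + 'static)) -> bool {
        e.downcast_ref::<Canceled>().is_some()
    }

    fn main() {
        let err: Box<dyn Error> = Box::new(Canceled);
        println!("{}", is_canceled(&*err)); // prints "true"
    }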
@@ -29,20 +17,68 @@ pub(crate) fn notification_is<N: lsp_types::notification::Notification>( | |||
29 | notification.method == N::METHOD | 17 | notification.method == N::METHOD |
30 | } | 18 | } |
31 | 19 | ||
32 | pub(crate) fn notification_cast<N>(notification: Notification) -> Result<N::Params, Notification> | 20 | #[derive(Debug, Eq, PartialEq)] |
33 | where | 21 | pub(crate) enum Progress { |
34 | N: lsp_types::notification::Notification, | 22 | Begin, |
35 | N::Params: DeserializeOwned, | 23 | Report, |
36 | { | 24 | End, |
37 | notification.extract(N::METHOD) | 25 | } |
26 | |||
27 | impl Progress { | ||
28 | pub(crate) fn percentage(done: usize, total: usize) -> f64 { | ||
29 | (done as f64 / total.max(1) as f64) * 100.0 | ||
30 | } | ||
38 | } | 31 | } |
39 | 32 | ||
40 | pub(crate) fn notification_new<N>(params: N::Params) -> Notification | 33 | impl GlobalState { |
41 | where | 34 | pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) { |
42 | N: lsp_types::notification::Notification, | 35 | let message = message.into(); |
43 | N::Params: Serialize, | 36 | self.send_notification::<lsp_types::notification::ShowMessage>( |
44 | { | 37 | lsp_types::ShowMessageParams { typ, message }, |
45 | Notification::new(N::METHOD.to_string(), params) | 38 | ) |
39 | } | ||
40 | |||
41 | pub(crate) fn report_progress( | ||
42 | &mut self, | ||
43 | title: &str, | ||
44 | state: Progress, | ||
45 | message: Option<String>, | ||
46 | percentage: Option<f64>, | ||
47 | ) { | ||
48 | if !self.config.client_caps.work_done_progress { | ||
49 | return; | ||
50 | } | ||
51 | let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title)); | ||
52 | let work_done_progress = match state { | ||
53 | Progress::Begin => { | ||
54 | self.send_request::<lsp_types::request::WorkDoneProgressCreate>( | ||
55 | lsp_types::WorkDoneProgressCreateParams { token: token.clone() }, | ||
56 | |_, _| (), | ||
57 | ); | ||
58 | |||
59 | lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { | ||
60 | title: title.into(), | ||
61 | cancellable: None, | ||
62 | message, | ||
63 | percentage, | ||
64 | }) | ||
65 | } | ||
66 | Progress::Report => { | ||
67 | lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { | ||
68 | cancellable: None, | ||
69 | message, | ||
70 | percentage, | ||
71 | }) | ||
72 | } | ||
73 | Progress::End => { | ||
74 | lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message }) | ||
75 | } | ||
76 | }; | ||
77 | self.send_notification::<lsp_types::notification::Progress>(lsp_types::ProgressParams { | ||
78 | token, | ||
79 | value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress), | ||
80 | }); | ||
81 | } | ||
46 | } | 82 | } |
47 | 83 | ||
48 | pub(crate) fn apply_document_changes( | 84 | pub(crate) fn apply_document_changes( |
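Note: the Progress enum and percentage helper added above are fed by the loader and flycheck events in main_loop.rs (n_done == 0 maps to Begin, partial progress to Report, completion to End). A std-only sketch of that mapping, with the LSP plumbing left out:

    #[derive(Debug, PartialEq)]
    enum Progress {
        Begin,
        Report,
        End,
    }

    fn percentage(done: usize, total: usize) -> f64 {
        (done as f64 / total.max(1) as f64) * 100.0
    }

    fn classify(n_done: usize, n_total: usize) -> Progress {
        if n_done == 0 {
            Progress::Begin
        } else if n_done < n_total {
            Progress::Report
        } else {
            Progress::End
        }
    }

    fn main() {
        let cases: [(usize, usize); 3] = [(0, 4), (2, 4), (4, 4)];
        for &(done, total) in &cases {
            println!("{}/{} -> {:?} at {:.0}%", done, total, classify(done, total), percentage(done, total));
        }
    }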
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index a7a7d2eb7..e5194fe41 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -2,32 +2,24 @@ | |||
2 | //! requests/replies and notifications back to the client. | 2 | //! requests/replies and notifications back to the client. |
3 | use std::{ | 3 | use std::{ |
4 | env, fmt, panic, | 4 | env, fmt, panic, |
5 | sync::Arc, | ||
6 | time::{Duration, Instant}, | 5 | time::{Duration, Instant}, |
7 | }; | 6 | }; |
8 | 7 | ||
9 | use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; | 8 | use crossbeam_channel::{never, select, Receiver}; |
10 | use lsp_server::{Connection, ErrorCode, Notification, Request, RequestId, Response}; | 9 | use lsp_server::{Connection, Notification, Request, Response}; |
11 | use lsp_types::{request::Request as _, NumberOrString}; | 10 | use lsp_types::notification::Notification as _; |
12 | use ra_db::VfsPath; | 11 | use ra_db::VfsPath; |
13 | use ra_ide::{Canceled, FileId}; | 12 | use ra_ide::{Canceled, FileId}; |
14 | use ra_prof::profile; | 13 | use ra_prof::profile; |
15 | use ra_project_model::{PackageRoot, ProjectWorkspace}; | ||
16 | use serde::{de::DeserializeOwned, Serialize}; | ||
17 | use threadpool::ThreadPool; | ||
18 | 14 | ||
19 | use crate::{ | 15 | use crate::{ |
20 | config::{Config, FilesWatcher, LinkedProject}, | 16 | config::Config, |
21 | diagnostics::DiagnosticTask, | 17 | dispatch::{NotificationDispatcher, RequestDispatcher}, |
22 | from_proto, | 18 | from_proto, |
23 | global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot, Status}, | 19 | global_state::{file_id_to_url, url_to_file_id, GlobalState, Status}, |
24 | handlers, lsp_ext, | 20 | handlers, lsp_ext, |
25 | lsp_utils::{ | 21 | lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress}, |
26 | apply_document_changes, is_canceled, notification_cast, notification_is, notification_new, | 22 | Result, |
27 | show_message, | ||
28 | }, | ||
29 | request_metrics::RequestMetrics, | ||
30 | LspError, Result, | ||
31 | }; | 23 | }; |
32 | 24 | ||
33 | pub fn main_loop(config: Config, connection: Connection) -> Result<()> { | 25 | pub fn main_loop(config: Config, connection: Connection) -> Result<()> { |
@@ -52,124 +44,7 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { | |||
52 | SetThreadPriority(thread, thread_priority_above_normal); | 44 | SetThreadPriority(thread, thread_priority_above_normal); |
53 | } | 45 | } |
54 | 46 | ||
55 | let mut global_state = { | 47 | GlobalState::new(connection.sender.clone(), config).run(connection.receiver) |
56 | let workspaces = { | ||
57 | if config.linked_projects.is_empty() && config.notifications.cargo_toml_not_found { | ||
58 | show_message( | ||
59 | lsp_types::MessageType::Error, | ||
60 | "rust-analyzer failed to discover workspace".to_string(), | ||
61 | &connection.sender, | ||
62 | ); | ||
63 | }; | ||
64 | |||
65 | config | ||
66 | .linked_projects | ||
67 | .iter() | ||
68 | .filter_map(|project| match project { | ||
69 | LinkedProject::ProjectManifest(manifest) => { | ||
70 | ra_project_model::ProjectWorkspace::load( | ||
71 | manifest.clone(), | ||
72 | &config.cargo, | ||
73 | config.with_sysroot, | ||
74 | ) | ||
75 | .map_err(|err| { | ||
76 | log::error!("failed to load workspace: {:#}", err); | ||
77 | show_message( | ||
78 | lsp_types::MessageType::Error, | ||
79 | format!("rust-analyzer failed to load workspace: {:#}", err), | ||
80 | &connection.sender, | ||
81 | ); | ||
82 | }) | ||
83 | .ok() | ||
84 | } | ||
85 | LinkedProject::InlineJsonProject(it) => { | ||
86 | Some(ra_project_model::ProjectWorkspace::Json { project: it.clone() }) | ||
87 | } | ||
88 | }) | ||
89 | .collect::<Vec<_>>() | ||
90 | }; | ||
91 | |||
92 | let mut req_queue = ReqQueue::default(); | ||
93 | |||
94 | if let FilesWatcher::Client = config.files.watcher { | ||
95 | let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { | ||
96 | watchers: workspaces | ||
97 | .iter() | ||
98 | .flat_map(ProjectWorkspace::to_roots) | ||
99 | .filter(PackageRoot::is_member) | ||
100 | .map(|root| format!("{}/**/*.rs", root.path().display())) | ||
101 | .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None }) | ||
102 | .collect(), | ||
103 | }; | ||
104 | let registration = lsp_types::Registration { | ||
105 | id: "file-watcher".to_string(), | ||
106 | method: "workspace/didChangeWatchedFiles".to_string(), | ||
107 | register_options: Some(serde_json::to_value(registration_options).unwrap()), | ||
108 | }; | ||
109 | let params = lsp_types::RegistrationParams { registrations: vec![registration] }; | ||
110 | let request = req_queue.outgoing.register( | ||
111 | lsp_types::request::RegisterCapability::METHOD.to_string(), | ||
112 | params, | ||
113 | DO_NOTHING, | ||
114 | ); | ||
115 | connection.sender.send(request.into()).unwrap(); | ||
116 | } | ||
117 | |||
118 | GlobalState::new(workspaces, config.lru_capacity, config, req_queue) | ||
119 | }; | ||
120 | |||
121 | let pool = ThreadPool::default(); | ||
122 | let (task_sender, task_receiver) = unbounded::<Task>(); | ||
123 | |||
124 | log::info!("server initialized, serving requests"); | ||
125 | { | ||
126 | let task_sender = task_sender; | ||
127 | loop { | ||
128 | log::trace!("selecting"); | ||
129 | let event = select! { | ||
130 | recv(&connection.receiver) -> msg => match msg { | ||
131 | Ok(msg) => Event::Lsp(msg), | ||
132 | Err(RecvError) => return Err("client exited without shutdown".into()), | ||
133 | }, | ||
134 | recv(task_receiver) -> task => Event::Task(task.unwrap()), | ||
135 | recv(global_state.task_receiver) -> task => match task { | ||
136 | Ok(task) => Event::Vfs(task), | ||
137 | Err(RecvError) => return Err("vfs died".into()), | ||
138 | }, | ||
139 | recv(global_state.flycheck.as_ref().map_or(&never(), |it| &it.1)) -> task => match task { | ||
140 | Ok(task) => Event::Flycheck(task), | ||
141 | Err(RecvError) => return Err("check watcher died".into()), | ||
142 | }, | ||
143 | }; | ||
144 | if let Event::Lsp(lsp_server::Message::Request(req)) = &event { | ||
145 | if connection.handle_shutdown(&req)? { | ||
146 | break; | ||
147 | }; | ||
148 | } | ||
149 | assert!(!global_state.vfs.read().0.has_changes()); | ||
150 | loop_turn(&pool, &task_sender, &connection, &mut global_state, event)?; | ||
151 | assert!(!global_state.vfs.read().0.has_changes()); | ||
152 | } | ||
153 | } | ||
154 | global_state.analysis_host.request_cancellation(); | ||
155 | log::info!("waiting for tasks to finish..."); | ||
156 | task_receiver.into_iter().for_each(|task| on_task(task, &connection.sender, &mut global_state)); | ||
157 | log::info!("...tasks have finished"); | ||
158 | log::info!("joining threadpool..."); | ||
159 | pool.join(); | ||
160 | drop(pool); | ||
161 | log::info!("...threadpool has finished"); | ||
162 | |||
163 | let vfs = Arc::try_unwrap(global_state.vfs).expect("all snapshots should be dead"); | ||
164 | drop(vfs); | ||
165 | |||
166 | Ok(()) | ||
167 | } | ||
168 | |||
169 | #[derive(Debug)] | ||
170 | enum Task { | ||
171 | Respond(Response), | ||
172 | Diagnostic(DiagnosticTask), | ||
173 | } | 48 | } |
174 | 49 | ||
175 | enum Event { | 50 | enum Event { |
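Note: main_loop now just builds the state and hands it the client channel: GlobalState::new(sender, config).run(receiver). A sketch of that ownership pattern using std::sync::mpsc as a stand-in for the crossbeam channel and lsp_server::Connection used by the real code:

    use std::sync::mpsc::{channel, Receiver};

    struct GlobalState {
        handled: usize,
    }

    impl GlobalState {
        fn new() -> GlobalState {
            GlobalState { handled: 0 }
        }

        // The state owns itself for the whole loop and is dropped when the loop ends.
        fn run(mut self, inbox: Receiver<String>) -> Result<(), String> {
            while let Ok(msg) = inbox.recv() {
                if msg == "exit" {
                    return Ok(());
                }
                self.handle_event(msg)?;
            }
            Err("client exited without proper shutdown sequence".to_string())
        }

        fn handle_event(&mut self, msg: String) -> Result<(), String> {
            self.handled += 1;
            println!("event #{}: {}", self.handled, msg);
            Ok(())
        }
    }

    fn main() -> Result<(), String> {
        let (tx, rx) = channel();
        for msg in ["open", "change", "exit"] {
            tx.send(msg.to_string()).map_err(|e| e.to_string())?;
        }
        GlobalState::new().run(rx)
    }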
@@ -179,6 +54,13 @@ enum Event { | |||
179 | Flycheck(flycheck::Message), | 54 | Flycheck(flycheck::Message), |
180 | } | 55 | } |
181 | 56 | ||
57 | #[derive(Debug)] | ||
58 | pub(crate) enum Task { | ||
59 | Response(Response), | ||
60 | Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>), | ||
61 | Unit, | ||
62 | } | ||
63 | |||
182 | impl fmt::Debug for Event { | 64 | impl fmt::Debug for Event { |
183 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | 65 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
184 | let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter| { | 66 | let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter| { |
@@ -193,7 +75,7 @@ impl fmt::Debug for Event { | |||
193 | return debug_verbose_not(not, f); | 75 | return debug_verbose_not(not, f); |
194 | } | 76 | } |
195 | } | 77 | } |
196 | Event::Task(Task::Respond(resp)) => { | 78 | Event::Task(Task::Response(resp)) => { |
197 | return f | 79 | return f |
198 | .debug_struct("Response") | 80 | .debug_struct("Response") |
199 | .field("id", &resp.id) | 81 | .field("id", &resp.id) |
@@ -211,616 +93,369 @@ impl fmt::Debug for Event { | |||
211 | } | 93 | } |
212 | } | 94 | } |
213 | 95 | ||
214 | pub(crate) type ReqHandler = fn(&mut GlobalState, Response); | 96 | impl GlobalState { |
215 | pub(crate) type ReqQueue = lsp_server::ReqQueue<(&'static str, Instant), ReqHandler>; | 97 | fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> { |
216 | const DO_NOTHING: ReqHandler = |_, _| (); | 98 | select! { |
217 | 99 | recv(inbox) -> msg => | |
218 | fn loop_turn( | 100 | msg.ok().map(Event::Lsp), |
219 | pool: &ThreadPool, | ||
220 | task_sender: &Sender<Task>, | ||
221 | connection: &Connection, | ||
222 | global_state: &mut GlobalState, | ||
223 | event: Event, | ||
224 | ) -> Result<()> { | ||
225 | let loop_start = Instant::now(); | ||
226 | |||
227 | // NOTE: don't count blocking select! call as a loop-turn time | ||
228 | let _p = profile("main_loop_inner/loop-turn"); | ||
229 | log::info!("loop turn = {:?}", event); | ||
230 | let queue_count = pool.queued_count(); | ||
231 | if queue_count > 0 { | ||
232 | log::info!("queued count = {}", queue_count); | ||
233 | } | ||
234 | |||
235 | let mut became_ready = false; | ||
236 | match event { | ||
237 | Event::Task(task) => { | ||
238 | on_task(task, &connection.sender, global_state); | ||
239 | global_state.maybe_collect_garbage(); | ||
240 | } | ||
241 | Event::Vfs(task) => match task { | ||
242 | vfs::loader::Message::Loaded { files } => { | ||
243 | let vfs = &mut global_state.vfs.write().0; | ||
244 | for (path, contents) in files { | ||
245 | let path = VfsPath::from(path); | ||
246 | if !global_state.mem_docs.contains(&path) { | ||
247 | vfs.set_file_contents(path, contents) | ||
248 | } | ||
249 | } | ||
250 | } | ||
251 | vfs::loader::Message::Progress { n_total, n_done } => { | ||
252 | let state = if n_done == 0 { | ||
253 | Progress::Begin | ||
254 | } else if n_done < n_total { | ||
255 | Progress::Report | ||
256 | } else { | ||
257 | assert_eq!(n_done, n_total); | ||
258 | global_state.status = Status::Ready; | ||
259 | became_ready = true; | ||
260 | Progress::End | ||
261 | }; | ||
262 | report_progress( | ||
263 | global_state, | ||
264 | &connection.sender, | ||
265 | "roots scanned", | ||
266 | state, | ||
267 | Some(format!("{}/{}", n_done, n_total)), | ||
268 | Some(percentage(n_done, n_total)), | ||
269 | ) | ||
270 | } | ||
271 | }, | ||
272 | Event::Flycheck(task) => { | ||
273 | on_check_task(task, global_state, task_sender, &connection.sender)? | ||
274 | } | ||
275 | Event::Lsp(msg) => match msg { | ||
276 | lsp_server::Message::Request(req) => { | ||
277 | on_request(global_state, pool, task_sender, &connection.sender, loop_start, req)? | ||
278 | } | ||
279 | lsp_server::Message::Notification(not) => { | ||
280 | on_notification(&connection.sender, global_state, not)?; | ||
281 | } | ||
282 | lsp_server::Message::Response(resp) => { | ||
283 | let handler = global_state.req_queue.outgoing.complete(resp.id.clone()); | ||
284 | handler(global_state, resp) | ||
285 | } | ||
286 | }, | ||
287 | }; | ||
288 | |||
289 | let state_changed = global_state.process_changes(); | ||
290 | |||
291 | if became_ready { | ||
292 | if let Some(flycheck) = &global_state.flycheck { | ||
293 | flycheck.0.update(); | ||
294 | } | ||
295 | } | ||
296 | 101 | ||
297 | if global_state.status == Status::Ready && (state_changed || became_ready) { | 102 | recv(self.task_pool.receiver) -> task => |
298 | let subscriptions = global_state | 103 | Some(Event::Task(task.unwrap())), |
299 | .mem_docs | ||
300 | .iter() | ||
301 | .map(|path| global_state.vfs.read().0.file_id(&path).unwrap()) | ||
302 | .collect::<Vec<_>>(); | ||
303 | 104 | ||
304 | update_file_notifications_on_threadpool( | 105 | recv(self.loader.receiver) -> task => |
305 | pool, | 106 | Some(Event::Vfs(task.unwrap())), |
306 | global_state.snapshot(), | ||
307 | task_sender.clone(), | ||
308 | subscriptions.clone(), | ||
309 | ); | ||
310 | pool.execute({ | ||
311 | let subs = subscriptions; | ||
312 | let snap = global_state.snapshot(); | ||
313 | move || snap.analysis.prime_caches(subs).unwrap_or_else(|_: Canceled| ()) | ||
314 | }); | ||
315 | } | ||
316 | 107 | ||
317 | let loop_duration = loop_start.elapsed(); | 108 | recv(self.flycheck.as_ref().map_or(&never(), |it| &it.receiver)) -> task => |
318 | if loop_duration > Duration::from_millis(100) { | 109 | Some(Event::Flycheck(task.unwrap())), |
319 | log::error!("overly long loop turn: {:?}", loop_duration); | ||
320 | if env::var("RA_PROFILE").is_ok() { | ||
321 | show_message( | ||
322 | lsp_types::MessageType::Error, | ||
323 | format!("overly long loop turn: {:?}", loop_duration), | ||
324 | &connection.sender, | ||
325 | ); | ||
326 | } | 110 | } |
327 | } | 111 | } |
328 | 112 | ||
329 | Ok(()) | 113 | fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> { |
330 | } | 114 | self.reload(); |
331 | 115 | ||
332 | fn on_task(task: Task, msg_sender: &Sender<lsp_server::Message>, global_state: &mut GlobalState) { | 116 | while let Some(event) = self.next_event(&inbox) { |
333 | match task { | 117 | if let Event::Lsp(lsp_server::Message::Notification(not)) = &event { |
334 | Task::Respond(response) => { | 118 | if not.method == lsp_types::notification::Exit::METHOD { |
335 | if let Some((method, start)) = | 119 | return Ok(()); |
336 | global_state.req_queue.incoming.complete(response.id.clone()) | 120 | } |
337 | { | ||
338 | let duration = start.elapsed(); | ||
339 | log::info!("handled req#{} in {:?}", response.id, duration); | ||
340 | global_state.complete_request(RequestMetrics { | ||
341 | id: response.id.clone(), | ||
342 | method: method.to_string(), | ||
343 | duration, | ||
344 | }); | ||
345 | msg_sender.send(response.into()).unwrap(); | ||
346 | } | 121 | } |
122 | self.handle_event(event)? | ||
347 | } | 123 | } |
348 | Task::Diagnostic(task) => on_diagnostic_task(task, msg_sender, global_state), | 124 | |
125 | Err("client exited without proper shutdown sequence")? | ||
349 | } | 126 | } |
350 | } | ||
351 | 127 | ||
352 | fn on_request( | 128 | fn handle_event(&mut self, event: Event) -> Result<()> { |
353 | global_state: &mut GlobalState, | 129 | let loop_start = Instant::now(); |
354 | pool: &ThreadPool, | 130 | // NOTE: don't count blocking select! call as a loop-turn time |
355 | task_sender: &Sender<Task>, | 131 | let _p = profile("GlobalState::handle_event"); |
356 | msg_sender: &Sender<lsp_server::Message>, | ||
357 | request_received: Instant, | ||
358 | req: Request, | ||
359 | ) -> Result<()> { | ||
360 | let mut pool_dispatcher = PoolDispatcher { | ||
361 | req: Some(req), | ||
362 | pool, | ||
363 | global_state, | ||
364 | task_sender, | ||
365 | msg_sender, | ||
366 | request_received, | ||
367 | }; | ||
368 | pool_dispatcher | ||
369 | .on_sync::<lsp_ext::CollectGarbage>(|s, ()| Ok(s.collect_garbage()))? | ||
370 | .on_sync::<lsp_ext::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))? | ||
371 | .on_sync::<lsp_ext::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))? | ||
372 | .on_sync::<lsp_types::request::SelectionRangeRequest>(|s, p| { | ||
373 | handlers::handle_selection_range(s.snapshot(), p) | ||
374 | })? | ||
375 | .on_sync::<lsp_ext::MatchingBrace>(|s, p| handlers::handle_matching_brace(s.snapshot(), p))? | ||
376 | .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)? | ||
377 | .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)? | ||
378 | .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)? | ||
379 | .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)? | ||
380 | .on::<lsp_ext::Runnables>(handlers::handle_runnables)? | ||
381 | .on::<lsp_ext::InlayHints>(handlers::handle_inlay_hints)? | ||
382 | .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)? | ||
383 | .on::<lsp_ext::ResolveCodeActionRequest>(handlers::handle_resolve_code_action)? | ||
384 | .on::<lsp_ext::HoverRequest>(handlers::handle_hover)? | ||
385 | .on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting)? | ||
386 | .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)? | ||
387 | .on::<lsp_types::request::WorkspaceSymbol>(handlers::handle_workspace_symbol)? | ||
388 | .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)? | ||
389 | .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)? | ||
390 | .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)? | ||
391 | .on::<lsp_types::request::Completion>(handlers::handle_completion)? | ||
392 | .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)? | ||
393 | .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)? | ||
394 | .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)? | ||
395 | .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)? | ||
396 | .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)? | ||
397 | .on::<lsp_types::request::Rename>(handlers::handle_rename)? | ||
398 | .on::<lsp_types::request::References>(handlers::handle_references)? | ||
399 | .on::<lsp_types::request::Formatting>(handlers::handle_formatting)? | ||
400 | .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)? | ||
401 | .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)? | ||
402 | .on::<lsp_types::request::CallHierarchyIncomingCalls>( | ||
403 | handlers::handle_call_hierarchy_incoming, | ||
404 | )? | ||
405 | .on::<lsp_types::request::CallHierarchyOutgoingCalls>( | ||
406 | handlers::handle_call_hierarchy_outgoing, | ||
407 | )? | ||
408 | .on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)? | ||
409 | .on::<lsp_types::request::SemanticTokensRangeRequest>( | ||
410 | handlers::handle_semantic_tokens_range, | ||
411 | )? | ||
412 | .on::<lsp_ext::Ssr>(handlers::handle_ssr)? | ||
413 | .finish(); | ||
414 | Ok(()) | ||
415 | } | ||
416 | 132 | ||
417 | fn on_notification( | 133 | log::info!("handle_event({:?})", event); |
418 | msg_sender: &Sender<lsp_server::Message>, | 134 | let queue_count = self.task_pool.handle.len(); |
419 | global_state: &mut GlobalState, | 135 | if queue_count > 0 { |
420 | not: Notification, | 136 | log::info!("queued count = {}", queue_count); |
421 | ) -> Result<()> { | ||
422 | let not = match notification_cast::<lsp_types::notification::Cancel>(not) { | ||
423 | Ok(params) => { | ||
424 | let id: RequestId = match params.id { | ||
425 | NumberOrString::Number(id) => id.into(), | ||
426 | NumberOrString::String(id) => id.into(), | ||
427 | }; | ||
428 | if let Some(response) = global_state.req_queue.incoming.cancel(id) { | ||
429 | msg_sender.send(response.into()).unwrap() | ||
430 | } | ||
431 | return Ok(()); | ||
432 | } | 137 | } |
433 | Err(not) => not, | 138 | |
434 | }; | 139 | let mut became_ready = false; |
435 | let not = match notification_cast::<lsp_types::notification::DidOpenTextDocument>(not) { | 140 | match event { |
436 | Ok(params) => { | 141 | Event::Lsp(msg) => match msg { |
437 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | 142 | lsp_server::Message::Request(req) => self.on_request(loop_start, req)?, |
438 | if !global_state.mem_docs.insert(path.clone()) { | 143 | lsp_server::Message::Notification(not) => { |
439 | log::error!("duplicate DidOpenTextDocument: {}", path) | 144 | self.on_notification(not)?; |
440 | } | 145 | } |
441 | global_state | 146 | lsp_server::Message::Response(resp) => self.complete_request(resp), |
442 | .vfs | 147 | }, |
443 | .write() | 148 | Event::Task(task) => { |
444 | .0 | 149 | match task { |
445 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); | 150 | Task::Response(response) => self.respond(response), |
446 | } | 151 | Task::Diagnostics(diagnostics_per_file) => { |
447 | return Ok(()); | 152 | for (file_id, diagnostics) in diagnostics_per_file { |
448 | } | 153 | self.diagnostics.set_native_diagnostics(file_id, diagnostics) |
449 | Err(not) => not, | 154 | } |
450 | }; | 155 | } |
451 | let not = match notification_cast::<lsp_types::notification::DidChangeTextDocument>(not) { | 156 | Task::Unit => (), |
452 | Ok(params) => { | 157 | } |
453 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | 158 | self.analysis_host.maybe_collect_garbage(); |
454 | assert!(global_state.mem_docs.contains(&path)); | ||
455 | let vfs = &mut global_state.vfs.write().0; | ||
456 | let file_id = vfs.file_id(&path).unwrap(); | ||
457 | let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap(); | ||
458 | apply_document_changes(&mut text, params.content_changes); | ||
459 | vfs.set_file_contents(path, Some(text.into_bytes())) | ||
460 | } | 159 | } |
461 | return Ok(()); | 160 | Event::Vfs(task) => match task { |
462 | } | 161 | vfs::loader::Message::Loaded { files } => { |
463 | Err(not) => not, | 162 | let vfs = &mut self.vfs.write().0; |
464 | }; | 163 | for (path, contents) in files { |
465 | let not = match notification_cast::<lsp_types::notification::DidCloseTextDocument>(not) { | 164 | let path = VfsPath::from(path); |
466 | Ok(params) => { | 165 | if !self.mem_docs.contains(&path) { |
467 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | 166 | vfs.set_file_contents(path, contents) |
468 | if !global_state.mem_docs.remove(&path) { | 167 | } |
469 | log::error!("orphan DidCloseTextDocument: {}", path) | 168 | } |
470 | } | 169 | } |
471 | if let Some(path) = path.as_path() { | 170 | vfs::loader::Message::Progress { n_total, n_done } => { |
472 | global_state.loader.invalidate(path.to_path_buf()); | 171 | let state = if n_done == 0 { |
172 | Progress::Begin | ||
173 | } else if n_done < n_total { | ||
174 | Progress::Report | ||
175 | } else { | ||
176 | assert_eq!(n_done, n_total); | ||
177 | self.status = Status::Ready; | ||
178 | became_ready = true; | ||
179 | Progress::End | ||
180 | }; | ||
181 | self.report_progress( | ||
182 | "roots scanned", | ||
183 | state, | ||
184 | Some(format!("{}/{}", n_done, n_total)), | ||
185 | Some(Progress::percentage(n_done, n_total)), | ||
186 | ) | ||
187 | } | ||
188 | }, | ||
189 | Event::Flycheck(task) => match task { | ||
190 | flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => { | ||
191 | let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( | ||
192 | &self.config.diagnostics, | ||
193 | &diagnostic, | ||
194 | &workspace_root, | ||
195 | ); | ||
196 | for diag in diagnostics { | ||
197 | match url_to_file_id(&self.vfs.read().0, &diag.location.uri) { | ||
198 | Ok(file_id) => self.diagnostics.add_check_diagnostic( | ||
199 | file_id, | ||
200 | diag.diagnostic, | ||
201 | diag.fixes, | ||
202 | ), | ||
203 | Err(err) => { | ||
204 | log::error!("File with cargo diagnostic not found in VFS: {}", err); | ||
205 | } | ||
206 | }; | ||
207 | } | ||
473 | } | 208 | } |
474 | } | ||
475 | let params = lsp_types::PublishDiagnosticsParams { | ||
476 | uri: params.text_document.uri, | ||
477 | diagnostics: Vec::new(), | ||
478 | version: None, | ||
479 | }; | ||
480 | let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params); | ||
481 | msg_sender.send(not.into()).unwrap(); | ||
482 | return Ok(()); | ||
483 | } | ||
484 | Err(not) => not, | ||
485 | }; | ||
486 | let not = match notification_cast::<lsp_types::notification::DidSaveTextDocument>(not) { | ||
487 | Ok(_params) => { | ||
488 | if let Some(flycheck) = &global_state.flycheck { | ||
489 | flycheck.0.update(); | ||
490 | } | ||
491 | return Ok(()); | ||
492 | } | ||
493 | Err(not) => not, | ||
494 | }; | ||
495 | let not = match notification_cast::<lsp_types::notification::DidChangeConfiguration>(not) { | ||
496 | Ok(_) => { | ||
497 | // As stated in https://github.com/microsoft/language-server-protocol/issues/676, | ||
498 | // this notification's parameters should be ignored and the actual config queried separately. | ||
499 | let request = global_state.req_queue.outgoing.register( | ||
500 | lsp_types::request::WorkspaceConfiguration::METHOD.to_string(), | ||
501 | lsp_types::ConfigurationParams { | ||
502 | items: vec![lsp_types::ConfigurationItem { | ||
503 | scope_uri: None, | ||
504 | section: Some("rust-analyzer".to_string()), | ||
505 | }], | ||
506 | }, | ||
507 | |global_state, resp| { | ||
508 | log::debug!("config update response: '{:?}", resp); | ||
509 | let Response { error, result, .. } = resp; | ||
510 | 209 | ||
511 | match (error, result) { | 210 | flycheck::Message::Progress(status) => { |
512 | (Some(err), _) => { | 211 | let (state, message) = match status { |
513 | log::error!("failed to fetch the server settings: {:?}", err) | 212 | flycheck::Progress::DidStart => { |
213 | self.diagnostics.clear_check(); | ||
214 | (Progress::Begin, None) | ||
514 | } | 215 | } |
515 | (None, Some(configs)) => { | 216 | flycheck::Progress::DidCheckCrate(target) => { |
516 | if let Some(new_config) = configs.get(0) { | 217 | (Progress::Report, Some(target)) |
517 | let mut config = global_state.config.clone(); | ||
518 | config.update(&new_config); | ||
519 | global_state.update_configuration(config); | ||
520 | } | ||
521 | } | 218 | } |
522 | (None, None) => { | 219 | flycheck::Progress::DidFinish | flycheck::Progress::DidCancel => { |
523 | log::error!("received empty server settings response from the client") | 220 | (Progress::End, None) |
524 | } | 221 | } |
525 | } | 222 | }; |
526 | }, | ||
527 | ); | ||
528 | msg_sender.send(request.into())?; | ||
529 | 223 | ||
530 | return Ok(()); | 224 | self.report_progress("cargo check", state, message, None); |
531 | } | ||
532 | Err(not) => not, | ||
533 | }; | ||
534 | let not = match notification_cast::<lsp_types::notification::DidChangeWatchedFiles>(not) { | ||
535 | Ok(params) => { | ||
536 | for change in params.changes { | ||
537 | if let Ok(path) = from_proto::abs_path(&change.uri) { | ||
538 | global_state.loader.invalidate(path) | ||
539 | } | 225 | } |
540 | } | 226 | }, |
541 | return Ok(()); | ||
542 | } | 227 | } |
543 | Err(not) => not, | ||
544 | }; | ||
545 | if not.method.starts_with("$/") { | ||
546 | return Ok(()); | ||
547 | } | ||
548 | log::error!("unhandled notification: {:?}", not); | ||
549 | Ok(()) | ||
550 | } | ||
551 | |||
552 | fn on_check_task( | ||
553 | task: flycheck::Message, | ||
554 | global_state: &mut GlobalState, | ||
555 | task_sender: &Sender<Task>, | ||
556 | msg_sender: &Sender<lsp_server::Message>, | ||
557 | ) -> Result<()> { | ||
558 | match task { | ||
559 | flycheck::Message::ClearDiagnostics => { | ||
560 | task_sender.send(Task::Diagnostic(DiagnosticTask::ClearCheck))?; | ||
561 | } | ||
562 | |||
563 | flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => { | ||
564 | let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( | ||
565 | &global_state.config.diagnostics, | ||
566 | &diagnostic, | ||
567 | &workspace_root, | ||
568 | ); | ||
569 | for diag in diagnostics { | ||
570 | let path = from_proto::vfs_path(&diag.location.uri)?; | ||
571 | let file_id = match global_state.vfs.read().0.file_id(&path) { | ||
572 | Some(file) => FileId(file.0), | ||
573 | None => { | ||
574 | log::error!("File with cargo diagnostic not found in VFS: {}", path); | ||
575 | return Ok(()); | ||
576 | } | ||
577 | }; | ||
578 | 228 | ||
579 | task_sender.send(Task::Diagnostic(DiagnosticTask::AddCheck( | 229 | let state_changed = self.process_changes(); |
580 | file_id, | 230 | if became_ready { |
581 | diag.diagnostic, | 231 | if let Some(flycheck) = &self.flycheck { |
582 | diag.fixes.into_iter().map(|it| it.into()).collect(), | 232 | flycheck.handle.update(); |
583 | )))?; | ||
584 | } | 233 | } |
585 | } | 234 | } |
586 | 235 | ||
587 | flycheck::Message::Progress(status) => { | 236 | if self.status == Status::Ready && (state_changed || became_ready) { |
588 | let (state, message) = match status { | 237 | let subscriptions = self |
589 | flycheck::Progress::Being => (Progress::Begin, None), | 238 | .mem_docs |
590 | flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), | 239 | .iter() |
591 | flycheck::Progress::End => (Progress::End, None), | 240 | .map(|path| self.vfs.read().0.file_id(&path).unwrap()) |
592 | }; | 241 | .collect::<Vec<_>>(); |
593 | |||
594 | report_progress(global_state, msg_sender, "cargo check", state, message, None); | ||
595 | } | ||
596 | }; | ||
597 | |||
598 | Ok(()) | ||
599 | } | ||
600 | |||
601 | fn on_diagnostic_task( | ||
602 | task: DiagnosticTask, | ||
603 | msg_sender: &Sender<lsp_server::Message>, | ||
604 | state: &mut GlobalState, | ||
605 | ) { | ||
606 | let subscriptions = state.diagnostics.handle_task(task); | ||
607 | |||
608 | for file_id in subscriptions { | ||
609 | let url = file_id_to_url(&state.vfs.read().0, file_id); | ||
610 | let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect(); | ||
611 | let params = lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version: None }; | ||
612 | let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params); | ||
613 | msg_sender.send(not.into()).unwrap(); | ||
614 | } | ||
615 | } | ||
616 | |||
617 | #[derive(Eq, PartialEq)] | ||
618 | enum Progress { | ||
619 | Begin, | ||
620 | Report, | ||
621 | End, | ||
622 | } | ||
623 | |||
624 | fn percentage(done: usize, total: usize) -> f64 { | ||
625 | (done as f64 / total.max(1) as f64) * 100.0 | ||
626 | } | ||
627 | |||
628 | fn report_progress( | ||
629 | global_state: &mut GlobalState, | ||
630 | sender: &Sender<lsp_server::Message>, | ||
631 | title: &str, | ||
632 | state: Progress, | ||
633 | message: Option<String>, | ||
634 | percentage: Option<f64>, | ||
635 | ) { | ||
636 | if !global_state.config.client_caps.work_done_progress { | ||
637 | return; | ||
638 | } | ||
639 | let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title)); | ||
640 | let work_done_progress = match state { | ||
641 | Progress::Begin => { | ||
642 | let work_done_progress_create = global_state.req_queue.outgoing.register( | ||
643 | lsp_types::request::WorkDoneProgressCreate::METHOD.to_string(), | ||
644 | lsp_types::WorkDoneProgressCreateParams { token: token.clone() }, | ||
645 | DO_NOTHING, | ||
646 | ); | ||
647 | sender.send(work_done_progress_create.into()).unwrap(); | ||
648 | 242 | ||
649 | lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { | 243 | self.update_file_notifications_on_threadpool(subscriptions); |
650 | title: title.into(), | ||
651 | cancellable: None, | ||
652 | message, | ||
653 | percentage, | ||
654 | }) | ||
655 | } | 244 | } |
656 | Progress::Report => { | ||
657 | lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { | ||
658 | cancellable: None, | ||
659 | message, | ||
660 | percentage, | ||
661 | }) | ||
662 | } | ||
663 | Progress::End => { | ||
664 | lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message }) | ||
665 | } | ||
666 | }; | ||
667 | let notification = | ||
668 | notification_new::<lsp_types::notification::Progress>(lsp_types::ProgressParams { | ||
669 | token, | ||
670 | value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress), | ||
671 | }); | ||
672 | sender.send(notification.into()).unwrap(); | ||
673 | } | ||
674 | 245 | ||
675 | struct PoolDispatcher<'a> { | 246 | if let Some(diagnostic_changes) = self.diagnostics.take_changes() { |
676 | req: Option<Request>, | 247 | for file_id in diagnostic_changes { |
677 | pool: &'a ThreadPool, | 248 | let url = file_id_to_url(&self.vfs.read().0, file_id); |
678 | global_state: &'a mut GlobalState, | 249 | let diagnostics = self.diagnostics.diagnostics_for(file_id).cloned().collect(); |
679 | msg_sender: &'a Sender<lsp_server::Message>, | 250 | self.send_notification::<lsp_types::notification::PublishDiagnostics>( |
680 | task_sender: &'a Sender<Task>, | 251 | lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version: None }, |
681 | request_received: Instant, | 252 | ); |
682 | } | ||
683 | |||
684 | impl<'a> PoolDispatcher<'a> { | ||
685 | /// Dispatches the request onto the current thread | ||
686 | fn on_sync<R>( | ||
687 | &mut self, | ||
688 | f: fn(&mut GlobalState, R::Params) -> Result<R::Result>, | ||
689 | ) -> Result<&mut Self> | ||
690 | where | ||
691 | R: lsp_types::request::Request + 'static, | ||
692 | R::Params: DeserializeOwned + panic::UnwindSafe + 'static, | ||
693 | R::Result: Serialize + 'static, | ||
694 | { | ||
695 | let (id, params) = match self.parse::<R>() { | ||
696 | Some(it) => it, | ||
697 | None => { | ||
698 | return Ok(self); | ||
699 | } | ||
700 | }; | ||
701 | let world = panic::AssertUnwindSafe(&mut *self.global_state); | ||
702 | let task = panic::catch_unwind(move || { | ||
703 | let result = f(world.0, params); | ||
704 | result_to_task::<R>(id, result) | ||
705 | }) | ||
706 | .map_err(|_| format!("sync task {:?} panicked", R::METHOD))?; | ||
707 | on_task(task, self.msg_sender, self.global_state); | ||
708 | Ok(self) | ||
709 | } | ||
710 | |||
711 | /// Dispatches the request onto thread pool | ||
712 | fn on<R>( | ||
713 | &mut self, | ||
714 | f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, | ||
715 | ) -> Result<&mut Self> | ||
716 | where | ||
717 | R: lsp_types::request::Request + 'static, | ||
718 | R::Params: DeserializeOwned + Send + 'static, | ||
719 | R::Result: Serialize + 'static, | ||
720 | { | ||
721 | let (id, params) = match self.parse::<R>() { | ||
722 | Some(it) => it, | ||
723 | None => { | ||
724 | return Ok(self); | ||
725 | } | 253 | } |
726 | }; | 254 | } |
727 | 255 | ||
728 | self.pool.execute({ | 256 | let loop_duration = loop_start.elapsed(); |
729 | let world = self.global_state.snapshot(); | 257 | if loop_duration > Duration::from_millis(100) { |
730 | let sender = self.task_sender.clone(); | 258 | log::warn!("overly long loop turn: {:?}", loop_duration); |
731 | move || { | 259 | if env::var("RA_PROFILE").is_ok() { |
732 | let result = f(world, params); | 260 | self.show_message( |
733 | let task = result_to_task::<R>(id, result); | 261 | lsp_types::MessageType::Error, |
734 | sender.send(task).unwrap(); | 262 | format!("overly long loop turn: {:?}", loop_duration), |
263 | ) | ||
735 | } | 264 | } |
736 | }); | 265 | } |
737 | 266 | Ok(()) | |
738 | Ok(self) | ||
739 | } | 267 | } |
740 | 268 | ||
741 | fn parse<R>(&mut self) -> Option<(RequestId, R::Params)> | 269 | fn on_request(&mut self, request_received: Instant, req: Request) -> Result<()> { |
742 | where | 270 | self.register_request(&req, request_received); |
743 | R: lsp_types::request::Request + 'static, | 271 | |
744 | R::Params: DeserializeOwned + 'static, | 272 | RequestDispatcher { req: Some(req), global_state: self } |
745 | { | 273 | .on_sync::<lsp_ext::CollectGarbage>(|s, ()| Ok(s.analysis_host.collect_garbage()))? |
746 | let req = self.req.take()?; | 274 | .on_sync::<lsp_ext::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))? |
747 | let (id, params) = match req.extract::<R::Params>(R::METHOD) { | 275 | .on_sync::<lsp_ext::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))? |
748 | Ok(it) => it, | 276 | .on_sync::<lsp_types::request::Shutdown>(|_, ()| Ok(()))? |
749 | Err(req) => { | 277 | .on_sync::<lsp_types::request::SelectionRangeRequest>(|s, p| { |
750 | self.req = Some(req); | 278 | handlers::handle_selection_range(s.snapshot(), p) |
751 | return None; | 279 | })? |
752 | } | 280 | .on_sync::<lsp_ext::MatchingBrace>(|s, p| { |
753 | }; | 281 | handlers::handle_matching_brace(s.snapshot(), p) |
754 | self.global_state | 282 | })? |
755 | .req_queue | 283 | .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)? |
756 | .incoming | 284 | .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)? |
757 | .register(id.clone(), (R::METHOD, self.request_received)); | 285 | .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)? |
758 | Some((id, params)) | 286 | .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)? |
287 | .on::<lsp_ext::Runnables>(handlers::handle_runnables)? | ||
288 | .on::<lsp_ext::InlayHints>(handlers::handle_inlay_hints)? | ||
289 | .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)? | ||
290 | .on::<lsp_ext::ResolveCodeActionRequest>(handlers::handle_resolve_code_action)? | ||
291 | .on::<lsp_ext::HoverRequest>(handlers::handle_hover)? | ||
292 | .on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting)? | ||
293 | .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)? | ||
294 | .on::<lsp_types::request::WorkspaceSymbol>(handlers::handle_workspace_symbol)? | ||
295 | .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)? | ||
296 | .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)? | ||
297 | .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)? | ||
298 | .on::<lsp_types::request::Completion>(handlers::handle_completion)? | ||
299 | .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)? | ||
300 | .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)? | ||
301 | .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)? | ||
302 | .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)? | ||
303 | .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)? | ||
304 | .on::<lsp_types::request::Rename>(handlers::handle_rename)? | ||
305 | .on::<lsp_types::request::References>(handlers::handle_references)? | ||
306 | .on::<lsp_types::request::Formatting>(handlers::handle_formatting)? | ||
307 | .on::<lsp_types::request::DocumentHighlightRequest>( | ||
308 | handlers::handle_document_highlight, | ||
309 | )? | ||
310 | .on::<lsp_types::request::CallHierarchyPrepare>( | ||
311 | handlers::handle_call_hierarchy_prepare, | ||
312 | )? | ||
313 | .on::<lsp_types::request::CallHierarchyIncomingCalls>( | ||
314 | handlers::handle_call_hierarchy_incoming, | ||
315 | )? | ||
316 | .on::<lsp_types::request::CallHierarchyOutgoingCalls>( | ||
317 | handlers::handle_call_hierarchy_outgoing, | ||
318 | )? | ||
319 | .on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)? | ||
320 | .on::<lsp_types::request::SemanticTokensRangeRequest>( | ||
321 | handlers::handle_semantic_tokens_range, | ||
322 | )? | ||
323 | .on::<lsp_ext::Ssr>(handlers::handle_ssr)? | ||
324 | .finish(); | ||
325 | Ok(()) | ||
759 | } | 326 | } |
760 | 327 | fn on_notification(&mut self, not: Notification) -> Result<()> { | |
761 | fn finish(&mut self) { | 328 | NotificationDispatcher { not: Some(not), global_state: self } |
762 | match self.req.take() { | 329 | .on::<lsp_types::notification::Cancel>(|this, params| { |
763 | None => (), | 330 | let id: lsp_server::RequestId = match params.id { |
764 | Some(req) => { | 331 | lsp_types::NumberOrString::Number(id) => id.into(), |
765 | log::error!("unknown request: {:?}", req); | 332 | lsp_types::NumberOrString::String(id) => id.into(), |
766 | let resp = Response::new_err( | 333 | }; |
767 | req.id, | 334 | this.cancel(id); |
768 | ErrorCode::MethodNotFound as i32, | 335 | Ok(()) |
769 | "unknown request".to_string(), | 336 | })? |
337 | .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| { | ||
338 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | ||
339 | if !this.mem_docs.insert(path.clone()) { | ||
340 | log::error!("duplicate DidOpenTextDocument: {}", path) | ||
341 | } | ||
342 | this.vfs | ||
343 | .write() | ||
344 | .0 | ||
345 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); | ||
346 | } | ||
347 | Ok(()) | ||
348 | })? | ||
349 | .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| { | ||
350 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | ||
351 | assert!(this.mem_docs.contains(&path)); | ||
352 | let vfs = &mut this.vfs.write().0; | ||
353 | let file_id = vfs.file_id(&path).unwrap(); | ||
354 | let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap(); | ||
355 | apply_document_changes(&mut text, params.content_changes); | ||
356 | vfs.set_file_contents(path, Some(text.into_bytes())) | ||
357 | } | ||
358 | Ok(()) | ||
359 | })? | ||
360 | .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| { | ||
361 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | ||
362 | if !this.mem_docs.remove(&path) { | ||
363 | log::error!("orphan DidCloseTextDocument: {}", path) | ||
364 | } | ||
365 | if let Some(path) = path.as_path() { | ||
366 | this.loader.handle.invalidate(path.to_path_buf()); | ||
367 | } | ||
368 | } | ||
369 | this.send_notification::<lsp_types::notification::PublishDiagnostics>( | ||
370 | lsp_types::PublishDiagnosticsParams { | ||
371 | uri: params.text_document.uri, | ||
372 | diagnostics: Vec::new(), | ||
373 | version: None, | ||
374 | }, | ||
770 | ); | 375 | ); |
771 | self.msg_sender.send(resp.into()).unwrap(); | 376 | Ok(()) |
772 | } | 377 | })? |
773 | } | 378 | .on::<lsp_types::notification::DidSaveTextDocument>(|this, _params| { |
774 | } | 379 | if let Some(flycheck) = &this.flycheck { |
775 | } | 380 | flycheck.handle.update(); |
776 | |||
777 | fn result_to_task<R>(id: RequestId, result: Result<R::Result>) -> Task | ||
778 | where | ||
779 | R: lsp_types::request::Request + 'static, | ||
780 | R::Params: DeserializeOwned + 'static, | ||
781 | R::Result: Serialize + 'static, | ||
782 | { | ||
783 | let response = match result { | ||
784 | Ok(resp) => Response::new_ok(id, &resp), | ||
785 | Err(e) => match e.downcast::<LspError>() { | ||
786 | Ok(lsp_error) => Response::new_err(id, lsp_error.code, lsp_error.message), | ||
787 | Err(e) => { | ||
788 | if is_canceled(&*e) { | ||
789 | Response::new_err( | ||
790 | id, | ||
791 | ErrorCode::ContentModified as i32, | ||
792 | "content modified".to_string(), | ||
793 | ) | ||
794 | } else { | ||
795 | Response::new_err(id, ErrorCode::InternalError as i32, e.to_string()) | ||
796 | } | 381 | } |
797 | } | 382 | Ok(()) |
798 | }, | 383 | })? |
799 | }; | 384 | .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| { |
800 | Task::Respond(response) | 385 | // As stated in https://github.com/microsoft/language-server-protocol/issues/676, |
801 | } | 386 | // this notification's parameters should be ignored and the actual config queried separately. |
802 | 387 | this.send_request::<lsp_types::request::WorkspaceConfiguration>( | |
803 | fn update_file_notifications_on_threadpool( | 388 | lsp_types::ConfigurationParams { |
804 | pool: &ThreadPool, | 389 | items: vec![lsp_types::ConfigurationItem { |
805 | world: GlobalStateSnapshot, | 390 | scope_uri: None, |
806 | task_sender: Sender<Task>, | 391 | section: Some("rust-analyzer".to_string()), |
807 | subscriptions: Vec<FileId>, | 392 | }], |
808 | ) { | 393 | }, |
809 | log::trace!("updating notifications for {:?}", subscriptions); | 394 | |this, resp| { |
810 | if world.config.publish_diagnostics { | 395 | log::debug!("config update response: '{:?}", resp); |
811 | pool.execute(move || { | 396 | let Response { error, result, .. } = resp; |
812 | for file_id in subscriptions { | 397 | |
813 | match handlers::publish_diagnostics(&world, file_id) { | 398 | match (error, result) { |
814 | Err(e) => { | 399 | (Some(err), _) => { |
815 | if !is_canceled(&*e) { | 400 | log::error!("failed to fetch the server settings: {:?}", err) |
816 | log::error!("failed to compute diagnostics: {:?}", e); | 401 | } |
402 | (None, Some(configs)) => { | ||
403 | if let Some(new_config) = configs.get(0) { | ||
404 | let mut config = this.config.clone(); | ||
405 | config.update(&new_config); | ||
406 | this.update_configuration(config); | ||
407 | } | ||
408 | } | ||
409 | (None, None) => log::error!( | ||
410 | "received empty server settings response from the client" | ||
411 | ), | ||
817 | } | 412 | } |
818 | } | 413 | }, |
819 | Ok(task) => { | 414 | ); |
820 | task_sender.send(Task::Diagnostic(task)).unwrap(); | 415 | |
416 | return Ok(()); | ||
417 | })? | ||
418 | .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| { | ||
419 | for change in params.changes { | ||
420 | if let Ok(path) = from_proto::abs_path(&change.uri) { | ||
421 | this.loader.handle.invalidate(path); | ||
821 | } | 422 | } |
822 | } | 423 | } |
424 | Ok(()) | ||
425 | })? | ||
426 | .finish(); | ||
427 | Ok(()) | ||
428 | } | ||
429 | fn update_file_notifications_on_threadpool(&mut self, subscriptions: Vec<FileId>) { | ||
430 | log::trace!("updating notifications for {:?}", subscriptions); | ||
431 | if self.config.publish_diagnostics { | ||
432 | let snapshot = self.snapshot(); | ||
433 | let subscriptions = subscriptions.clone(); | ||
434 | self.task_pool.handle.spawn(move || { | ||
435 | let diagnostics = subscriptions | ||
436 | .into_iter() | ||
437 | .filter_map(|file_id| { | ||
438 | handlers::publish_diagnostics(&snapshot, file_id) | ||
439 | .map_err(|err| { | ||
440 | if !is_canceled(&*err) { | ||
441 | log::error!("failed to compute diagnostics: {:?}", err); | ||
442 | } | ||
443 | () | ||
444 | }) | ||
445 | .ok() | ||
446 | .map(|diags| (file_id, diags)) | ||
447 | }) | ||
448 | .collect::<Vec<_>>(); | ||
449 | Task::Diagnostics(diagnostics) | ||
450 | }) | ||
451 | } | ||
452 | self.task_pool.handle.spawn({ | ||
453 | let subs = subscriptions; | ||
454 | let snap = self.snapshot(); | ||
455 | move || { | ||
456 | snap.analysis.prime_caches(subs).unwrap_or_else(|_: Canceled| ()); | ||
457 | Task::Unit | ||
823 | } | 458 | } |
824 | }) | 459 | }); |
825 | } | 460 | } |
826 | } | 461 | } |
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs new file mode 100644 index 000000000..ec71f8b29 --- /dev/null +++ b/crates/rust-analyzer/src/reload.rs | |||
@@ -0,0 +1,243 @@ | |||
1 | //! Project loading & configuration updates | ||
2 | use std::{mem, sync::Arc}; | ||
3 | |||
4 | use crossbeam_channel::unbounded; | ||
5 | use flycheck::FlycheckHandle; | ||
6 | use ra_db::{CrateGraph, SourceRoot, VfsPath}; | ||
7 | use ra_ide::AnalysisChange; | ||
8 | use ra_project_model::{PackageRoot, ProcMacroClient, ProjectWorkspace}; | ||
9 | use vfs::{file_set::FileSetConfig, AbsPath}; | ||
10 | |||
11 | use crate::{ | ||
12 | config::{Config, FilesWatcher, LinkedProject}, | ||
13 | global_state::{GlobalState, Handle}, | ||
14 | }; | ||
15 | |||
16 | impl GlobalState { | ||
17 | pub(crate) fn update_configuration(&mut self, config: Config) { | ||
18 | let old_config = mem::replace(&mut self.config, config); | ||
19 | if self.config.lru_capacity != old_config.lru_capacity { | ||
20 | self.analysis_host.update_lru_capacity(old_config.lru_capacity); | ||
21 | } | ||
22 | if self.config.flycheck != old_config.flycheck { | ||
23 | self.reload_flycheck(); | ||
24 | } | ||
25 | } | ||
26 | pub(crate) fn reload(&mut self) { | ||
27 | let workspaces = { | ||
28 | if self.config.linked_projects.is_empty() | ||
29 | && self.config.notifications.cargo_toml_not_found | ||
30 | { | ||
31 | self.show_message( | ||
32 | lsp_types::MessageType::Error, | ||
33 | "rust-analyzer failed to discover workspace".to_string(), | ||
34 | ); | ||
35 | }; | ||
36 | |||
37 | self.config | ||
38 | .linked_projects | ||
39 | .iter() | ||
40 | .map(|project| match project { | ||
41 | LinkedProject::ProjectManifest(manifest) => { | ||
42 | ra_project_model::ProjectWorkspace::load( | ||
43 | manifest.clone(), | ||
44 | &self.config.cargo, | ||
45 | self.config.with_sysroot, | ||
46 | ) | ||
47 | } | ||
48 | LinkedProject::InlineJsonProject(it) => { | ||
49 | Ok(ra_project_model::ProjectWorkspace::Json { project: it.clone() }) | ||
50 | } | ||
51 | }) | ||
52 | .collect::<Vec<_>>() | ||
53 | .into_iter() | ||
54 | .filter_map(|res| { | ||
55 | res.map_err(|err| { | ||
56 | log::error!("failed to load workspace: {:#}", err); | ||
57 | self.show_message( | ||
58 | lsp_types::MessageType::Error, | ||
59 | format!("rust-analyzer failed to load workspace: {:#}", err), | ||
60 | ); | ||
61 | }) | ||
62 | .ok() | ||
63 | }) | ||
64 | .collect::<Vec<_>>() | ||
65 | }; | ||
66 | |||
67 | if let FilesWatcher::Client = self.config.files.watcher { | ||
68 | let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { | ||
69 | watchers: workspaces | ||
70 | .iter() | ||
71 | .flat_map(ProjectWorkspace::to_roots) | ||
72 | .filter(PackageRoot::is_member) | ||
73 | .map(|root| format!("{}/**/*.rs", root.path().display())) | ||
74 | .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None }) | ||
75 | .collect(), | ||
76 | }; | ||
77 | let registration = lsp_types::Registration { | ||
78 | id: "file-watcher".to_string(), | ||
79 | method: "workspace/didChangeWatchedFiles".to_string(), | ||
80 | register_options: Some(serde_json::to_value(registration_options).unwrap()), | ||
81 | }; | ||
82 | self.send_request::<lsp_types::request::RegisterCapability>( | ||
83 | lsp_types::RegistrationParams { registrations: vec![registration] }, | ||
84 | |_, _| (), | ||
85 | ); | ||
86 | } | ||
87 | |||
88 | let mut change = AnalysisChange::new(); | ||
89 | |||
90 | let project_folders = ProjectFolders::new(&workspaces); | ||
91 | |||
92 | self.proc_macro_client = match &self.config.proc_macro_srv { | ||
93 | None => ProcMacroClient::dummy(), | ||
94 | Some((path, args)) => match ProcMacroClient::extern_process(path.into(), args) { | ||
95 | Ok(it) => it, | ||
96 | Err(err) => { | ||
97 | log::error!( | ||
98 | "Failed to run ra_proc_macro_srv from path {}, error: {:?}", | ||
99 | path.display(), | ||
100 | err | ||
101 | ); | ||
102 | ProcMacroClient::dummy() | ||
103 | } | ||
104 | }, | ||
105 | }; | ||
106 | let watch = match self.config.files.watcher { | ||
107 | FilesWatcher::Client => vec![], | ||
108 | FilesWatcher::Notify => project_folders.watch, | ||
109 | }; | ||
110 | self.loader.handle.set_config(vfs::loader::Config { load: project_folders.load, watch }); | ||
111 | |||
112 | // Create crate graph from all the workspaces | ||
113 | let crate_graph = { | ||
114 | let mut crate_graph = CrateGraph::default(); | ||
115 | let vfs = &mut self.vfs.write().0; | ||
116 | let loader = &mut self.loader; | ||
117 | let mut load = |path: &AbsPath| { | ||
118 | let contents = loader.handle.load_sync(path); | ||
119 | let path = vfs::VfsPath::from(path.to_path_buf()); | ||
120 | vfs.set_file_contents(path.clone(), contents); | ||
121 | vfs.file_id(&path) | ||
122 | }; | ||
123 | for ws in workspaces.iter() { | ||
124 | crate_graph.extend(ws.to_crate_graph( | ||
125 | self.config.cargo.target.as_deref(), | ||
126 | &self.proc_macro_client, | ||
127 | &mut load, | ||
128 | )); | ||
129 | } | ||
130 | |||
131 | crate_graph | ||
132 | }; | ||
133 | change.set_crate_graph(crate_graph); | ||
134 | |||
135 | self.source_root_config = project_folders.source_root_config; | ||
136 | self.workspaces = Arc::new(workspaces); | ||
137 | |||
138 | self.analysis_host.apply_change(change); | ||
139 | self.process_changes(); | ||
140 | self.reload_flycheck(); | ||
141 | } | ||
142 | |||
143 | fn reload_flycheck(&mut self) { | ||
144 | let config = match self.config.flycheck.clone() { | ||
145 | Some(it) => it, | ||
146 | None => { | ||
147 | self.flycheck = None; | ||
148 | return; | ||
149 | } | ||
150 | }; | ||
151 | |||
152 | // FIXME: Figure out the multi-workspace situation | ||
153 | self.flycheck = self.workspaces.iter().find_map(move |w| match w { | ||
154 | ProjectWorkspace::Cargo { cargo, .. } => { | ||
155 | let (sender, receiver) = unbounded(); | ||
156 | let sender = Box::new(move |msg| sender.send(msg).unwrap()); | ||
157 | let cargo_project_root = cargo.workspace_root().to_path_buf(); | ||
158 | let handle = | ||
159 | FlycheckHandle::spawn(sender, config.clone(), cargo_project_root.into()); | ||
160 | Some(Handle { handle, receiver }) | ||
161 | } | ||
162 | ProjectWorkspace::Json { .. } => { | ||
163 | log::warn!("Cargo check watching only supported for cargo workspaces, disabling"); | ||
164 | None | ||
165 | } | ||
166 | }) | ||
167 | } | ||
168 | } | ||
169 | |||
170 | #[derive(Default)] | ||
171 | pub(crate) struct ProjectFolders { | ||
172 | pub(crate) load: Vec<vfs::loader::Entry>, | ||
173 | pub(crate) watch: Vec<usize>, | ||
174 | pub(crate) source_root_config: SourceRootConfig, | ||
175 | } | ||
176 | |||
177 | impl ProjectFolders { | ||
178 | pub(crate) fn new(workspaces: &[ProjectWorkspace]) -> ProjectFolders { | ||
179 | let mut res = ProjectFolders::default(); | ||
180 | let mut fsc = FileSetConfig::builder(); | ||
181 | let mut local_filesets = vec![]; | ||
182 | |||
183 | for root in workspaces.iter().flat_map(|it| it.to_roots()) { | ||
184 | let path = root.path().to_owned(); | ||
185 | |||
186 | let mut file_set_roots: Vec<VfsPath> = vec![]; | ||
187 | |||
188 | let entry = if root.is_member() { | ||
189 | vfs::loader::Entry::local_cargo_package(path.to_path_buf()) | ||
190 | } else { | ||
191 | vfs::loader::Entry::cargo_package_dependency(path.to_path_buf()) | ||
192 | }; | ||
193 | res.load.push(entry); | ||
194 | if root.is_member() { | ||
195 | res.watch.push(res.load.len() - 1); | ||
196 | } | ||
197 | |||
198 | if let Some(out_dir) = root.out_dir() { | ||
199 | let out_dir = out_dir.to_path_buf(); | ||
200 | res.load.push(vfs::loader::Entry::rs_files_recursively(out_dir.clone())); | ||
201 | if root.is_member() { | ||
202 | res.watch.push(res.load.len() - 1); | ||
203 | } | ||
204 | file_set_roots.push(out_dir.into()); | ||
205 | } | ||
206 | file_set_roots.push(path.to_path_buf().into()); | ||
207 | |||
208 | if root.is_member() { | ||
209 | local_filesets.push(fsc.len()); | ||
210 | } | ||
211 | fsc.add_file_set(file_set_roots) | ||
212 | } | ||
213 | |||
214 | let fsc = fsc.build(); | ||
215 | res.source_root_config = SourceRootConfig { fsc, local_filesets }; | ||
216 | |||
217 | res | ||
218 | } | ||
219 | } | ||
220 | |||
221 | #[derive(Default, Debug)] | ||
222 | pub(crate) struct SourceRootConfig { | ||
223 | pub(crate) fsc: FileSetConfig, | ||
224 | pub(crate) local_filesets: Vec<usize>, | ||
225 | } | ||
226 | |||
227 | impl SourceRootConfig { | ||
228 | pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> { | ||
229 | self.fsc | ||
230 | .partition(vfs) | ||
231 | .into_iter() | ||
232 | .enumerate() | ||
233 | .map(|(idx, file_set)| { | ||
234 | let is_local = self.local_filesets.contains(&idx); | ||
235 | if is_local { | ||
236 | SourceRoot::new_local(file_set) | ||
237 | } else { | ||
238 | SourceRoot::new_library(file_set) | ||
239 | } | ||
240 | }) | ||
241 | .collect() | ||
242 | } | ||
243 | } | ||
diff --git a/crates/rust-analyzer/src/thread_pool.rs b/crates/rust-analyzer/src/thread_pool.rs new file mode 100644 index 000000000..4fa502925 --- /dev/null +++ b/crates/rust-analyzer/src/thread_pool.rs | |||
@@ -0,0 +1,35 @@ | |||
1 | //! A thin wrapper around `ThreadPool` to make sure that we join all things | ||
2 | //! properly. | ||
3 | use crossbeam_channel::Sender; | ||
4 | |||
5 | pub(crate) struct TaskPool<T> { | ||
6 | sender: Sender<T>, | ||
7 | inner: threadpool::ThreadPool, | ||
8 | } | ||
9 | |||
10 | impl<T> TaskPool<T> { | ||
11 | pub(crate) fn new(sender: Sender<T>) -> TaskPool<T> { | ||
12 | TaskPool { sender, inner: threadpool::ThreadPool::default() } | ||
13 | } | ||
14 | |||
15 | pub(crate) fn spawn<F>(&mut self, task: F) | ||
16 | where | ||
17 | F: FnOnce() -> T + Send + 'static, | ||
18 | T: Send + 'static, | ||
19 | { | ||
20 | self.inner.execute({ | ||
21 | let sender = self.sender.clone(); | ||
22 | move || sender.send(task()).unwrap() | ||
23 | }) | ||
24 | } | ||
25 | |||
26 | pub(crate) fn len(&self) -> usize { | ||
27 | self.inner.queued_count() | ||
28 | } | ||
29 | } | ||
30 | |||
31 | impl<T> Drop for TaskPool<T> { | ||
32 | fn drop(&mut self) { | ||
33 | self.inner.join() | ||
34 | } | ||
35 | } | ||
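The `TaskPool` above is a thin wrapper: a `threadpool::ThreadPool` plus a result channel, joined on drop. A minimal standalone sketch of the same pattern, assuming only the `threadpool` and `crossbeam-channel` crates (this is not the rust-analyzer API itself):

```rust
use crossbeam_channel::unbounded;

fn main() {
    let (sender, receiver) = unbounded::<String>();
    let pool = threadpool::ThreadPool::default();

    // Each task sends its result back over the channel, so a main loop could
    // `select!` on it alongside other event sources (LSP messages, VFS events).
    for i in 0..4 {
        let sender = sender.clone();
        pool.execute(move || sender.send(format!("task {} done", i)).unwrap());
    }
    drop(sender); // close the channel once all tasks are queued

    for msg in receiver {
        println!("{}", msg);
    }

    pool.join(); // mirrors TaskPool's Drop impl: wait for in-flight tasks
}
```

Routing results through a channel rather than returning futures keeps the main loop single-threaded and lets it treat pool results like any other event.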
diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs index 68fdb8cb0..25ba8d798 100644 --- a/crates/vfs-notify/src/lib.rs +++ b/crates/vfs-notify/src/lib.rs | |||
@@ -82,7 +82,12 @@ impl NotifyActor { | |||
82 | watcher_receiver, | 82 | watcher_receiver, |
83 | } | 83 | } |
84 | } | 84 | } |
85 | 85 | fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> { | |
86 | select! { | ||
87 | recv(receiver) -> it => it.ok().map(Event::Message), | ||
88 | recv(&self.watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())), | ||
89 | } | ||
90 | } | ||
86 | fn run(mut self, inbox: Receiver<Message>) { | 91 | fn run(mut self, inbox: Receiver<Message>) { |
87 | while let Some(event) = self.next_event(&inbox) { | 92 | while let Some(event) = self.next_event(&inbox) { |
88 | log::debug!("vfs-notify event: {:?}", event); | 93 | log::debug!("vfs-notify event: {:?}", event); |
@@ -154,12 +159,6 @@ impl NotifyActor { | |||
154 | } | 159 | } |
155 | } | 160 | } |
156 | } | 161 | } |
157 | fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> { | ||
158 | select! { | ||
159 | recv(receiver) -> it => it.ok().map(Event::Message), | ||
160 | recv(&self.watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())), | ||
161 | } | ||
162 | } | ||
163 | fn load_entry( | 162 | fn load_entry( |
164 | &mut self, | 163 | &mut self, |
165 | entry: loader::Entry, | 164 | entry: loader::Entry, |
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs index 0173f7464..d0ddeafe7 100644 --- a/crates/vfs/src/file_set.rs +++ b/crates/vfs/src/file_set.rs | |||
@@ -18,7 +18,7 @@ impl FileSet { | |||
18 | pub fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | 18 | pub fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { |
19 | let mut base = self.paths[&anchor].clone(); | 19 | let mut base = self.paths[&anchor].clone(); |
20 | base.pop(); | 20 | base.pop(); |
21 | let path = base.join(path); | 21 | let path = base.join(path)?; |
22 | let res = self.files.get(&path).copied(); | 22 | let res = self.files.get(&path).copied(); |
23 | res | 23 | res |
24 | } | 24 | } |
diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index 940f91d0e..dc3031ada 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs | |||
@@ -22,15 +22,15 @@ impl VfsPath { | |||
22 | VfsPathRepr::VirtualPath(_) => None, | 22 | VfsPathRepr::VirtualPath(_) => None, |
23 | } | 23 | } |
24 | } | 24 | } |
25 | pub fn join(&self, path: &str) -> VfsPath { | 25 | pub fn join(&self, path: &str) -> Option<VfsPath> { |
26 | match &self.0 { | 26 | match &self.0 { |
27 | VfsPathRepr::PathBuf(it) => { | 27 | VfsPathRepr::PathBuf(it) => { |
28 | let res = it.join(path).normalize(); | 28 | let res = it.join(path).normalize(); |
29 | VfsPath(VfsPathRepr::PathBuf(res)) | 29 | Some(VfsPath(VfsPathRepr::PathBuf(res))) |
30 | } | 30 | } |
31 | VfsPathRepr::VirtualPath(it) => { | 31 | VfsPathRepr::VirtualPath(it) => { |
32 | let res = it.join(path); | 32 | let res = it.join(path)?; |
33 | VfsPath(VfsPathRepr::VirtualPath(res)) | 33 | Some(VfsPath(VfsPathRepr::VirtualPath(res))) |
34 | } | 34 | } |
35 | } | 35 | } |
36 | } | 36 | } |
@@ -101,13 +101,15 @@ impl VirtualPath { | |||
101 | self.0 = self.0[..pos].to_string(); | 101 | self.0 = self.0[..pos].to_string(); |
102 | true | 102 | true |
103 | } | 103 | } |
104 | fn join(&self, mut path: &str) -> VirtualPath { | 104 | fn join(&self, mut path: &str) -> Option<VirtualPath> { |
105 | let mut res = self.clone(); | 105 | let mut res = self.clone(); |
106 | while path.starts_with("../") { | 106 | while path.starts_with("../") { |
107 | assert!(res.pop()); | 107 | if !res.pop() { |
108 | return None; | ||
109 | } | ||
108 | path = &path["../".len()..] | 110 | path = &path["../".len()..] |
109 | } | 111 | } |
110 | res.0 = format!("{}/{}", res.0, path); | 112 | res.0 = format!("{}/{}", res.0, path); |
111 | res | 113 | Some(res) |
112 | } | 114 | } |
113 | } | 115 | } |
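Because `join` now returns an `Option`, a relative path that climbs above its root is surfaced to the caller instead of hitting the old `assert!`. A simplified, self-contained sketch of the virtual-path case (standalone code, not the actual `vfs` types):

```rust
// Simplified stand-in for VirtualPath::join: pop one segment per leading "../",
// returning None (instead of panicking) once there is nothing left to pop.
fn join(base: &str, mut path: &str) -> Option<String> {
    let mut res = base.to_string();
    while let Some(rest) = path.strip_prefix("../") {
        let pos = res.rfind('/')?; // climbing above the root fails the join
        res.truncate(pos);
        path = rest;
    }
    Some(format!("{}/{}", res, path))
}

fn main() {
    assert_eq!(join("/ws/crate", "../other/lib.rs").as_deref(), Some("/ws/other/lib.rs"));
    assert_eq!(join("/ws", "../../escape.rs"), None); // previously an assertion failure
}
```

Callers such as `FileSet::resolve_path` in the `file_set.rs` hunk above simply propagate the `None` with `?`.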
diff --git a/docs/dev/README.md b/docs/dev/README.md index 76e1da6cf..11dc5261b 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -166,6 +166,17 @@ That said, adding an innocent-looking `pub use` is a very simple way to break en | |||
166 | Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate | 166 | Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate |
167 | https://www.tedinski.com/2018/02/06/system-boundaries.html | 167 | https://www.tedinski.com/2018/02/06/system-boundaries.html |
168 | 168 | ||
169 | ## Minimal Tests | ||
170 | |||
171 | Most tests in rust-analyzer start with a snippet of Rust code. | ||
172 | These snippets should be minimal -- if you copy-paste a snippet of real code into the tests, make sure to remove everything that can be removed. | ||
173 | There are many benefits to this: | ||
174 | |||
175 | * less to read or to scroll past | ||
176 | * easier to understand what exactly is tested | ||
177 | * less stuff printed during printf-debugging | ||
178 | * less time to run tests | ||
179 | |||
169 | ## Order of Imports | 180 | ## Order of Imports |
170 | 181 | ||
171 | We separate import groups with blank lines | 182 | We separate import groups with blank lines |
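To make the new "Minimal Tests" guideline concrete, here is an illustrative shape such a test might take; the `check_completion` helper and the fixture are hypothetical, not code from the repository. The point is that the embedded snippet carries only what the assertion needs:

```rust
// Illustrative only: `check_completion` is a hypothetical helper standing in
// for whatever assertion the real test module provides. The fixture is two
// lines -- one struct, one function -- not a pasted-in real-world module.
#[test]
fn completes_struct_field() {
    check_completion(
        r#"
struct S { foo: u32 }
fn f(s: S) { s.<|> }
"#,
        "foo",
    );
}
```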
diff --git a/editors/code/package.json b/editors/code/package.json index 68484a370..f542a490a 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -336,6 +336,14 @@ | |||
336 | "default": null, | 336 | "default": null, |
337 | "description": "List of features to activate. Defaults to `rust-analyzer.cargo.features`." | 337 | "description": "List of features to activate. Defaults to `rust-analyzer.cargo.features`." |
338 | }, | 338 | }, |
339 | "rust-analyzer.cargoRunner": { | ||
340 | "type": [ | ||
341 | "null", | ||
342 | "string" | ||
343 | ], | ||
344 | "default": null, | ||
345 | "description": "Custom cargo runner extension ID." | ||
346 | }, | ||
339 | "rust-analyzer.inlayHints.enable": { | 347 | "rust-analyzer.inlayHints.enable": { |
340 | "type": "boolean", | 348 | "type": "boolean", |
341 | "default": true, | 349 | "default": true, |
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 48a25495f..8c9d7802f 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts | |||
@@ -394,7 +394,7 @@ export function run(ctx: Ctx): Cmd { | |||
394 | 394 | ||
395 | item.detail = 'rerun'; | 395 | item.detail = 'rerun'; |
396 | prevRunnable = item; | 396 | prevRunnable = item; |
397 | const task = createTask(item.runnable); | 397 | const task = await createTask(item.runnable, ctx.config); |
398 | return await vscode.tasks.executeTask(task); | 398 | return await vscode.tasks.executeTask(task); |
399 | }; | 399 | }; |
400 | } | 400 | } |
@@ -404,7 +404,7 @@ export function runSingle(ctx: Ctx): Cmd { | |||
404 | const editor = ctx.activeRustEditor; | 404 | const editor = ctx.activeRustEditor; |
405 | if (!editor) return; | 405 | if (!editor) return; |
406 | 406 | ||
407 | const task = createTask(runnable); | 407 | const task = await createTask(runnable, ctx.config); |
408 | task.group = vscode.TaskGroup.Build; | 408 | task.group = vscode.TaskGroup.Build; |
409 | task.presentationOptions = { | 409 | task.presentationOptions = { |
410 | reveal: vscode.TaskRevealKind.Always, | 410 | reveal: vscode.TaskRevealKind.Always, |
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 9591d4fe3..fc95a7de6 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts | |||
@@ -110,6 +110,10 @@ export class Config { | |||
110 | }; | 110 | }; |
111 | } | 111 | } |
112 | 112 | ||
113 | get cargoRunner() { | ||
114 | return this.get<string | undefined>("cargoRunner"); | ||
115 | } | ||
116 | |||
113 | get debug() { | 117 | get debug() { |
114 | // "/rustc/<id>" used by suggestions only. | 118 | // "/rustc/<id>" used by suggestions only. |
115 | const { ["/rustc/<id>"]: _, ...sourceFileMap } = this.get<Record<string, string>>("debug.sourceFileMap"); | 119 | const { ["/rustc/<id>"]: _, ...sourceFileMap } = this.get<Record<string, string>>("debug.sourceFileMap"); |
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index cdb63b46f..5ceab8b44 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts | |||
@@ -114,7 +114,7 @@ export async function activate(context: vscode.ExtensionContext) { | |||
114 | ctx.registerCommand('applyActionGroup', commands.applyActionGroup); | 114 | ctx.registerCommand('applyActionGroup', commands.applyActionGroup); |
115 | ctx.registerCommand('gotoLocation', commands.gotoLocation); | 115 | ctx.registerCommand('gotoLocation', commands.gotoLocation); |
116 | 116 | ||
117 | ctx.pushCleanup(activateTaskProvider(workspaceFolder)); | 117 | ctx.pushCleanup(activateTaskProvider(workspaceFolder, ctx.config)); |
118 | 118 | ||
119 | activateInlayHints(ctx); | 119 | activateInlayHints(ctx); |
120 | 120 | ||
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index bb060cfe1..766b05112 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts | |||
@@ -1,10 +1,11 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | import * as lc from 'vscode-languageclient'; | 2 | import * as lc from 'vscode-languageclient'; |
3 | import * as ra from './lsp_ext'; | 3 | import * as ra from './lsp_ext'; |
4 | import * as toolchain from "./toolchain"; | 4 | import * as tasks from './tasks'; |
5 | 5 | ||
6 | import { Ctx } from './ctx'; | 6 | import { Ctx } from './ctx'; |
7 | import { makeDebugConfig } from './debug'; | 7 | import { makeDebugConfig } from './debug'; |
8 | import { Config } from './config'; | ||
8 | 9 | ||
9 | const quickPickButtons = [{ iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." }]; | 10 |
10 | 11 | ||
@@ -95,52 +96,29 @@ export class RunnableQuickPick implements vscode.QuickPickItem { | |||
95 | } | 96 | } |
96 | } | 97 | } |
97 | 98 | ||
98 | interface CargoTaskDefinition extends vscode.TaskDefinition { | 99 | export async function createTask(runnable: ra.Runnable, config: Config): Promise<vscode.Task> { |
99 | type: 'cargo'; | 100 | if (runnable.kind !== "cargo") { |
100 | label: string; | 101 | // rust-analyzer supports only one kind, "cargo" |
101 | command: string; | 102 | // do not use tasks.TASK_TYPE here, these are completely different meanings. |
102 | args: string[]; | ||
103 | env?: { [key: string]: string }; | ||
104 | } | ||
105 | |||
106 | export function createTask(runnable: ra.Runnable): vscode.Task { | ||
107 | const TASK_SOURCE = 'Rust'; | ||
108 | 103 | ||
109 | let command; | 104 | throw `Unexpected runnable kind: ${runnable.kind}`; |
110 | switch (runnable.kind) { | ||
111 | case "cargo": command = toolchain.getPathForExecutable("cargo"); | ||
112 | } | 105 | } |
106 | |||
113 | const args = [...runnable.args.cargoArgs]; // should be a copy! | 107 | const args = [...runnable.args.cargoArgs]; // should be a copy! |
114 | if (runnable.args.executableArgs.length > 0) { | 108 | if (runnable.args.executableArgs.length > 0) { |
115 | args.push('--', ...runnable.args.executableArgs); | 109 | args.push('--', ...runnable.args.executableArgs); |
116 | } | 110 | } |
117 | const definition: CargoTaskDefinition = { | 111 | const definition: tasks.CargoTaskDefinition = { |
118 | type: 'cargo', | 112 | type: tasks.TASK_TYPE, |
119 | label: runnable.label, | 113 | command: args[0], // run, test, etc... |
120 | command, | 114 | args: args.slice(1), |
121 | args, | 115 | cwd: runnable.args.workspaceRoot, |
122 | env: Object.assign({}, process.env as { [key: string]: string }, { "RUST_BACKTRACE": "short" }), | 116 | env: Object.assign({}, process.env as { [key: string]: string }, { "RUST_BACKTRACE": "short" }), |
123 | }; | 117 | }; |
124 | 118 | ||
125 | const execOption: vscode.ShellExecutionOptions = { | 119 | const target = vscode.workspace.workspaceFolders![0]; // safe, see main activate() |
126 | cwd: runnable.args.workspaceRoot || '.', | 120 | const cargoTask = await tasks.buildCargoTask(target, definition, runnable.label, args, config.cargoRunner, true); |
127 | env: definition.env, | 121 | cargoTask.presentationOptions.clear = true; |
128 | }; | 122 | |
129 | const exec = new vscode.ShellExecution( | 123 | return cargoTask; |
130 | definition.command, | ||
131 | definition.args, | ||
132 | execOption, | ||
133 | ); | ||
134 | |||
135 | const f = vscode.workspace.workspaceFolders![0]; | ||
136 | const t = new vscode.Task( | ||
137 | definition, | ||
138 | f, | ||
139 | definition.label, | ||
140 | TASK_SOURCE, | ||
141 | exec, | ||
142 | ['$rustc'], | ||
143 | ); | ||
144 | t.presentationOptions.clear = true; | ||
145 | return t; | ||
146 | } | 124 | } |
diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts index 9748824df..14abbd5b7 100644 --- a/editors/code/src/tasks.ts +++ b/editors/code/src/tasks.ts | |||
@@ -1,11 +1,14 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | import * as toolchain from "./toolchain"; | 2 | import * as toolchain from "./toolchain"; |
3 | import { Config } from './config'; | ||
4 | import { log } from './util'; | ||
3 | 5 | ||
4 | // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and | 6 | // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and |
5 | // our configuration should be compatible with it so use the same key. | 7 | // our configuration should be compatible with it so use the same key. |
6 | const TASK_TYPE = 'cargo'; | 8 | export const TASK_TYPE = 'cargo'; |
9 | export const TASK_SOURCE = 'rust'; | ||
7 | 10 | ||
8 | interface CargoTaskDefinition extends vscode.TaskDefinition { | 11 | export interface CargoTaskDefinition extends vscode.TaskDefinition { |
9 | command?: string; | 12 | command?: string; |
10 | args?: string[]; | 13 | args?: string[]; |
11 | cwd?: string; | 14 | cwd?: string; |
@@ -14,73 +17,101 @@ interface CargoTaskDefinition extends vscode.TaskDefinition { | |||
14 | 17 | ||
15 | class CargoTaskProvider implements vscode.TaskProvider { | 18 | class CargoTaskProvider implements vscode.TaskProvider { |
16 | private readonly target: vscode.WorkspaceFolder; | 19 | private readonly target: vscode.WorkspaceFolder; |
20 | private readonly config: Config; | ||
17 | 21 | ||
18 | constructor(target: vscode.WorkspaceFolder) { | 22 | constructor(target: vscode.WorkspaceFolder, config: Config) { |
19 | this.target = target; | 23 | this.target = target; |
24 | this.config = config; | ||
20 | } | 25 | } |
21 | 26 | ||
22 | provideTasks(): vscode.Task[] { | 27 | async provideTasks(): Promise<vscode.Task[]> { |
23 | // Detect Rust tasks. Currently we do not do any actual detection | 28 | // Detect Rust tasks. Currently we do not do any actual detection |
24 | // of tasks (e.g. aliases in .cargo/config) and just return a fixed | 29 | // of tasks (e.g. aliases in .cargo/config) and just return a fixed |
25 | // set of tasks that always exist. These tasks cannot be removed in | 30 | // set of tasks that always exist. These tasks cannot be removed in |
26 | // tasks.json - only tweaked. | 31 | // tasks.json - only tweaked. |
27 | 32 | ||
28 | const cargoPath = toolchain.cargoPath(); | 33 | const defs = [ |
29 | |||
30 | return [ | ||
31 | { command: 'build', group: vscode.TaskGroup.Build }, | 34 | { command: 'build', group: vscode.TaskGroup.Build }, |
32 | { command: 'check', group: vscode.TaskGroup.Build }, | 35 | { command: 'check', group: vscode.TaskGroup.Build }, |
33 | { command: 'test', group: vscode.TaskGroup.Test }, | 36 | { command: 'test', group: vscode.TaskGroup.Test }, |
34 | { command: 'clean', group: vscode.TaskGroup.Clean }, | 37 | { command: 'clean', group: vscode.TaskGroup.Clean }, |
35 | { command: 'run', group: undefined }, | 38 | { command: 'run', group: undefined }, |
36 | ] | 39 | ]; |
37 | .map(({ command, group }) => { | 40 | |
38 | const vscodeTask = new vscode.Task( | 41 | const tasks: vscode.Task[] = []; |
39 | // The contents of this object end up in the tasks.json entries. | 42 | for (const def of defs) { |
40 | { | 43 | const vscodeTask = await buildCargoTask(this.target, { type: TASK_TYPE, command: def.command }, `cargo ${def.command}`, [def.command], this.config.cargoRunner); |
41 | type: TASK_TYPE, | 44 | vscodeTask.group = def.group; |
42 | command, | 45 | tasks.push(vscodeTask); |
43 | }, | 46 | } |
44 | // The scope of the task - workspace or specific folder (global | 47 | |
45 | // is not supported). | 48 | return tasks; |
46 | this.target, | ||
47 | // The task name, and task source. These are shown in the UI as | ||
48 | // `${source}: ${name}`, e.g. `rust: cargo build`. | ||
49 | `cargo ${command}`, | ||
50 | 'rust', | ||
51 | // What to do when this command is executed. | ||
52 | new vscode.ShellExecution(cargoPath, [command]), | ||
53 | // Problem matchers. | ||
54 | ['$rustc'], | ||
55 | ); | ||
56 | vscodeTask.group = group; | ||
57 | return vscodeTask; | ||
58 | }); | ||
59 | } | 49 | } |
60 | 50 | ||
61 | resolveTask(task: vscode.Task): vscode.Task | undefined { | 51 | async resolveTask(task: vscode.Task): Promise<vscode.Task | undefined> { |
62 | // VSCode calls this for every cargo task in the user's tasks.json, | 52 | // VSCode calls this for every cargo task in the user's tasks.json, |
63 | // we need to inform VSCode how to execute that command by creating | 53 | // we need to inform VSCode how to execute that command by creating |
64 | // a ShellExecution for it. | 54 | // a ShellExecution for it. |
65 | 55 | ||
66 | const definition = task.definition as CargoTaskDefinition; | 56 | const definition = task.definition as CargoTaskDefinition; |
67 | 57 | ||
68 | if (definition.type === 'cargo' && definition.command) { | 58 | if (definition.type === TASK_TYPE && definition.command) { |
69 | const args = [definition.command].concat(definition.args ?? []); | 59 | const args = [definition.command].concat(definition.args ?? []); |
70 | 60 | ||
71 | return new vscode.Task( | 61 | return await buildCargoTask(this.target, definition, task.name, args, this.config.cargoRunner); |
72 | definition, | ||
73 | task.name, | ||
74 | 'rust', | ||
75 | new vscode.ShellExecution('cargo', args, definition), | ||
76 | ); | ||
77 | } | 62 | } |
78 | 63 | ||
79 | return undefined; | 64 | return undefined; |
80 | } | 65 | } |
81 | } | 66 | } |
82 | 67 | ||
83 | export function activateTaskProvider(target: vscode.WorkspaceFolder): vscode.Disposable { | 68 | export async function buildCargoTask( |
84 | const provider = new CargoTaskProvider(target); | 69 | target: vscode.WorkspaceFolder, |
70 | definition: CargoTaskDefinition, | ||
71 | name: string, | ||
72 | args: string[], | ||
73 | customRunner?: string, | ||
74 | throwOnError: boolean = false | ||
75 | ): Promise<vscode.Task> { | ||
76 | |||
77 | let exec: vscode.ShellExecution | undefined = undefined; | ||
78 | |||
79 | if (customRunner) { | ||
80 | const runnerCommand = `${customRunner}.buildShellExecution`; | ||
81 | try { | ||
82 | const runnerArgs = { kind: TASK_TYPE, args, cwd: definition.cwd, env: definition.env }; | ||
83 | const customExec = await vscode.commands.executeCommand(runnerCommand, runnerArgs); | ||
84 | if (customExec) { | ||
85 | if (customExec instanceof vscode.ShellExecution) { | ||
86 | exec = customExec; | ||
87 | } else { | ||
88 | log.debug("Invalid cargo ShellExecution", customExec); | ||
89 | throw "Invalid cargo ShellExecution."; | ||
90 | } | ||
91 | } | ||
92 | // fallback to default processing | ||
93 | |||
94 | } catch (e) { | ||
95 | if (throwOnError) throw `Cargo runner '${customRunner}' failed! ${e}`; | ||
96 | // fallback to default processing | ||
97 | } | ||
98 | } | ||
99 | |||
100 | if (!exec) { | ||
101 | exec = new vscode.ShellExecution(toolchain.cargoPath(), args, definition); | ||
102 | } | ||
103 | |||
104 | return new vscode.Task( | ||
105 | definition, | ||
106 | target, | ||
107 | name, | ||
108 | TASK_SOURCE, | ||
109 | exec, | ||
110 | ['$rustc'] | ||
111 | ); | ||
112 | } | ||
113 | |||
114 | export function activateTaskProvider(target: vscode.WorkspaceFolder, config: Config): vscode.Disposable { | ||
115 | const provider = new CargoTaskProvider(target, config); | ||
85 | return vscode.tasks.registerTaskProvider(TASK_TYPE, provider); | 116 | return vscode.tasks.registerTaskProvider(TASK_TYPE, provider); |
86 | } | 117 | } |