Diffstat (limited to 'crates')
 crates/ra_cargo_watch/src/lib.rs               | 175
 crates/ra_db/src/input.rs                      |  21
 crates/ra_project_model/Cargo.toml             |   1
 crates/ra_project_model/src/cargo_workspace.rs |  67
 crates/ra_project_model/src/lib.rs             |  43
 crates/rust-analyzer/src/cli/load_cargo.rs     |  18
 crates/rust-analyzer/src/world.rs              |  28
 7 files changed, 239 insertions(+), 114 deletions(-)
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs
index 1a6926db3..71aa28f0a 100644
--- a/crates/ra_cargo_watch/src/lib.rs
+++ b/crates/ra_cargo_watch/src/lib.rs
@@ -9,8 +9,8 @@ use lsp_types::{
 };
 use std::{
     io::{BufRead, BufReader},
-    path::PathBuf,
-    process::{Command, Stdio},
+    path::{Path, PathBuf},
+    process::{Child, Command, Stdio},
     thread::JoinHandle,
     time::Instant,
 };
@@ -246,18 +246,71 @@ enum CheckEvent {
     End,
 }
 
+pub fn run_cargo(
+    args: &[String],
+    current_dir: Option<&Path>,
+    mut on_message: impl FnMut(cargo_metadata::Message) -> bool,
+) -> Child {
+    let mut command = Command::new("cargo");
+    if let Some(current_dir) = current_dir {
+        command.current_dir(current_dir);
+    }
+
+    let mut child = command
+        .args(args)
+        .stdout(Stdio::piped())
+        .stderr(Stdio::null())
+        .stdin(Stdio::null())
+        .spawn()
+        .expect("couldn't launch cargo");
+
+    // We manually read a line at a time, instead of using serde's
+    // stream deserializers, because the deserializer cannot recover
+    // from an error, resulting in it getting stuck, because we try to
+    // be resilient against failures.
+    //
+    // Because cargo only outputs one JSON object per line, we can
+    // simply skip a line if it doesn't parse, which just ignores any
+    // erroneous output.
+    let stdout = BufReader::new(child.stdout.take().unwrap());
+    for line in stdout.lines() {
+        let line = match line {
+            Ok(line) => line,
+            Err(err) => {
+                log::error!("Couldn't read line from cargo: {}", err);
+                continue;
+            }
+        };
+
+        let message = serde_json::from_str::<cargo_metadata::Message>(&line);
+        let message = match message {
+            Ok(message) => message,
+            Err(err) => {
+                log::error!("Invalid json from cargo check, ignoring ({}): {:?} ", err, line);
+                continue;
+            }
+        };
+
+        if !on_message(message) {
+            break;
+        }
+    }
+
+    child
+}
+
 impl WatchThread {
     fn dummy() -> WatchThread {
         WatchThread { handle: None, message_recv: never() }
     }
 
-    fn new(options: &CheckOptions, workspace_root: &PathBuf) -> WatchThread {
+    fn new(options: &CheckOptions, workspace_root: &Path) -> WatchThread {
         let mut args: Vec<String> = vec![
             options.command.clone(),
             "--workspace".to_string(),
             "--message-format=json".to_string(),
             "--manifest-path".to_string(),
-            format!("{}/Cargo.toml", workspace_root.to_string_lossy()),
+            format!("{}/Cargo.toml", workspace_root.display()),
         ];
         if options.all_targets {
             args.push("--all-targets".to_string());
@@ -265,83 +318,47 @@ impl WatchThread {
         args.extend(options.args.iter().cloned());
 
         let (message_send, message_recv) = unbounded();
-        let enabled = options.enable;
-        let handle = std::thread::spawn(move || {
-            if !enabled {
-                return;
-            }
-
-            let mut command = Command::new("cargo")
-                .args(&args)
-                .stdout(Stdio::piped())
-                .stderr(Stdio::null())
-                .stdin(Stdio::null())
-                .spawn()
-                .expect("couldn't launch cargo");
-
-            // If we trigger an error here, we will do so in the loop instead,
-            // which will break out of the loop, and continue the shutdown
-            let _ = message_send.send(CheckEvent::Begin);
-
-            // We manually read a line at a time, instead of using serde's
-            // stream deserializers, because the deserializer cannot recover
-            // from an error, resulting in it getting stuck, because we try to
-            // be resillient against failures.
-            //
-            // Because cargo only outputs one JSON object per line, we can
-            // simply skip a line if it doesn't parse, which just ignores any
-            // erroneus output.
-            let stdout = BufReader::new(command.stdout.take().unwrap());
-            for line in stdout.lines() {
-                let line = match line {
-                    Ok(line) => line,
-                    Err(err) => {
-                        log::error!("Couldn't read line from cargo: {}", err);
-                        continue;
-                    }
-                };
-
-                let message = serde_json::from_str::<cargo_metadata::Message>(&line);
-                let message = match message {
-                    Ok(message) => message,
-                    Err(err) => {
-                        log::error!(
-                            "Invalid json from cargo check, ignoring ({}): {:?} ",
-                            err,
-                            line
-                        );
-                        continue;
-                    }
-                };
-
-                // Skip certain kinds of messages to only spend time on what's useful
-                match &message {
-                    Message::CompilerArtifact(artifact) if artifact.fresh => continue,
-                    Message::BuildScriptExecuted(_) => continue,
-                    Message::Unknown => continue,
-                    _ => {}
-                }
-
-                match message_send.send(CheckEvent::Msg(message)) {
-                    Ok(()) => {}
-                    Err(_err) => {
-                        // The send channel was closed, so we want to shutdown
-                        break;
-                    }
-                }
-            }
-
-            // We can ignore any error here, as we are already in the progress
-            // of shutting down.
-            let _ = message_send.send(CheckEvent::End);
-
-            // It is okay to ignore the result, as it only errors if the process is already dead
-            let _ = command.kill();
-
-            // Again, we don't care about the exit status so just ignore the result
-            let _ = command.wait();
-        });
-        WatchThread { handle: Some(handle), message_recv }
+        let workspace_root = workspace_root.to_owned();
+        let handle = if options.enable {
+            Some(std::thread::spawn(move || {
+                // If we trigger an error here, we will do so in the loop instead,
+                // which will break out of the loop, and continue the shutdown
+                let _ = message_send.send(CheckEvent::Begin);
+
+                let mut child = run_cargo(&args, Some(&workspace_root), |message| {
+                    // Skip certain kinds of messages to only spend time on what's useful
+                    match &message {
+                        Message::CompilerArtifact(artifact) if artifact.fresh => return true,
+                        Message::BuildScriptExecuted(_) => return true,
+                        Message::Unknown => return true,
+                        _ => {}
+                    }
+
+                    match message_send.send(CheckEvent::Msg(message)) {
+                        Ok(()) => {}
+                        Err(_err) => {
+                            // The send channel was closed, so we want to shutdown
+                            return false;
+                        }
+                    };
+
+                    true
+                });
+
+                // We can ignore any error here, as we are already in the process
+                // of shutting down.
+                let _ = message_send.send(CheckEvent::End);
+
+                // It is okay to ignore the result, as it only errors if the process is already dead
+                let _ = child.kill();
+
+                // Again, we don't care about the exit status so just ignore the result
+                let _ = child.wait();
+            }))
+        } else {
+            None
+        };
+        WatchThread { handle, message_recv }
     }
 }
 
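The extraction above turns the spawn-and-parse loop into a public `run_cargo` helper that both the check watcher and `ra_project_model` now share. Below is a minimal sketch of how a caller can drive it: the closure returns `true` to keep streaming messages and `false` to stop early, and the caller reaps the returned `Child`. The diagnostics-collecting use case and the function name are illustrative, not part of the patch.

use std::path::Path;

use cargo_metadata::Message;
use ra_cargo_watch::run_cargo;

// Hypothetical caller: run `cargo check` once and collect rendered diagnostics.
fn collect_rendered_diagnostics(workspace: &Path) -> Vec<String> {
    let args = vec!["check".to_string(), "--message-format=json".to_string()];
    let mut rendered = Vec::new();

    let mut child = run_cargo(&args, Some(workspace), |message| {
        if let Message::CompilerMessage(msg) = message {
            if let Some(text) = msg.message.rendered {
                rendered.push(text);
            }
        }
        // Returning true asks run_cargo to keep reading the JSON stream.
        true
    });

    // run_cargo hands the child process back so the caller decides when to reap it.
    let _ = child.wait();
    rendered
}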
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index bde843001..e371f849d 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -6,7 +6,11 @@
 //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
 //! actual IO is done and lowered to input.
 
-use std::{fmt, ops, str::FromStr};
+use std::{
+    fmt, ops,
+    path::{Path, PathBuf},
+    str::FromStr,
+};
 
 use ra_cfg::CfgOptions;
 use ra_syntax::SmolStr;
@@ -144,7 +148,7 @@ pub struct Env {
 // crate. We store a map to allow remap it to ExternSourceId
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct ExternSource {
-    extern_paths: FxHashMap<String, ExternSourceId>,
+    extern_paths: FxHashMap<PathBuf, ExternSourceId>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -294,13 +298,10 @@ impl Env {
 }
 
 impl ExternSource {
-    pub fn extern_path(&self, path: &str) -> Option<(ExternSourceId, RelativePathBuf)> {
+    pub fn extern_path(&self, path: impl AsRef<Path>) -> Option<(ExternSourceId, RelativePathBuf)> {
+        let path = path.as_ref();
         self.extern_paths.iter().find_map(|(root_path, id)| {
-            if path.starts_with(root_path) {
-                let mut rel_path = &path[root_path.len()..];
-                if rel_path.starts_with("/") {
-                    rel_path = &rel_path[1..];
-                }
+            if let Ok(rel_path) = path.strip_prefix(root_path) {
                 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
                 Some((id.clone(), rel_path))
             } else {
@@ -309,8 +310,8 @@ impl ExternSource {
         })
     }
 
-    pub fn set_extern_path(&mut self, root_path: &str, root: ExternSourceId) {
-        self.extern_paths.insert(root_path.to_owned(), root);
+    pub fn set_extern_path(&mut self, root_path: &Path, root: ExternSourceId) {
+        self.extern_paths.insert(root_path.to_path_buf(), root);
     }
 }
 
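Replacing the hand-rolled string prefix check with `Path::strip_prefix` changes two things: the relative remainder comes back directly (no manual separator trimming), and matching is per path component rather than per byte. A standalone illustration with made-up paths:

use std::path::Path;

fn main() {
    // The remainder no longer carries a leading separator that has to be trimmed.
    let root = Path::new("/work/target/debug/build/foo/out");
    let file = Path::new("/work/target/debug/build/foo/out/generated.rs");
    assert_eq!(file.strip_prefix(root).unwrap(), Path::new("generated.rs"));

    // strip_prefix compares whole components, so a directory that merely shares
    // a textual prefix with the root is no longer treated as being inside it.
    assert!(Path::new("/work/target/out-cache/generated.rs")
        .strip_prefix(Path::new("/work/target/out"))
        .is_err());
}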
diff --git a/crates/ra_project_model/Cargo.toml b/crates/ra_project_model/Cargo.toml
index 6252241bf..22300548a 100644
--- a/crates/ra_project_model/Cargo.toml
+++ b/crates/ra_project_model/Cargo.toml
@@ -16,6 +16,7 @@ cargo_metadata = "0.9.1"
 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
 ra_cfg = { path = "../ra_cfg" }
+ra_cargo_watch = { path = "../ra_cargo_watch" }
 
 serde = { version = "1.0.104", features = ["derive"] }
 serde_json = "1.0.48"
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs
index 4fea459d5..eeeb10233 100644
--- a/crates/ra_project_model/src/cargo_workspace.rs
+++ b/crates/ra_project_model/src/cargo_workspace.rs
@@ -3,8 +3,9 @@
 use std::path::{Path, PathBuf};
 
 use anyhow::{Context, Result};
-use cargo_metadata::{CargoOpt, MetadataCommand};
+use cargo_metadata::{CargoOpt, Message, MetadataCommand, PackageId};
 use ra_arena::{impl_arena_id, Arena, RawId};
+use ra_cargo_watch::run_cargo;
 use ra_db::Edition;
 use rustc_hash::FxHashMap;
 use serde::Deserialize;
@@ -35,11 +36,19 @@ pub struct CargoFeatures {
     /// List of features to activate.
     /// This will be ignored if `cargo_all_features` is true.
     pub features: Vec<String>,
+
+    /// Runs cargo check on launch to figure out the correct values of OUT_DIR
+    pub load_out_dirs_from_check: bool,
 }
 
 impl Default for CargoFeatures {
     fn default() -> Self {
-        CargoFeatures { no_default_features: false, all_features: true, features: Vec::new() }
+        CargoFeatures {
+            no_default_features: false,
+            all_features: true,
+            features: Vec::new(),
+            load_out_dirs_from_check: false,
+        }
     }
 }
 
@@ -60,6 +69,7 @@ struct PackageData {
     dependencies: Vec<PackageDependency>,
     edition: Edition,
     features: Vec<String>,
+    out_dir: Option<PathBuf>,
 }
 
 #[derive(Debug, Clone)]
@@ -131,6 +141,9 @@ impl Package {
     ) -> impl Iterator<Item = &'a PackageDependency> + 'a {
         ws.packages[self].dependencies.iter()
     }
+    pub fn out_dir(self, ws: &CargoWorkspace) -> Option<&Path> {
+        ws.packages[self].out_dir.as_ref().map(|od| od.as_path())
+    }
 }
 
 impl Target {
@@ -173,6 +186,12 @@ impl CargoWorkspace {
         let meta = meta.exec().with_context(|| {
             format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display())
         })?;
+
+        let mut out_dir_by_id = FxHashMap::default();
+        if cargo_features.load_out_dirs_from_check {
+            out_dir_by_id = load_out_dirs(cargo_toml, cargo_features);
+        }
+
         let mut pkg_by_id = FxHashMap::default();
         let mut packages = Arena::default();
         let mut targets = Arena::default();
@@ -193,6 +212,7 @@ impl CargoWorkspace {
             edition,
             dependencies: Vec::new(),
             features: Vec::new(),
+            out_dir: out_dir_by_id.get(&id).cloned(),
         });
         let pkg_data = &mut packages[pkg];
         pkg_by_id.insert(id, pkg);
@@ -252,3 +272,46 @@ impl CargoWorkspace {
         &self.workspace_root
     }
 }
+
+pub fn load_out_dirs(
+    cargo_toml: &Path,
+    cargo_features: &CargoFeatures,
+) -> FxHashMap<PackageId, PathBuf> {
+    let mut args: Vec<String> = vec![
+        "check".to_string(),
+        "--message-format=json".to_string(),
+        "--manifest-path".to_string(),
+        format!("{}", cargo_toml.display()),
+    ];
+
+    if cargo_features.all_features {
+        args.push("--all-features".to_string());
+    } else if cargo_features.no_default_features {
+        // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
+        // https://github.com/oli-obk/cargo_metadata/issues/79
+        args.push("--no-default-features".to_string());
+    } else if !cargo_features.features.is_empty() {
+        for feature in &cargo_features.features {
+            args.push(feature.clone());
+        }
+    }
+
+    let mut res = FxHashMap::default();
+    let mut child = run_cargo(&args, cargo_toml.parent(), |message| {
+        match message {
+            Message::BuildScriptExecuted(message) => {
+                let package_id = message.package_id;
+                let out_dir = message.out_dir;
+                res.insert(package_id, out_dir);
+            }
+
+            Message::CompilerArtifact(_) => (),
+            Message::CompilerMessage(_) => (),
+            Message::Unknown => (),
+        }
+        true
+    });
+
+    let _ = child.wait();
+    res
+}
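A sketch of how the new option might be exercised from calling code, assuming `CargoFeatures` and `CargoWorkspace` keep their usual re-exports from `ra_project_model` and that the constructor modified by the `@@ -173` hunk is `CargoWorkspace::from_cargo_metadata`; with `load_out_dirs_from_check` enabled, the extra `cargo check` pass fills in `Package::out_dir`:

use std::path::Path;

use anyhow::Result;
use ra_project_model::{CargoFeatures, CargoWorkspace};

// Hypothetical helper: opt in to OUT_DIR discovery and report what the
// additional `cargo check` run found for each package.
fn print_out_dirs(cargo_toml: &Path) -> Result<()> {
    let features =
        CargoFeatures { load_out_dirs_from_check: true, ..CargoFeatures::default() };
    let ws = CargoWorkspace::from_cargo_metadata(cargo_toml, &features)?;
    for pkg in ws.packages() {
        if let Some(out_dir) = pkg.out_dir(&ws) {
            eprintln!("{} -> OUT_DIR = {}", pkg.name(&ws), out_dir.display());
        }
    }
    Ok(())
}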
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index 897874813..43f834253 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -150,6 +150,21 @@ impl ProjectWorkspace {
         }
     }
 
+    pub fn out_dirs(&self) -> Vec<PathBuf> {
+        match self {
+            ProjectWorkspace::Json { project: _project } => vec![],
+            ProjectWorkspace::Cargo { cargo, sysroot: _sysroot } => {
+                let mut out_dirs = Vec::with_capacity(cargo.packages().len());
+                for pkg in cargo.packages() {
+                    if let Some(out_dir) = pkg.out_dir(&cargo) {
+                        out_dirs.push(out_dir.to_path_buf());
+                    }
+                }
+                out_dirs
+            }
+        }
+    }
+
     pub fn n_packages(&self) -> usize {
         match self {
             ProjectWorkspace::Json { project } => project.crates.len(),
@@ -162,7 +177,8 @@ impl ProjectWorkspace {
     pub fn to_crate_graph(
         &self,
         default_cfg_options: &CfgOptions,
-        outdirs: &FxHashMap<String, (ExternSourceId, String)>,
+        additional_out_dirs: &FxHashMap<String, PathBuf>,
+        extern_source_roots: &FxHashMap<PathBuf, ExternSourceId>,
         load: &mut dyn FnMut(&Path) -> Option<FileId>,
     ) -> CrateGraph {
         let mut crate_graph = CrateGraph::default();
@@ -237,9 +253,11 @@ impl ProjectWorkspace {
 
                 let mut env = Env::default();
                 let mut extern_source = ExternSource::default();
-                if let Some((id, path)) = outdirs.get(krate.name(&sysroot)) {
-                    env.set("OUT_DIR", path.clone());
-                    extern_source.set_extern_path(&path, *id);
+                if let Some(path) = additional_out_dirs.get(krate.name(&sysroot)) {
+                    env.set("OUT_DIR", path.to_string_lossy().to_string());
+                    if let Some(extern_source_id) = extern_source_roots.get(path) {
+                        extern_source.set_extern_path(&path, *extern_source_id);
+                    }
                 }
 
                 let crate_id = crate_graph.add_crate_root(
@@ -292,9 +310,20 @@ impl ProjectWorkspace {
                 };
                 let mut env = Env::default();
                 let mut extern_source = ExternSource::default();
-                if let Some((id, path)) = outdirs.get(pkg.name(&cargo)) {
-                    env.set("OUT_DIR", path.clone());
-                    extern_source.set_extern_path(&path, *id);
+                if let Some(out_dir) = dbg!(pkg.out_dir(cargo)) {
+                    env.set("OUT_DIR", out_dir.to_string_lossy().to_string());
+                    if let Some(extern_source_id) =
+                        dbg!(dbg!(&extern_source_roots).get(out_dir))
+                    {
+                        extern_source.set_extern_path(&out_dir, *extern_source_id);
+                    }
+                } else {
+                    if let Some(path) = additional_out_dirs.get(pkg.name(&cargo)) {
+                        env.set("OUT_DIR", path.to_string_lossy().to_string());
+                        if let Some(extern_source_id) = extern_source_roots.get(path) {
+                            extern_source.set_extern_path(&path, *extern_source_id);
+                        }
+                    }
                 }
                 let crate_id = crate_graph.add_crate_root(
                     file_id,
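The motivation for threading OUT_DIR through `Env` and `ExternSource` is the common build-script pattern `include!(concat!(env!("OUT_DIR"), "/generated.rs"))`: the crate's environment must carry the variable, and the directory must be reachable as an extern source root. A minimal sketch of the per-package wiring that `to_crate_graph` now performs, assuming the usual re-exports from `ra_db`; the helper name and values are made up:

use std::path::{Path, PathBuf};

use ra_db::{Env, ExternSource, ExternSourceId};
use rustc_hash::FxHashMap;

// `extern_source_roots` maps an OUT_DIR to the source-root id the vfs assigned
// to it; both pieces end up on the crate root so `env!("OUT_DIR")` and paths
// under that directory resolve inside the IDE.
fn wire_out_dir(
    out_dir: &Path,
    extern_source_roots: &FxHashMap<PathBuf, ExternSourceId>,
) -> (Env, ExternSource) {
    let mut env = Env::default();
    let mut extern_source = ExternSource::default();

    env.set("OUT_DIR", out_dir.to_string_lossy().to_string());
    if let Some(&extern_source_id) = extern_source_roots.get(out_dir) {
        extern_source.set_extern_path(out_dir, extern_source_id);
    }
    (env, extern_source)
}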
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index 2ce69c9b3..7d75b991d 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -54,12 +54,18 @@ pub(crate) fn load_cargo(
 
     // FIXME: outdirs?
     let outdirs = FxHashMap::default();
+    let extern_source_roots = FxHashMap::default();
 
-    let crate_graph = ws.to_crate_graph(&default_cfg_options, &outdirs, &mut |path: &Path| {
-        let vfs_file = vfs.load(path);
-        log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
-        vfs_file.map(vfs_file_to_id)
-    });
+    let crate_graph = ws.to_crate_graph(
+        &default_cfg_options,
+        &outdirs,
+        &extern_source_roots,
+        &mut |path: &Path| {
+            let vfs_file = vfs.load(path);
+            log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
+            vfs_file.map(vfs_file_to_id)
+        },
+    );
     log::debug!("crate graph: {:?}", crate_graph);
 
     let source_roots = roots
diff --git a/crates/rust-analyzer/src/world.rs b/crates/rust-analyzer/src/world.rs
index 5743471bf..63e913047 100644
--- a/crates/rust-analyzer/src/world.rs
+++ b/crates/rust-analyzer/src/world.rs
@@ -105,11 +105,15 @@ impl WorldState {
             }));
         }
 
-        let extern_dirs: FxHashSet<_> =
+        let mut extern_dirs: FxHashSet<_> =
             additional_out_dirs.iter().map(|(_, path)| (PathBuf::from(path))).collect();
+        for ws in workspaces.iter() {
+            extern_dirs.extend(ws.out_dirs());
+        }
+
         let mut extern_source_roots = FxHashMap::default();
 
-        roots.extend(additional_out_dirs.iter().map(|(_, path)| {
+        roots.extend(extern_dirs.iter().map(|path| {
             let mut filter = RustPackageFilterBuilder::default().set_member(false);
             for glob in exclude_globs.iter() {
                 filter = filter.exclude(glob.clone());
@@ -148,17 +152,21 @@ impl WorldState {
             vfs_file.map(|f| FileId(f.0))
         };
 
-        let mut outdirs = FxHashMap::default();
-        for (name, path) in additional_out_dirs {
-            let path = PathBuf::from(&path);
-            if let Some(id) = extern_source_roots.get(&path) {
-                outdirs.insert(name, (id.clone(), path.to_string_lossy().replace("\\", "/")));
-            }
-        }
+        let additional_out_dirs: FxHashMap<String, PathBuf> = additional_out_dirs
+            .into_iter()
+            .map(|(name, path)| (name, PathBuf::from(&path)))
+            .collect();
 
         workspaces
             .iter()
-            .map(|ws| ws.to_crate_graph(&default_cfg_options, &outdirs, &mut load))
+            .map(|ws| {
+                ws.to_crate_graph(
+                    &default_cfg_options,
+                    &additional_out_dirs,
+                    &extern_source_roots,
+                    &mut load,
+                )
+            })
            .for_each(|graph| {
                 crate_graph.extend(graph);
             });
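For completeness, the two hunks above boil down to one collection step: OUT_DIRs configured by hand and OUT_DIRs discovered from `cargo check` land in a single set, so a directory supplied both ways still produces exactly one extra non-member source root, and the old per-crate-name `outdirs` map disappears. A condensed sketch with illustrative parameter names:

use std::path::PathBuf;

use rustc_hash::FxHashSet;

// Condensed from WorldState::new: merge manually configured out dirs with the
// ones each workspace reported; the set removes duplicates.
fn collect_extern_dirs(
    additional_out_dirs: &[(String, String)],
    workspace_out_dirs: impl IntoIterator<Item = PathBuf>,
) -> FxHashSet<PathBuf> {
    let mut extern_dirs: FxHashSet<PathBuf> =
        additional_out_dirs.iter().map(|(_, path)| PathBuf::from(path)).collect();
    extern_dirs.extend(workspace_out_dirs);
    extern_dirs
}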