From 9358eecc042d8b551f58d2d5ddb9c88d258880c1 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Thu, 28 Jan 2021 23:33:02 +0800 Subject: Async Loading outdir and proc-macro --- crates/project_model/src/build_data.rs | 288 ++++++++++++++++------------ crates/project_model/src/cargo_workspace.rs | 47 +++-- crates/project_model/src/lib.rs | 1 + crates/project_model/src/workspace.rs | 53 ++++- crates/rust-analyzer/src/cli/load_cargo.rs | 36 ++-- crates/rust-analyzer/src/config.rs | 4 +- crates/rust-analyzer/src/global_state.rs | 12 +- crates/rust-analyzer/src/lsp_ext.rs | 1 + crates/rust-analyzer/src/main_loop.rs | 38 +++- crates/rust-analyzer/src/op_queue.rs | 28 +-- crates/rust-analyzer/src/reload.rs | 88 +++++++-- 11 files changed, 397 insertions(+), 199 deletions(-) diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs index 3ff347e2c..a5c564e0a 100644 --- a/crates/project_model/src/build_data.rs +++ b/crates/project_model/src/build_data.rs @@ -5,10 +5,11 @@ use std::{ io::BufReader, path::{Path, PathBuf}, process::{Command, Stdio}, + sync::Arc, }; use anyhow::Result; -use cargo_metadata::{BuildScript, Message, Package, PackageId}; +use cargo_metadata::{BuildScript, Message}; use itertools::Itertools; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; @@ -16,150 +17,195 @@ use stdx::JodChild; use crate::{cfg_flag::CfgFlag, CargoConfig}; -#[derive(Debug, Clone, Default)] -pub(crate) struct BuildDataMap { - data: FxHashMap, -} #[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct BuildData { +pub(crate) struct BuildData { /// List of config flags defined by this package's build script - pub cfgs: Vec, + pub(crate) cfgs: Vec, /// List of cargo-related environment variables with their value /// /// If the package has a build script which defines environment variables, /// they can also be found here. - pub envs: Vec<(String, String)>, + pub(crate) envs: Vec<(String, String)>, /// Directory where a build script might place its output - pub out_dir: Option, + pub(crate) out_dir: Option, /// Path to the proc-macro library file if this package exposes proc-macros - pub proc_macro_dylib_path: Option, + pub(crate) proc_macro_dylib_path: Option, } -impl BuildDataMap { - pub(crate) fn new( - cargo_toml: &AbsPath, - cargo_features: &CargoConfig, - packages: &Vec, - progress: &dyn Fn(String), - ) -> Result { - let mut cmd = Command::new(toolchain::cargo()); - cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"]) - .arg(cargo_toml.as_ref()); - - // --all-targets includes tests, benches and examples in addition to the - // default lib and bins. This is an independent concept from the --targets - // flag below. 
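The command assembled here (and reassembled by the new collect_from_workspace further down) drives everything: cargo is asked for JSON messages, and the interesting ones are BuildScriptExecuted and CompilerArtifact. Stripped of the rust-analyzer specifics, the technique is roughly the following sketch (assumes a `cargo` binary on PATH and the cargo_metadata version used by this crate, where out_dir is a plain PathBuf):

    use std::{io::BufReader, path::Path, process::{Command, Stdio}};
    use cargo_metadata::Message;

    fn dump_build_script_outputs(cargo_toml: &Path) -> anyhow::Result<()> {
        let mut child = Command::new("cargo")
            .args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
            .arg(cargo_toml)
            .stdout(Stdio::piped())
            .stderr(Stdio::null())
            .spawn()?;

        let stdout = BufReader::new(child.stdout.take().unwrap());
        for message in Message::parse_stream(stdout) {
            match message? {
                // one message per executed build script: OUT_DIR, cfgs, envs
                Message::BuildScriptExecuted(script) => {
                    eprintln!("{} -> {:?}", script.package_id.repr, script.out_dir)
                }
                // compiled artifacts; proc-macro dylibs are found among these
                Message::CompilerArtifact(artifact) => {
                    eprintln!("built {}", artifact.target.name)
                }
                _ => (),
            }
        }
        child.wait()?;
        Ok(())
    }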
- cmd.arg("--all-targets"); - - if let Some(target) = &cargo_features.target { - cmd.args(&["--target", target]); - } +#[derive(Clone, Debug)] +pub(crate) struct BuildDataConfig { + cargo_toml: AbsPathBuf, + cargo_features: CargoConfig, + packages: Arc>, +} - if cargo_features.all_features { - cmd.arg("--all-features"); - } else { - if cargo_features.no_default_features { - // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` - // https://github.com/oli-obk/cargo_metadata/issues/79 - cmd.arg("--no-default-features"); - } - if !cargo_features.features.is_empty() { - cmd.arg("--features"); - cmd.arg(cargo_features.features.join(" ")); - } - } +impl PartialEq for BuildDataConfig { + fn eq(&self, other: &Self) -> bool { + Arc::ptr_eq(&self.packages, &other.packages) + } +} - cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null()); - - let mut child = cmd.spawn().map(JodChild)?; - let child_stdout = child.stdout.take().unwrap(); - let stdout = BufReader::new(child_stdout); - - let mut res = BuildDataMap::default(); - for message in cargo_metadata::Message::parse_stream(stdout) { - if let Ok(message) = message { - match message { - Message::BuildScriptExecuted(BuildScript { - package_id, - out_dir, - cfgs, - env, - .. - }) => { - let cfgs = { - let mut acc = Vec::new(); - for cfg in cfgs { - match cfg.parse::() { - Ok(it) => acc.push(it), - Err(err) => { - anyhow::bail!("invalid cfg from cargo-metadata: {}", err) - } - }; - } - acc - }; - let res = res.data.entry(package_id.clone()).or_default(); - // cargo_metadata crate returns default (empty) path for - // older cargos, which is not absolute, so work around that. - if out_dir != PathBuf::default() { - let out_dir = AbsPathBuf::assert(out_dir); - res.out_dir = Some(out_dir); - res.cfgs = cfgs; - } +impl Eq for BuildDataConfig {} - res.envs = env; - } - Message::CompilerArtifact(message) => { - progress(format!("metadata {}", message.target.name)); - - if message.target.kind.contains(&"proc-macro".to_string()) { - let package_id = message.package_id; - // Skip rmeta file - if let Some(filename) = - message.filenames.iter().find(|name| is_dylib(name)) - { - let filename = AbsPathBuf::assert(filename.clone()); - let res = res.data.entry(package_id.clone()).or_default(); - res.proc_macro_dylib_path = Some(filename); - } - } - } - Message::CompilerMessage(message) => { - progress(message.target.name.clone()); - } - Message::Unknown => (), - Message::BuildFinished(_) => {} - Message::TextLine(_) => {} - } - } +#[derive(Debug, Default)] +pub struct BuildDataCollector { + configs: FxHashMap, +} + +#[derive(Debug, Default, PartialEq, Eq)] +pub struct BuildDataResult { + data: FxHashMap, +} + +pub(crate) type BuildDataMap = FxHashMap; + +impl BuildDataCollector { + pub(crate) fn add_config(&mut self, workspace_root: &AbsPath, config: BuildDataConfig) { + self.configs.insert(workspace_root.to_path_buf().clone(), config); + } + + pub fn collect(&mut self, progress: &dyn Fn(String)) -> Result { + let mut res = BuildDataResult::default(); + for (path, config) in self.configs.iter() { + res.data.insert( + path.clone(), + collect_from_workspace( + &config.cargo_toml, + &config.cargo_features, + &config.packages, + progress, + )?, + ); } - res.inject_cargo_env(packages); Ok(res) } +} + +impl BuildDataResult { + pub(crate) fn get(&self, root: &AbsPath) -> Option<&BuildDataMap> { + self.data.get(&root.to_path_buf()) + } +} - pub(crate) fn with_cargo_env(packages: &Vec) -> Self { - let mut res = Self::default(); - 
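Taken together, BuildDataConfig, BuildDataCollector and BuildDataResult split the old eager BuildDataMap::new into a cheap description captured when the workspace is loaded and a collection step that can run later, off the main loop. A crate-internal sketch of the intended call pattern, assuming this module's imports and with workspace_root, cargo_toml, cargo_config and packages standing in for real values:

    fn collect_later(
        workspace_root: &AbsPath,
        cargo_toml: AbsPathBuf,
        cargo_config: CargoConfig,
        packages: Arc<Vec<cargo_metadata::Package>>,
    ) -> Result<()> {
        let config = BuildDataConfig::new(cargo_toml, cargo_config, packages);

        let mut collector = BuildDataCollector::default();
        collector.add_config(workspace_root, config);

        // potentially slow: runs `cargo check` once per registered workspace
        let build_data: BuildDataResult =
            collector.collect(&|msg: String| eprintln!("{}", msg))?;

        // results are keyed by workspace root, then by package id string
        let _per_package: Option<&BuildDataMap> = build_data.get(workspace_root);
        Ok(())
    }

In the LSP server the same split shows up below: switch_workspaces registers the configs, and fetch_build_data_if_needed later runs collect on the task pool.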
res.inject_cargo_env(packages); - res +impl BuildDataConfig { + pub(crate) fn new( + cargo_toml: AbsPathBuf, + cargo_features: CargoConfig, + packages: Arc>, + ) -> Self { + Self { cargo_toml, cargo_features, packages } } +} - pub(crate) fn get(&self, id: &PackageId) -> Option<&BuildData> { - self.data.get(id) +fn collect_from_workspace( + cargo_toml: &AbsPath, + cargo_features: &CargoConfig, + packages: &Vec, + progress: &dyn Fn(String), +) -> Result { + let mut cmd = Command::new(toolchain::cargo()); + cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"]) + .arg(cargo_toml.as_ref()); + + // --all-targets includes tests, benches and examples in addition to the + // default lib and bins. This is an independent concept from the --targets + // flag below. + cmd.arg("--all-targets"); + + if let Some(target) = &cargo_features.target { + cmd.args(&["--target", target]); } - fn inject_cargo_env(&mut self, packages: &Vec) { - for meta_pkg in packages { - let resource = self.data.entry(meta_pkg.id.clone()).or_default(); - inject_cargo_env(meta_pkg, &mut resource.envs); + if cargo_features.all_features { + cmd.arg("--all-features"); + } else { + if cargo_features.no_default_features { + // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` + // https://github.com/oli-obk/cargo_metadata/issues/79 + cmd.arg("--no-default-features"); + } + if !cargo_features.features.is_empty() { + cmd.arg("--features"); + cmd.arg(cargo_features.features.join(" ")); + } + } + + cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null()); - if let Some(out_dir) = &resource.out_dir { - // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() - if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { - resource.envs.push(("OUT_DIR".to_string(), out_dir)); + let mut child = cmd.spawn().map(JodChild)?; + let child_stdout = child.stdout.take().unwrap(); + let stdout = BufReader::new(child_stdout); + + let mut res = BuildDataMap::default(); + for message in cargo_metadata::Message::parse_stream(stdout) { + if let Ok(message) = message { + match message { + Message::BuildScriptExecuted(BuildScript { + package_id, + out_dir, + cfgs, + env, + .. + }) => { + let cfgs = { + let mut acc = Vec::new(); + for cfg in cfgs { + match cfg.parse::() { + Ok(it) => acc.push(it), + Err(err) => { + anyhow::bail!("invalid cfg from cargo-metadata: {}", err) + } + }; + } + acc + }; + let res = res.entry(package_id.repr.clone()).or_default(); + // cargo_metadata crate returns default (empty) path for + // older cargos, which is not absolute, so work around that. 
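Once the stream has been drained, every package in the workspace ends up with one BuildData entry. For a package with a build script it looks roughly like this (illustrative values only; the fields are pub(crate), so literal construction like this only makes sense inside this module):

    fn example_entry() -> BuildData {
        BuildData {
            // cfgs the build script printed via `cargo:rustc-cfg=...`
            cfgs: vec!["has_atomics".parse::<CfgFlag>().unwrap()],
            // cargo-injected variables plus OUT_DIR, filled in further below
            envs: vec![(
                "OUT_DIR".to_string(),
                "/ws/target/debug/build/foo-1234/out".to_string(),
            )],
            // where the build script wrote its output
            out_dir: Some(AbsPathBuf::assert("/ws/target/debug/build/foo-1234/out".into())),
            // set only for packages whose target kind is `proc-macro`
            proc_macro_dylib_path: None,
        }
    }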
+ if out_dir != PathBuf::default() { + let out_dir = AbsPathBuf::assert(out_dir); + res.out_dir = Some(out_dir); + res.cfgs = cfgs; + } + + res.envs = env; + } + Message::CompilerArtifact(message) => { + progress(format!("metadata {}", message.target.name)); + + if message.target.kind.contains(&"proc-macro".to_string()) { + let package_id = message.package_id; + // Skip rmeta file + if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) + { + let filename = AbsPathBuf::assert(filename.clone()); + let res = res.entry(package_id.repr.clone()).or_default(); + res.proc_macro_dylib_path = Some(filename); + } + } + } + Message::CompilerMessage(message) => { + progress(message.target.name.clone()); } + Message::Unknown => (), + Message::BuildFinished(_) => {} + Message::TextLine(_) => {} } } } + + for package in packages { + let build_data = res.entry(package.id.repr.clone()).or_default(); + inject_cargo_env(package, build_data); + if let Some(out_dir) = &build_data.out_dir { + // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() + if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { + build_data.envs.push(("OUT_DIR".to_string(), out_dir)); + } + } + } + + Ok(res) } // FIXME: File a better way to know if it is a dylib @@ -173,7 +219,9 @@ fn is_dylib(path: &Path) -> bool { /// Recreates the compile-time environment variables that Cargo sets. /// /// Should be synced with -fn inject_cargo_env(package: &cargo_metadata::Package, env: &mut Vec<(String, String)>) { +fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut BuildData) { + let env = &mut build_data.envs; + // FIXME: Missing variables: // CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_ diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs index c8a5333c4..f47898b9b 100644 --- a/crates/project_model/src/cargo_workspace.rs +++ b/crates/project_model/src/cargo_workspace.rs @@ -1,6 +1,6 @@ //! FIXME: write short doc here -use std::{convert::TryInto, ops, process::Command}; +use std::{convert::TryInto, ops, process::Command, sync::Arc}; use anyhow::{Context, Result}; use base_db::Edition; @@ -9,7 +9,7 @@ use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; -use crate::build_data::{BuildData, BuildDataMap}; +use crate::build_data::BuildDataConfig; use crate::utf8_stdout; /// `CargoWorkspace` represents the logical structure of, well, a Cargo @@ -27,6 +27,7 @@ pub struct CargoWorkspace { packages: Arena, targets: Arena, workspace_root: AbsPathBuf, + build_data_config: BuildDataConfig, } impl ops::Index for CargoWorkspace { @@ -55,9 +56,6 @@ pub struct CargoConfig { /// This will be ignored if `cargo_all_features` is true. pub features: Vec, - /// Runs cargo check on launch to figure out the correct values of OUT_DIR - pub load_out_dirs_from_check: bool, - /// rustc target pub target: Option, @@ -94,8 +92,8 @@ pub struct PackageData { pub features: FxHashMap>, /// List of features enabled on this package pub active_features: Vec, - /// Build script related data for this package - pub build_data: BuildData, + // String representation of package id + pub id: String, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -228,12 +226,6 @@ impl CargoWorkspace { ) })?; - let resources = if config.load_out_dirs_from_check { - BuildDataMap::new(cargo_toml, config, &meta.packages, progress)? 
- } else { - BuildDataMap::with_cargo_env(&meta.packages) - }; - let mut pkg_by_id = FxHashMap::default(); let mut packages = Arena::default(); let mut targets = Arena::default(); @@ -241,10 +233,7 @@ impl CargoWorkspace { let ws_members = &meta.workspace_members; meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); - for meta_pkg in meta.packages { - let id = meta_pkg.id.clone(); - let build_data = resources.get(&id).cloned().unwrap_or_default(); - + for meta_pkg in &meta.packages { let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } = meta_pkg; let is_member = ws_members.contains(&id); @@ -252,24 +241,24 @@ impl CargoWorkspace { .parse::() .with_context(|| format!("Failed to parse edition {}", edition))?; let pkg = packages.alloc(PackageData { - name, + id: id.repr.clone(), + name: name.clone(), version: version.to_string(), - manifest: AbsPathBuf::assert(manifest_path), + manifest: AbsPathBuf::assert(manifest_path.clone()), targets: Vec::new(), is_member, edition, dependencies: Vec::new(), - features: meta_pkg.features.into_iter().collect(), + features: meta_pkg.features.clone().into_iter().collect(), active_features: Vec::new(), - build_data, }); let pkg_data = &mut packages[pkg]; pkg_by_id.insert(id, pkg); - for meta_tgt in meta_pkg.targets { + for meta_tgt in &meta_pkg.targets { let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; let tgt = targets.alloc(TargetData { package: pkg, - name: meta_tgt.name, + name: meta_tgt.name.clone(), root: AbsPathBuf::assert(meta_tgt.src_path.clone()), kind: TargetKind::new(meta_tgt.kind.as_slice()), is_proc_macro, @@ -308,7 +297,13 @@ impl CargoWorkspace { } let workspace_root = AbsPathBuf::assert(meta.workspace_root); - Ok(CargoWorkspace { packages, targets, workspace_root: workspace_root }) + let build_data_config = BuildDataConfig::new( + cargo_toml.to_path_buf(), + config.clone(), + Arc::new(meta.packages.clone()), + ); + + Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config }) } pub fn packages<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { @@ -334,6 +329,10 @@ impl CargoWorkspace { } } + pub(crate) fn build_data_config(&self) -> &BuildDataConfig { + &self.build_data_config + } + fn is_unique(&self, name: &str) -> bool { self.packages.iter().filter(|(_, v)| v.name == name).count() == 1 } diff --git a/crates/project_model/src/lib.rs b/crates/project_model/src/lib.rs index 525c336e6..d712095a6 100644 --- a/crates/project_model/src/lib.rs +++ b/crates/project_model/src/lib.rs @@ -19,6 +19,7 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashSet; pub use crate::{ + build_data::{BuildDataCollector, BuildDataResult}, cargo_workspace::{ CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, Target, TargetData, TargetKind, diff --git a/crates/project_model/src/workspace.rs b/crates/project_model/src/workspace.rs index 559f4e7bf..c30861976 100644 --- a/crates/project_model/src/workspace.rs +++ b/crates/project_model/src/workspace.rs @@ -16,8 +16,13 @@ use proc_macro_api::ProcMacroClient; use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ - cargo_workspace, cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, utf8_stdout, CargoConfig, - CargoWorkspace, ProjectJson, ProjectManifest, Sysroot, TargetKind, + build_data::{BuildData, BuildDataMap, BuildDataResult}, + cargo_workspace, + cfg_flag::CfgFlag, + rustc_cfg, + sysroot::SysrootCrate, + utf8_stdout, BuildDataCollector, CargoConfig, CargoWorkspace, ProjectJson, ProjectManifest, + Sysroot, TargetKind, }; /// 
`PackageRoot` describes a package root folder. @@ -153,7 +158,7 @@ impl ProjectWorkspace { /// Returns the roots for the current `ProjectWorkspace` /// The return type contains the path and whether or not /// the root is a member of the current workspace - pub fn to_roots(&self) -> Vec { + pub fn to_roots(&self, build_data: Option<&BuildDataResult>) -> Vec { match self { ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project .crates() @@ -179,7 +184,12 @@ impl ProjectWorkspace { let pkg_root = cargo[pkg].root().to_path_buf(); let mut include = vec![pkg_root.clone()]; - include.extend(cargo[pkg].build_data.out_dir.clone()); + include.extend( + build_data + .and_then(|it| it.get(cargo.workspace_root())) + .and_then(|map| map.get(&cargo[pkg].id)) + .and_then(|it| it.out_dir.clone()), + ); let mut exclude = vec![pkg_root.join(".git")]; if is_member { @@ -219,6 +229,7 @@ impl ProjectWorkspace { pub fn to_crate_graph( &self, + build_data: Option<&BuildDataResult>, proc_macro_client: Option<&ProcMacroClient>, load: &mut dyn FnMut(&AbsPath) -> Option, ) -> CrateGraph { @@ -241,8 +252,10 @@ impl ProjectWorkspace { &proc_macro_loader, load, cargo, + build_data.and_then(|it| it.get(cargo.workspace_root())), sysroot, rustc, + rustc.as_ref().zip(build_data).and_then(|(it, map)| map.get(it.workspace_root())), ), }; if crate_graph.patch_cfg_if() { @@ -252,6 +265,18 @@ impl ProjectWorkspace { } crate_graph } + + pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) { + match self { + ProjectWorkspace::Cargo { cargo, rustc, .. } => { + collector.add_config(&cargo.workspace_root(), cargo.build_data_config().clone()); + if let Some(rustc) = rustc { + collector.add_config(rustc.workspace_root(), rustc.build_data_config().clone()); + } + } + _ => {} + } + } } fn project_json_to_crate_graph( @@ -324,8 +349,10 @@ fn cargo_to_crate_graph( proc_macro_loader: &dyn Fn(&Path) -> Vec, load: &mut dyn FnMut(&AbsPath) -> Option, cargo: &CargoWorkspace, + build_data_map: Option<&BuildDataMap>, sysroot: &Sysroot, rustc: &Option, + rustc_build_data_map: Option<&BuildDataMap>, ) -> CrateGraph { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); @@ -351,6 +378,7 @@ fn cargo_to_crate_graph( let crate_id = add_target_crate_root( &mut crate_graph, &cargo[pkg], + build_data_map.and_then(|it| it.get(&cargo[pkg].id)), &cfg_options, proc_macro_loader, file_id, @@ -427,6 +455,7 @@ fn cargo_to_crate_graph( let crate_id = add_target_crate_root( &mut crate_graph, &rustc_workspace[pkg], + rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)), &cfg_options, proc_macro_loader, file_id, @@ -475,6 +504,7 @@ fn cargo_to_crate_graph( fn add_target_crate_root( crate_graph: &mut CrateGraph, pkg: &cargo_workspace::PackageData, + build_data: Option<&BuildData>, cfg_options: &CfgOptions, proc_macro_loader: &dyn Fn(&Path) -> Vec, file_id: FileId, @@ -485,19 +515,22 @@ fn add_target_crate_root( for feature in pkg.active_features.iter() { opts.insert_key_value("feature".into(), feature.into()); } - opts.extend(pkg.build_data.cfgs.iter().cloned()); + if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) { + opts.extend(cfgs.iter().cloned()); + } opts }; let mut env = Env::default(); - for (k, v) in &pkg.build_data.envs { - env.set(k, v.clone()); + if let Some(envs) = build_data.map(|it| &it.envs) { + for (k, v) in envs { + env.set(k, v.clone()); + } } - let proc_macro = pkg - .build_data - .proc_macro_dylib_path + let proc_macro = build_data .as_ref() + 
.and_then(|it| it.proc_macro_dylib_path.as_ref()) .map(|it| proc_macro_loader(&it)) .unwrap_or_default(); diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index dbab4f5f4..e12e87180 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -6,7 +6,9 @@ use anyhow::Result; use crossbeam_channel::{unbounded, Receiver}; use ide::{AnalysisHost, Change}; use ide_db::base_db::CrateGraph; -use project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; +use project_model::{ + BuildDataCollector, CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace, +}; use vfs::{loader::Handle, AbsPath, AbsPathBuf}; use crate::reload::{ProjectFolders, SourceRootConfig}; @@ -18,11 +20,7 @@ pub fn load_cargo( ) -> Result<(AnalysisHost, vfs::Vfs)> { let root = AbsPathBuf::assert(std::env::current_dir()?.join(root)); let root = ProjectManifest::discover_single(&root)?; - let ws = ProjectWorkspace::load( - root, - &CargoConfig { load_out_dirs_from_check, ..Default::default() }, - &|_| {}, - )?; + let ws = ProjectWorkspace::load(root, &CargoConfig::default(), &|_| {})?; let (sender, receiver) = unbounded(); let mut vfs = vfs::Vfs::default(); @@ -39,14 +37,26 @@ pub fn load_cargo( None }; - let crate_graph = ws.to_crate_graph(proc_macro_client.as_ref(), &mut |path: &AbsPath| { - let contents = loader.load_sync(path); - let path = vfs::VfsPath::from(path.to_path_buf()); - vfs.set_file_contents(path.clone(), contents); - vfs.file_id(&path) - }); + let build_data = if load_out_dirs_from_check { + let mut collector = BuildDataCollector::default(); + ws.collect_build_data_configs(&mut collector); + Some(collector.collect(&|_| {})?) + } else { + None + }; + + let crate_graph = ws.to_crate_graph( + build_data.as_ref(), + proc_macro_client.as_ref(), + &mut |path: &AbsPath| { + let contents = loader.load_sync(path); + let path = vfs::VfsPath::from(path.to_path_buf()); + vfs.set_file_contents(path.clone(), contents); + vfs.file_id(&path) + }, + ); - let project_folders = ProjectFolders::new(&[ws], &[]); + let project_folders = ProjectFolders::new(&[ws], &[], build_data.as_ref()); loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![] }); log::debug!("crate graph: {:?}", crate_graph); diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 37487b6ac..cc0b22bff 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -469,6 +469,9 @@ impl Config { pub fn cargo_autoreload(&self) -> bool { self.data.cargo_autoreload } + pub fn load_out_dirs_from_check(&self) -> bool { + self.data.cargo_loadOutDirsFromCheck + } pub fn cargo(&self) -> CargoConfig { let rustc_source = self.data.rustcSource.as_ref().map(|it| self.root_path.join(&it)); @@ -476,7 +479,6 @@ impl Config { no_default_features: self.data.cargo_noDefaultFeatures, all_features: self.data.cargo_allFeatures, features: self.data.cargo_features.clone(), - load_out_dirs_from_check: self.data.cargo_loadOutDirsFromCheck, target: self.data.cargo_target.clone(), rustc_source, no_sysroot: self.data.cargo_noSysroot, diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 442fbd14c..6374a9f3c 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -11,7 +11,9 @@ use ide::{Analysis, AnalysisHost, Change, FileId}; use ide_db::base_db::{CrateId, VfsPath}; use 
lsp_types::{SemanticTokens, Url}; use parking_lot::{Mutex, RwLock}; -use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; +use project_model::{ + BuildDataCollector, BuildDataResult, CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target, +}; use rustc_hash::FxHashMap; use vfs::AnchoredPathBuf; @@ -33,7 +35,7 @@ use crate::{ #[derive(Eq, PartialEq, Copy, Clone)] pub(crate) enum Status { Loading, - Ready, + Ready { partial: bool }, Invalid, NeedsReload, } @@ -79,7 +81,9 @@ pub(crate) struct GlobalState { pub(crate) source_root_config: SourceRootConfig, pub(crate) proc_macro_client: Option, pub(crate) workspaces: Arc>, - pub(crate) fetch_workspaces_queue: OpQueue, + pub(crate) fetch_workspaces_queue: OpQueue<()>, + pub(crate) workspace_build_data: Option, + pub(crate) fetch_build_data_queue: OpQueue, latest_requests: Arc>, } @@ -133,6 +137,8 @@ impl GlobalState { proc_macro_client: None, workspaces: Arc::new(Vec::new()), fetch_workspaces_queue: OpQueue::default(), + workspace_build_data: None, + fetch_build_data_queue: OpQueue::default(), latest_requests: Default::default(), } } diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index a85978737..670ca9a45 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -234,6 +234,7 @@ pub enum StatusNotification {} #[derive(Serialize, Deserialize)] pub enum Status { Loading, + ReadyPartial, Ready, NeedsReload, Invalid, diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 6d2475a59..f4fd1ac13 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -21,7 +21,7 @@ use crate::{ global_state::{file_id_to_url, url_to_file_id, GlobalState, Status}, handlers, lsp_ext, lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress}, - reload::ProjectWorkspaceProgress, + reload::{BuildDataProgress, ProjectWorkspaceProgress}, Result, }; @@ -63,6 +63,7 @@ pub(crate) enum Task { Diagnostics(Vec<(FileId, Vec)>), PrimeCaches(PrimeCachesProgress), FetchWorkspace(ProjectWorkspaceProgress), + FetchBuildData(BuildDataProgress), } impl fmt::Debug for Event { @@ -226,12 +227,33 @@ impl GlobalState { } ProjectWorkspaceProgress::End(workspaces) => { self.fetch_workspaces_completed(); - self.switch_workspaces(workspaces); + self.switch_workspaces(workspaces, None); (Progress::End, None) } }; self.report_progress("fetching", state, msg, None); } + Task::FetchBuildData(progress) => { + let (state, msg) = match progress { + BuildDataProgress::Begin => (Some(Progress::Begin), None), + BuildDataProgress::Report(msg) => { + (Some(Progress::Report), Some(msg)) + } + BuildDataProgress::End(collector) => { + self.fetch_build_data_completed(); + let workspaces = (*self.workspaces) + .clone() + .into_iter() + .map(|it| Ok(it)) + .collect(); + self.switch_workspaces(workspaces, Some(collector)); + (Some(Progress::End), None) + } + }; + if let Some(state) = state { + self.report_progress("loading", state, msg, None); + } + } } // Coalesce multiple task events into one loop turn task = match self.task_pool.receiver.try_recv() { @@ -287,7 +309,11 @@ impl GlobalState { Progress::Report } else { assert_eq!(n_done, n_total); - self.transition(Status::Ready); + let status = Status::Ready { + partial: self.config.load_out_dirs_from_check() + && self.workspace_build_data.is_none(), + }; + self.transition(status); Progress::End }; self.report_progress( @@ -372,13 +398,14 @@ impl GlobalState { } 
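Both the workspace fetch and the new build-data fetch are driven through the reworked OpQueue (see op_queue.rs below), which now carries the operation's input instead of a bare boolean. Its contract, sketched here with the collector as payload (crate-internal API; purely illustrative):

    fn op_queue_contract() {
        let mut queue: OpQueue<BuildDataCollector> = OpQueue::default();

        // request_op only records the desired operation; nothing runs yet
        queue.request_op(BuildDataCollector::default());

        // the main loop polls once per turn; the first poll hands the payload
        // out and marks an operation as in flight
        assert!(queue.should_start_op().is_some());

        // requests made while one is in flight are remembered, not started
        queue.request_op(BuildDataCollector::default());
        assert!(queue.should_start_op().is_none());

        // ...until the running operation reports completion
        queue.op_completed();
        assert!(queue.should_start_op().is_some());
    }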
let state_changed = self.process_changes(); - if prev_status == Status::Loading && self.status == Status::Ready { + let is_ready = matches!(self.status, Status::Ready { .. } ); + if prev_status == Status::Loading && is_ready { for flycheck in &self.flycheck { flycheck.update(); } } - if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) { + if is_ready && (state_changed || prev_status == Status::Loading) { self.update_file_notifications_on_threadpool(); // Refresh semantic tokens if the client supports it. @@ -408,6 +435,7 @@ impl GlobalState { } self.fetch_workspaces_if_needed(); + self.fetch_build_data_if_needed(); let loop_duration = loop_start.elapsed(); if loop_duration > Duration::from_millis(100) { diff --git a/crates/rust-analyzer/src/op_queue.rs b/crates/rust-analyzer/src/op_queue.rs index 51d66f4b3..761b9ad39 100644 --- a/crates/rust-analyzer/src/op_queue.rs +++ b/crates/rust-analyzer/src/op_queue.rs @@ -1,22 +1,26 @@ //! Bookkeeping to make sure only one long-running operation is executed. -#[derive(Default)] -pub(crate) struct OpQueue { - op_scheduled: bool, +pub(crate) struct OpQueue { + op_scheduled: Option, op_in_progress: bool, } -impl OpQueue { - pub(crate) fn request_op(&mut self) { - self.op_scheduled = true; +impl Default for OpQueue { + fn default() -> Self { + Self { op_scheduled: None, op_in_progress: false } } - pub(crate) fn should_start_op(&mut self) -> bool { - if !self.op_in_progress && self.op_scheduled { - self.op_in_progress = true; - self.op_scheduled = false; - return true; +} + +impl OpQueue { + pub(crate) fn request_op(&mut self, data: D) { + self.op_scheduled = Some(data); + } + pub(crate) fn should_start_op(&mut self) -> Option { + if self.op_in_progress { + return None; } - false + self.op_in_progress = self.op_scheduled.is_some(); + self.op_scheduled.take() } pub(crate) fn op_completed(&mut self) { assert!(self.op_in_progress); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index ef73099cf..289bbc443 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -4,7 +4,7 @@ use std::{mem, sync::Arc}; use flycheck::{FlycheckConfig, FlycheckHandle}; use ide::Change; use ide_db::base_db::{CrateGraph, SourceRoot, VfsPath}; -use project_model::{ProcMacroClient, ProjectWorkspace}; +use project_model::{BuildDataCollector, BuildDataResult, ProcMacroClient, ProjectWorkspace}; use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind}; use crate::{ @@ -22,6 +22,13 @@ pub(crate) enum ProjectWorkspaceProgress { End(Vec>), } +#[derive(Debug)] +pub(crate) enum BuildDataProgress { + Begin, + Report(String), + End(anyhow::Result), +} + impl GlobalState { pub(crate) fn update_configuration(&mut self, config: Config) { let _p = profile::span("GlobalState::update_configuration"); @@ -41,7 +48,7 @@ impl GlobalState { } match self.status { Status::Loading | Status::NeedsReload => return, - Status::Ready | Status::Invalid => (), + Status::Ready { .. 
} | Status::Invalid => (), } if self.config.cargo_autoreload() { self.fetch_workspaces_request(); @@ -89,7 +96,8 @@ impl GlobalState { if self.config.status_notification() { let lsp_status = match new_status { Status::Loading => lsp_ext::Status::Loading, - Status::Ready => lsp_ext::Status::Ready, + Status::Ready { partial: true } => lsp_ext::Status::ReadyPartial, + Status::Ready { partial: false } => lsp_ext::Status::Ready, Status::Invalid => lsp_ext::Status::Invalid, Status::NeedsReload => lsp_ext::Status::NeedsReload, }; @@ -99,11 +107,37 @@ impl GlobalState { } } + pub(crate) fn fetch_build_data_request(&mut self, build_data_collector: BuildDataCollector) { + self.fetch_build_data_queue.request_op(build_data_collector); + } + + pub(crate) fn fetch_build_data_if_needed(&mut self) { + let mut build_data_collector = match self.fetch_build_data_queue.should_start_op() { + Some(it) => it, + None => return, + }; + self.task_pool.handle.spawn_with_sender(move |sender| { + sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap(); + + let progress = { + let sender = sender.clone(); + move |msg| { + sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap() + } + }; + let res = build_data_collector.collect(&progress); + sender.send(Task::FetchBuildData(BuildDataProgress::End(res))).unwrap(); + }); + } + pub(crate) fn fetch_build_data_completed(&mut self) { + self.fetch_build_data_queue.op_completed() + } + pub(crate) fn fetch_workspaces_request(&mut self) { - self.fetch_workspaces_queue.request_op() + self.fetch_workspaces_queue.request_op(()) } pub(crate) fn fetch_workspaces_if_needed(&mut self) { - if !self.fetch_workspaces_queue.should_start_op() { + if self.fetch_workspaces_queue.should_start_op().is_none() { return; } log::info!("will fetch workspaces"); @@ -154,7 +188,11 @@ impl GlobalState { self.fetch_workspaces_queue.op_completed() } - pub(crate) fn switch_workspaces(&mut self, workspaces: Vec>) { + pub(crate) fn switch_workspaces( + &mut self, + workspaces: Vec>, + workspace_build_data: Option>, + ) { let _p = profile::span("GlobalState::switch_workspaces"); log::info!("will switch workspaces: {:?}", workspaces); @@ -176,7 +214,20 @@ impl GlobalState { }) .collect::>(); - if &*self.workspaces == &workspaces { + let workspace_build_data = match workspace_build_data { + Some(Ok(it)) => Some(it), + Some(Err(err)) => { + log::error!("failed to fetch build data: {:#}", err); + self.show_message( + lsp_types::MessageType::Error, + format!("rust-analyzer failed to fetch build data: {:#}", err), + ); + return; + } + None => None, + }; + + if &*self.workspaces == &workspaces && self.workspace_build_data == workspace_build_data { return; } @@ -189,7 +240,7 @@ impl GlobalState { let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers: workspaces .iter() - .flat_map(ProjectWorkspace::to_roots) + .flat_map(|it| it.to_roots(workspace_build_data.as_ref())) .filter(|it| it.is_member) .flat_map(|root| { root.include.into_iter().map(|it| format!("{}/**/*.rs", it.display())) @@ -215,7 +266,8 @@ impl GlobalState { let mut change = Change::new(); let files_config = self.config.files(); - let project_folders = ProjectFolders::new(&workspaces, &files_config.exclude); + let project_folders = + ProjectFolders::new(&workspaces, &files_config.exclude, workspace_build_data.as_ref()); self.proc_macro_client = match self.config.proc_macro_srv() { None => None, @@ -257,15 +309,28 @@ impl GlobalState { res }; for ws in workspaces.iter() { - 
crate_graph.extend(ws.to_crate_graph(self.proc_macro_client.as_ref(), &mut load)); + crate_graph.extend(ws.to_crate_graph( + self.workspace_build_data.as_ref(), + self.proc_macro_client.as_ref(), + &mut load, + )); } crate_graph }; change.set_crate_graph(crate_graph); + if self.config.load_out_dirs_from_check() && workspace_build_data.is_none() { + let mut collector = BuildDataCollector::default(); + for ws in &workspaces { + ws.collect_build_data_configs(&mut collector); + } + self.fetch_build_data_request(collector) + } + self.source_root_config = project_folders.source_root_config; self.workspaces = Arc::new(workspaces); + self.workspace_build_data = workspace_build_data; self.analysis_host.apply_change(change); self.process_changes(); @@ -323,12 +388,13 @@ impl ProjectFolders { pub(crate) fn new( workspaces: &[ProjectWorkspace], global_excludes: &[AbsPathBuf], + build_data: Option<&BuildDataResult>, ) -> ProjectFolders { let mut res = ProjectFolders::default(); let mut fsc = FileSetConfig::builder(); let mut local_filesets = vec![]; - for root in workspaces.iter().flat_map(|it| it.to_roots()) { + for root in workspaces.iter().flat_map(|it| it.to_roots(build_data)) { let file_set_roots: Vec = root.include.iter().cloned().map(VfsPath::from).collect(); -- cgit v1.2.3
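After this patch the reload path runs in two passes: switch_workspaces is first called with no build data (and, when cargo.loadOutDirsFromCheck is set, queues a BuildDataCollector on the task pool), then called a second time with the collected result, at which point OUT_DIR folders and proc-macro dylibs reach the crate graph and the VFS roots. A sketch of that last step, showing how the collected data widens each package's include set (ws and build_data stand in for values produced by the steps above):

    fn roots_with_out_dirs(
        ws: &ProjectWorkspace,
        build_data: Option<&BuildDataResult>,
    ) -> Vec<AbsPathBuf> {
        ws.to_roots(build_data)
            .into_iter()
            // `include` now also contains each package's OUT_DIR once build data is known
            .flat_map(|root| root.include)
            .collect()
    }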