internal: simplify handling of the build scripts

This commit is contained in:
  parent 8da560264e
  commit f4de2ece0d

Cargo.lock (generated): 1 line changed
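At a high level, this change removes the two-phase build-script machinery in `project_model` (a `BuildDataCollector` gathering one `BuildDataConfig` per workspace and later producing a `BuildDataResult`) and replaces it with a single `WorkspaceBuildScripts::run` call per Cargo workspace, whose per-package `BuildScriptOutput` is consumed directly when the crate graph is built. A minimal sketch of the new call shape, adapted from the `load_cargo.rs` hunk further down; the surrounding loader setup is elided and `load_file` is a hypothetical name standing in for the inline VFS-loading closure used there:

    // Run build scripts only for Cargo workspaces, and only when requested.
    let build_scripts = match &ws {
        ProjectWorkspace::Cargo { cargo, .. } if load_config.load_out_dirs_from_check => {
            WorkspaceBuildScripts::run(cargo_config, cargo, progress)?
        }
        _ => WorkspaceBuildScripts::default(),
    };
    // The per-package outputs feed straight into crate-graph construction.
    let crate_graph = ws.to_crate_graph(&build_scripts, proc_macro_client.as_ref(), &mut load_file);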
Cargo.lock

@@ -1181,6 +1181,7 @@ dependencies = [
  "proc_macro_api",
  "profile",
  "rustc-hash",
+ "semver",
  "serde",
  "serde_json",
  "stdx",
crates/project_model/Cargo.toml

@@ -12,6 +12,7 @@ doctest = false
 log = "0.4.8"
 rustc-hash = "1.1.0"
 cargo_metadata = "0.14"
+semver = "1"
 serde = { version = "1.0.106", features = ["derive"] }
 serde_json = "1.0.48"
 anyhow = "1.0.26"
crates/project_model/src/build_data.rs (deleted)
@@ -1,323 +0,0 @@

//! Handles build script specific information

use std::{
    path::PathBuf,
    process::{Command, Stdio},
    sync::Arc,
};

use anyhow::Result;
use base_db::CrateName;
use cargo_metadata::camino::Utf8Path;
use cargo_metadata::{BuildScript, Message};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use serde::Deserialize;
use stdx::format_to;

use crate::{cfg_flag::CfgFlag, CargoConfig};

#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct PackageBuildData {
    /// List of config flags defined by this package's build script
    pub(crate) cfgs: Vec<CfgFlag>,
    /// List of cargo-related environment variables with their value
    ///
    /// If the package has a build script which defines environment variables,
    /// they can also be found here.
    pub(crate) envs: Vec<(String, String)>,
    /// Directory where a build script might place its output
    pub(crate) out_dir: Option<AbsPathBuf>,
    /// Path to the proc-macro library file if this package exposes proc-macros
    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
}

#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub(crate) struct WorkspaceBuildData {
    per_package: FxHashMap<String, PackageBuildData>,
    error: Option<String>,
}

#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct BuildDataResult {
    per_workspace: FxHashMap<AbsPathBuf, WorkspaceBuildData>,
}

#[derive(Clone, Debug)]
pub(crate) struct BuildDataConfig {
    cargo_toml: AbsPathBuf,
    cargo_features: CargoConfig,
    packages: Arc<Vec<cargo_metadata::Package>>,
}

impl PartialEq for BuildDataConfig {
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.packages, &other.packages)
    }
}

impl Eq for BuildDataConfig {}

#[derive(Debug)]
pub struct BuildDataCollector {
    wrap_rustc: bool,
    configs: FxHashMap<AbsPathBuf, BuildDataConfig>,
}

impl BuildDataCollector {
    pub fn new(wrap_rustc: bool) -> Self {
        Self { wrap_rustc, configs: FxHashMap::default() }
    }

    pub(crate) fn add_config(&mut self, workspace_root: &AbsPath, config: BuildDataConfig) {
        self.configs.insert(workspace_root.to_path_buf(), config);
    }

    pub fn collect(&mut self, progress: &dyn Fn(String)) -> Result<BuildDataResult> {
        let mut res = BuildDataResult::default();
        for (path, config) in self.configs.iter() {
            let workspace_build_data = WorkspaceBuildData::collect(
                &config.cargo_toml,
                &config.cargo_features,
                &config.packages,
                self.wrap_rustc,
                progress,
            )?;
            res.per_workspace.insert(path.clone(), workspace_build_data);
        }
        Ok(res)
    }
}

impl WorkspaceBuildData {
    pub(crate) fn get(&self, package_id: &str) -> Option<&PackageBuildData> {
        self.per_package.get(package_id)
    }
}

impl BuildDataResult {
    pub(crate) fn get(&self, workspace_root: &AbsPath) -> Option<&WorkspaceBuildData> {
        self.per_workspace.get(workspace_root)
    }
    pub fn error(&self) -> Option<String> {
        let mut buf = String::new();
        for (_workspace_root, build_data) in &self.per_workspace {
            if let Some(err) = &build_data.error {
                format_to!(buf, "cargo check failed:\n{}", err);
            }
        }
        if buf.is_empty() {
            return None;
        }

        Some(buf)
    }
}

impl BuildDataConfig {
    pub(crate) fn new(
        cargo_toml: AbsPathBuf,
        cargo_features: CargoConfig,
        packages: Arc<Vec<cargo_metadata::Package>>,
    ) -> Self {
        Self { cargo_toml, cargo_features, packages }
    }
}

impl WorkspaceBuildData {
    fn collect(
        cargo_toml: &AbsPath,
        cargo_features: &CargoConfig,
        packages: &Vec<cargo_metadata::Package>,
        wrap_rustc: bool,
        progress: &dyn Fn(String),
    ) -> Result<WorkspaceBuildData> {
        let mut cmd = Command::new(toolchain::cargo());

        if wrap_rustc {
            // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
            // that to compile only proc macros and build scripts during the initial
            // `cargo check`.
            let myself = std::env::current_exe()?;
            cmd.env("RUSTC_WRAPPER", myself);
            cmd.env("RA_RUSTC_WRAPPER", "1");
        }

        cmd.current_dir(cargo_toml.parent().unwrap());
        cmd.args(&["check", "--quiet", "--workspace", "--message-format=json", "--manifest-path"])
            .arg(cargo_toml.as_ref());

        // --all-targets includes tests, benches and examples in addition to the
        // default lib and bins. This is an independent concept from the --targets
        // flag below.
        cmd.arg("--all-targets");

        if let Some(target) = &cargo_features.target {
            cmd.args(&["--target", target]);
        }

        if cargo_features.all_features {
            cmd.arg("--all-features");
        } else {
            if cargo_features.no_default_features {
                // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
                // https://github.com/oli-obk/cargo_metadata/issues/79
                cmd.arg("--no-default-features");
            }
            if !cargo_features.features.is_empty() {
                cmd.arg("--features");
                cmd.arg(cargo_features.features.join(" "));
            }
        }

        cmd.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());

        let mut res = WorkspaceBuildData::default();

        let mut callback_err = None;
        let output = stdx::process::streaming_output(
            cmd,
            &mut |line| {
                if callback_err.is_some() {
                    return;
                }

                // Copy-pasted from existing cargo_metadata. It seems like we
                // should be using sered_stacker here?
                let mut deserializer = serde_json::Deserializer::from_str(line);
                deserializer.disable_recursion_limit();
                let message = Message::deserialize(&mut deserializer)
                    .unwrap_or_else(|_| Message::TextLine(line.to_string()));

                match message {
                    Message::BuildScriptExecuted(BuildScript {
                        package_id,
                        out_dir,
                        cfgs,
                        env,
                        ..
                    }) => {
                        let cfgs = {
                            let mut acc = Vec::new();
                            for cfg in cfgs {
                                match cfg.parse::<CfgFlag>() {
                                    Ok(it) => acc.push(it),
                                    Err(err) => {
                                        callback_err = Some(anyhow::format_err!(
                                            "invalid cfg from cargo-metadata: {}",
                                            err
                                        ));
                                        return;
                                    }
                                };
                            }
                            acc
                        };
                        let package_build_data =
                            res.per_package.entry(package_id.repr).or_default();
                        // cargo_metadata crate returns default (empty) path for
                        // older cargos, which is not absolute, so work around that.
                        if !out_dir.as_str().is_empty() {
                            let out_dir =
                                AbsPathBuf::assert(PathBuf::from(out_dir.into_os_string()));
                            package_build_data.out_dir = Some(out_dir);
                            package_build_data.cfgs = cfgs;
                        }

                        package_build_data.envs = env;
                    }
                    Message::CompilerArtifact(message) => {
                        progress(format!("metadata {}", message.target.name));

                        if message.target.kind.iter().any(|k| k == "proc-macro") {
                            let package_id = message.package_id;
                            // Skip rmeta file
                            if let Some(filename) =
                                message.filenames.iter().find(|name| is_dylib(name))
                            {
                                let filename = AbsPathBuf::assert(PathBuf::from(&filename));
                                let package_build_data =
                                    res.per_package.entry(package_id.repr).or_default();
                                package_build_data.proc_macro_dylib_path = Some(filename);
                            }
                        }
                    }
                    Message::CompilerMessage(message) => {
                        progress(message.target.name);
                    }
                    Message::BuildFinished(_) => {}
                    Message::TextLine(_) => {}
                    _ => {}
                }
            },
            &mut |_| (),
        )?;

        for package in packages {
            let package_build_data = res.per_package.entry(package.id.repr.clone()).or_default();
            inject_cargo_env(package, package_build_data);
            if let Some(out_dir) = &package_build_data.out_dir {
                // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
                if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) {
                    package_build_data.envs.push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        if !output.status.success() {
            let mut stderr = String::from_utf8(output.stderr).unwrap_or_default();
            if stderr.is_empty() {
                stderr = "cargo check failed".to_string();
            }
            res.error = Some(stderr)
        }

        Ok(res)
    }
}

// FIXME: File a better way to know if it is a dylib
fn is_dylib(path: &Utf8Path) -> bool {
    match path.extension().map(|e| e.to_string().to_lowercase()) {
        None => false,
        Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
    }
}

/// Recreates the compile-time environment variables that Cargo sets.
///
/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut PackageBuildData) {
    let env = &mut build_data.envs;

    // FIXME: Missing variables:
    // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>

    let mut manifest_dir = package.manifest_path.clone();
    manifest_dir.pop();
    env.push(("CARGO_MANIFEST_DIR".into(), manifest_dir.into_string()));

    // Not always right, but works for common cases.
    env.push(("CARGO".into(), "cargo".into()));

    env.push(("CARGO_PKG_VERSION".into(), package.version.to_string()));
    env.push(("CARGO_PKG_VERSION_MAJOR".into(), package.version.major.to_string()));
    env.push(("CARGO_PKG_VERSION_MINOR".into(), package.version.minor.to_string()));
    env.push(("CARGO_PKG_VERSION_PATCH".into(), package.version.patch.to_string()));
    env.push(("CARGO_PKG_VERSION_PRE".into(), package.version.pre.to_string()));

    let authors = package.authors.join(";");
    env.push(("CARGO_PKG_AUTHORS".into(), authors));

    env.push(("CARGO_PKG_NAME".into(), package.name.clone()));
    // FIXME: This isn't really correct (a package can have many crates with different names), but
    // it's better than leaving the variable unset.
    env.push(("CARGO_CRATE_NAME".into(), CrateName::normalize_dashes(&package.name).to_string()));
    env.push(("CARGO_PKG_DESCRIPTION".into(), package.description.clone().unwrap_or_default()));
    env.push(("CARGO_PKG_HOMEPAGE".into(), package.homepage.clone().unwrap_or_default()));
    env.push(("CARGO_PKG_REPOSITORY".into(), package.repository.clone().unwrap_or_default()));
    env.push(("CARGO_PKG_LICENSE".into(), package.license.clone().unwrap_or_default()));

    let license_file = package.license_file.as_ref().map(|buf| buf.to_string()).unwrap_or_default();
    env.push(("CARGO_PKG_LICENSE_FILE".into(), license_file));
}
crates/project_model/src/build_scripts.rs (new file, 207 lines)
@@ -0,0 +1,207 @@

//! Workspace information we get from cargo consists of two pieces. The first is
//! the output of `cargo metadata`. The second is the output of running
//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc
//! macro.
//!
//! This module implements this second part. We use "build script" terminology
//! here, but it covers procedural macros as well.

use std::{
    path::PathBuf,
    process::{Command, Stdio},
};

use anyhow::Result;
use cargo_metadata::{camino::Utf8Path, Message};
use la_arena::ArenaMap;
use paths::AbsPathBuf;
use rustc_hash::FxHashMap;
use serde::Deserialize;

use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package};

#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct WorkspaceBuildScripts {
    pub(crate) outputs: ArenaMap<Package, BuildScriptOutput>,
    error: Option<String>,
}

#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct BuildScriptOutput {
    /// List of config flags defined by this package's build script.
    pub(crate) cfgs: Vec<CfgFlag>,
    /// List of cargo-related environment variables with their value.
    ///
    /// If the package has a build script which defines environment variables,
    /// they can also be found here.
    pub(crate) envs: Vec<(String, String)>,
    /// Directory where a build script might place its output.
    pub(crate) out_dir: Option<AbsPathBuf>,
    /// Path to the proc-macro library file if this package exposes proc-macros.
    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
}

impl WorkspaceBuildScripts {
    pub fn run(
        config: &CargoConfig,
        workspace: &CargoWorkspace,
        progress: &dyn Fn(String),
    ) -> Result<WorkspaceBuildScripts> {
        let mut cmd = Command::new(toolchain::cargo());

        if config.wrap_rustc_in_build_scripts {
            // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
            // that to compile only proc macros and build scripts during the initial
            // `cargo check`.
            let myself = std::env::current_exe()?;
            cmd.env("RUSTC_WRAPPER", myself);
            cmd.env("RA_RUSTC_WRAPPER", "1");
        }
        cmd.current_dir(workspace.workspace_root());
        cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);

        // --all-targets includes tests, benches and examples in addition to the
        // default lib and bins. This is an independent concept from the --targets
        // flag below.
        cmd.arg("--all-targets");

        if let Some(target) = &config.target {
            cmd.args(&["--target", target]);
        }

        if config.all_features {
            cmd.arg("--all-features");
        } else {
            if config.no_default_features {
                cmd.arg("--no-default-features");
            }
            if !config.features.is_empty() {
                cmd.arg("--features");
                cmd.arg(config.features.join(" "));
            }
        }

        cmd.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());

        let mut res = WorkspaceBuildScripts::default();
        // NB: Cargo.toml could have been modified between `cargo metadata` and
        // `cargo check`. We shouldn't assume that package ids we see here are
        // exactly those from `config`.
        let mut by_id: FxHashMap<String, Package> = FxHashMap::default();

        for package in workspace.packages() {
            res.outputs.insert(package, BuildScriptOutput::default());
            by_id.insert(workspace[package].id.clone(), package);
        }

        let mut callback_err = None;
        let mut stderr = String::new();
        let output = stdx::process::streaming_output(
            cmd,
            &mut |line| {
                if callback_err.is_some() {
                    return;
                }

                // Copy-pasted from existing cargo_metadata. It seems like we
                // should be using sered_stacker here?
                let mut deserializer = serde_json::Deserializer::from_str(line);
                deserializer.disable_recursion_limit();
                let message = Message::deserialize(&mut deserializer)
                    .unwrap_or_else(|_| Message::TextLine(line.to_string()));

                match message {
                    Message::BuildScriptExecuted(message) => {
                        let package = match by_id.get(&message.package_id.repr) {
                            Some(it) => *it,
                            None => return,
                        };
                        let cfgs = {
                            let mut acc = Vec::new();
                            for cfg in message.cfgs {
                                match cfg.parse::<CfgFlag>() {
                                    Ok(it) => acc.push(it),
                                    Err(err) => {
                                        callback_err = Some(anyhow::format_err!(
                                            "invalid cfg from cargo-metadata: {}",
                                            err
                                        ));
                                        return;
                                    }
                                };
                            }
                            acc
                        };
                        let package_build_data = &mut res.outputs[package];
                        // cargo_metadata crate returns default (empty) path for
                        // older cargos, which is not absolute, so work around that.
                        if !message.out_dir.as_str().is_empty() {
                            let out_dir =
                                AbsPathBuf::assert(PathBuf::from(message.out_dir.into_os_string()));
                            package_build_data.out_dir = Some(out_dir);
                            package_build_data.cfgs = cfgs;
                        }

                        package_build_data.envs = message.env;
                    }
                    Message::CompilerArtifact(message) => {
                        let package = match by_id.get(&message.package_id.repr) {
                            Some(it) => *it,
                            None => return,
                        };

                        progress(format!("metadata {}", message.target.name));

                        if message.target.kind.iter().any(|k| k == "proc-macro") {
                            // Skip rmeta file
                            if let Some(filename) =
                                message.filenames.iter().find(|name| is_dylib(name))
                            {
                                let filename = AbsPathBuf::assert(PathBuf::from(&filename));
                                res.outputs[package].proc_macro_dylib_path = Some(filename);
                            }
                        }
                    }
                    Message::CompilerMessage(message) => {
                        progress(message.target.name);
                    }
                    Message::BuildFinished(_) => {}
                    Message::TextLine(_) => {}
                    _ => {}
                }
            },
            &mut |line| {
                stderr.push_str(line);
                stderr.push('\n');
            },
        )?;

        for package in workspace.packages() {
            let package_build_data = &mut res.outputs[package];
            // inject_cargo_env(package, package_build_data);
            if let Some(out_dir) = &package_build_data.out_dir {
                // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
                if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) {
                    package_build_data.envs.push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        if !output.status.success() {
            if stderr.is_empty() {
                stderr = "cargo check failed".to_string();
            }
            res.error = Some(stderr)
        }

        Ok(res)
    }
}

// FIXME: File a better way to know if it is a dylib.
fn is_dylib(path: &Utf8Path) -> bool {
    match path.extension().map(|e| e.to_string().to_lowercase()) {
        None => false,
        Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
    }
}
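One notable difference from the deleted module: results are now keyed by the workspace's `Package` arena index (an `ArenaMap<Package, BuildScriptOutput>`) rather than by a package-id string, and the temporary `by_id` map translates cargo's package ids back to arena indices while the `cargo check` JSON output streams in. A small, illustrative lookup sketch; the `workspace` and `scripts` bindings are hypothetical names, and since `outputs` is `pub(crate)` this only compiles inside `project_model`:

    // After WorkspaceBuildScripts::run, every package has a (possibly default) entry.
    for pkg in workspace.packages() {
        if let Some(out_dir) = scripts.outputs[pkg].out_dir.as_ref() {
            // OUT_DIR is also pushed into `envs`, which is how env!("OUT_DIR") later resolves.
            println!("{} -> {:?}", workspace[pkg].name, out_dir);
        }
    }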
crates/project_model/src/cargo_workspace.rs

@@ -2,7 +2,7 @@
 
 use std::iter;
 use std::path::PathBuf;
-use std::{convert::TryInto, ops, process::Command, sync::Arc};
+use std::{convert::TryInto, ops, process::Command};
 
 use anyhow::{Context, Result};
 use base_db::Edition;
@@ -13,8 +13,8 @@ use rustc_hash::FxHashMap;
 use serde::Deserialize;
 use serde_json::from_value;
 
+use crate::utf8_stdout;
 use crate::CfgOverrides;
-use crate::{build_data::BuildDataConfig, utf8_stdout};
 
 /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
 /// workspace. It pretty closely mirrors `cargo metadata` output.
@@ -31,7 +31,6 @@ pub struct CargoWorkspace {
     packages: Arena<PackageData>,
     targets: Arena<TargetData>,
     workspace_root: AbsPathBuf,
-    build_data_config: BuildDataConfig,
 }
 
 impl ops::Index<Package> for CargoWorkspace {
@@ -81,6 +80,8 @@ pub struct CargoConfig {
 
     /// crates to disable `#[cfg(test)]` on
     pub unset_test_crates: Vec<String>,
+
+    pub wrap_rustc_in_build_scripts: bool,
 }
 
 impl CargoConfig {
@@ -103,7 +104,7 @@ pub type Target = Idx<TargetData>;
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct PackageData {
     /// Version given in the `Cargo.toml`
-    pub version: String,
+    pub version: semver::Version,
     /// Name as given in the `Cargo.toml`
     pub name: String,
     /// Path containing the `Cargo.toml`
@@ -288,11 +289,7 @@ impl CargoWorkspace {
         Ok(meta)
     }
 
-    pub fn new(
-        cargo_toml: &AbsPath,
-        config: &CargoConfig,
-        mut meta: cargo_metadata::Metadata,
-    ) -> CargoWorkspace {
+    pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
         let mut pkg_by_id = FxHashMap::default();
         let mut packages = Arena::default();
         let mut targets = Arena::default();
@@ -314,7 +311,7 @@ impl CargoWorkspace {
             let pkg = packages.alloc(PackageData {
                 id: id.repr.clone(),
                 name: name.clone(),
-                version: version.to_string(),
+                version: version.clone(),
                 manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)),
                 targets: Vec::new(),
                 is_member,
@@ -374,10 +371,8 @@ impl CargoWorkspace {
 
         let workspace_root =
             AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
-        let build_data_config =
-            BuildDataConfig::new(cargo_toml.to_path_buf(), config.clone(), Arc::new(meta.packages));
 
-        CargoWorkspace { packages, targets, workspace_root, build_data_config }
+        CargoWorkspace { packages, targets, workspace_root }
     }
 
     pub fn from_cargo_metadata3(
@@ -386,7 +381,7 @@ impl CargoWorkspace {
         progress: &dyn Fn(String),
     ) -> Result<CargoWorkspace> {
         let meta = CargoWorkspace::fetch_metadata(cargo_toml, config, progress)?;
-        Ok(CargoWorkspace::new(cargo_toml, config, meta))
+        Ok(CargoWorkspace::new(meta))
     }
 
     pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
@@ -412,10 +407,6 @@ impl CargoWorkspace {
         }
     }
 
-    pub(crate) fn build_data_config(&self) -> &BuildDataConfig {
-        &self.build_data_config
-    }
-
     fn is_unique(&self, name: &str) -> bool {
         self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
     }
crates/project_model/src/lib.rs

@@ -21,7 +21,7 @@ mod project_json;
 mod sysroot;
 mod workspace;
 mod rustc_cfg;
-mod build_data;
+mod build_scripts;
 
 use std::{
     fs::{self, read_dir, ReadDir},
@@ -34,7 +34,7 @@ use paths::{AbsPath, AbsPathBuf};
 use rustc_hash::FxHashSet;
 
 pub use crate::{
-    build_data::{BuildDataCollector, BuildDataResult},
+    build_scripts::WorkspaceBuildScripts,
     cargo_workspace::{
         CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustcSource, Target,
         TargetData, TargetKind,
crates/project_model/src/workspace.rs

@@ -6,20 +6,19 @@ use std::{collections::VecDeque, fmt, fs, process::Command};
 
 use anyhow::{format_err, Context, Result};
 use base_db::{CrateDisplayName, CrateGraph, CrateId, CrateName, Edition, Env, FileId, ProcMacro};
-use cargo_workspace::DepKind;
 use cfg::{CfgDiff, CfgOptions};
 use paths::{AbsPath, AbsPathBuf};
 use proc_macro_api::ProcMacroClient;
 use rustc_hash::{FxHashMap, FxHashSet};
 
 use crate::{
-    build_data::{BuildDataResult, PackageBuildData, WorkspaceBuildData},
-    cargo_workspace,
+    build_scripts::BuildScriptOutput,
+    cargo_workspace::{DepKind, PackageData, RustcSource},
     cfg_flag::CfgFlag,
     rustc_cfg,
     sysroot::SysrootCrate,
-    utf8_stdout, BuildDataCollector, CargoConfig, CargoWorkspace, ProjectJson, ProjectManifest,
-    Sysroot, TargetKind,
+    utf8_stdout, CargoConfig, CargoWorkspace, ProjectJson, ProjectManifest, Sysroot, TargetKind,
+    WorkspaceBuildScripts,
 };
 
 pub type CfgOverrides = FxHashMap<String, CfgDiff>;
@@ -134,7 +133,7 @@ impl ProjectWorkspace {
                         cargo_version
                     )
                 })?;
-                let cargo = CargoWorkspace::new(&cargo_toml, config, meta);
+                let cargo = CargoWorkspace::new(meta);
 
                 let sysroot = if config.no_sysroot {
                     Sysroot::default()
@@ -148,7 +147,6 @@ impl ProjectWorkspace {
                 };
 
                 let rustc_dir = if let Some(rustc_source) = &config.rustc_source {
-                    use cargo_workspace::RustcSource;
                     match rustc_source {
                         RustcSource::Path(path) => Some(path.clone()),
                         RustcSource::Discover => Sysroot::discover_rustc(&cargo_toml),
@@ -163,7 +161,7 @@ impl ProjectWorkspace {
                         .with_context(|| {
                             format!("Failed to read Cargo metadata for Rust sources")
                         })?;
-                    CargoWorkspace::new(&rustc_dir, config, meta)
+                    CargoWorkspace::new(meta)
                 }),
                 None => None,
             };
@@ -201,7 +199,7 @@ impl ProjectWorkspace {
     /// Returns the roots for the current `ProjectWorkspace`
     /// The return type contains the path and whether or not
     /// the root is a member of the current workspace
-    pub fn to_roots(&self, build_data: Option<&BuildDataResult>) -> Vec<PackageRoot> {
+    pub fn to_roots(&self, build_scripts: &WorkspaceBuildScripts) -> Vec<PackageRoot> {
         match self {
             ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
                 .crates()
@@ -229,10 +227,7 @@ impl ProjectWorkspace {
 
                     let mut include = vec![pkg_root.clone()];
                     include.extend(
-                        build_data
-                            .and_then(|it| it.get(cargo.workspace_root()))
-                            .and_then(|map| map.get(&cargo[pkg].id))
-                            .and_then(|it| it.out_dir.clone()),
+                        build_scripts.outputs.get(pkg).and_then(|it| it.out_dir.clone()),
                     );
 
                     // In case target's path is manually set in Cargo.toml to be
@@ -307,7 +302,7 @@ impl ProjectWorkspace {
 
     pub fn to_crate_graph(
         &self,
-        build_data: Option<&BuildDataResult>,
+        build_scripts: &WorkspaceBuildScripts,
         proc_macro_client: Option<&ProcMacroClient>,
         load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     ) -> CrateGraph {
@@ -332,13 +327,9 @@ impl ProjectWorkspace {
                     &proc_macro_loader,
                     load,
                     cargo,
-                    build_data.and_then(|it| it.get(cargo.workspace_root())),
+                    build_scripts,
                     sysroot,
                     rustc,
-                    rustc
-                        .as_ref()
-                        .zip(build_data)
-                        .and_then(|(it, map)| map.get(it.workspace_root())),
                 )
             }
             ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
@@ -352,15 +343,6 @@ impl ProjectWorkspace {
         }
         crate_graph
     }
-
-    pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) {
-        match self {
-            ProjectWorkspace::Cargo { cargo, .. } => {
-                collector.add_config(cargo.workspace_root(), cargo.build_data_config().clone());
-            }
-            _ => {}
-        }
-    }
 }
 
 fn project_json_to_crate_graph(
@@ -435,10 +417,9 @@ fn cargo_to_crate_graph(
     proc_macro_loader: &dyn Fn(&AbsPath) -> Vec<ProcMacro>,
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     cargo: &CargoWorkspace,
-    build_data_map: Option<&WorkspaceBuildData>,
+    build_scripts: &WorkspaceBuildScripts,
     sysroot: &Sysroot,
     rustc: &Option<CargoWorkspace>,
-    rustc_build_data_map: Option<&WorkspaceBuildData>,
 ) -> CrateGraph {
     let _p = profile::span("cargo_to_crate_graph");
     let mut crate_graph = CrateGraph::default();
@@ -481,7 +462,7 @@ fn cargo_to_crate_graph(
             let crate_id = add_target_crate_root(
                 &mut crate_graph,
                 &cargo[pkg],
-                build_data_map.and_then(|it| it.get(&cargo[pkg].id)),
+                build_scripts.outputs.get(pkg),
                 &cfg_options,
                 proc_macro_loader,
                 file_id,
@@ -555,7 +536,6 @@ fn cargo_to_crate_graph(
             rustc_workspace,
             load,
             &mut crate_graph,
-            rustc_build_data_map,
             &cfg_options,
             proc_macro_loader,
             &mut pkg_to_lib_crate,
@@ -615,7 +595,6 @@ fn handle_rustc_crates(
     rustc_workspace: &CargoWorkspace,
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     crate_graph: &mut CrateGraph,
-    rustc_build_data_map: Option<&WorkspaceBuildData>,
     cfg_options: &CfgOptions,
     proc_macro_loader: &dyn Fn(&AbsPath) -> Vec<ProcMacro>,
     pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<crate::PackageData>, CrateId>,
@@ -651,7 +630,7 @@ fn handle_rustc_crates(
                 let crate_id = add_target_crate_root(
                     crate_graph,
                     &rustc_workspace[pkg],
-                    rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)),
+                    None,
                     cfg_options,
                     proc_macro_loader,
                     file_id,
@@ -706,8 +685,8 @@ fn handle_rustc_crates(
 
 fn add_target_crate_root(
     crate_graph: &mut CrateGraph,
-    pkg: &cargo_workspace::PackageData,
-    build_data: Option<&PackageBuildData>,
+    pkg: &PackageData,
+    build_data: Option<&BuildScriptOutput>,
     cfg_options: &CfgOptions,
     proc_macro_loader: &dyn Fn(&AbsPath) -> Vec<ProcMacro>,
     file_id: FileId,
@@ -726,6 +705,8 @@ fn add_target_crate_root(
     };
 
     let mut env = Env::default();
+    inject_cargo_env(pkg, &mut env);
+
     if let Some(envs) = build_data.map(|it| &it.envs) {
         for (k, v) in envs {
             env.set(k, v.clone());
@@ -812,3 +793,40 @@ fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId)
         log::error!("{}", err)
     }
 }
+
+/// Recreates the compile-time environment variables that Cargo sets.
+///
+/// Should be synced with
+/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+///
+/// FIXME: ask Cargo to provide this data instead of re-deriving.
+fn inject_cargo_env(package: &PackageData, env: &mut Env) {
+    // FIXME: Missing variables:
+    // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
+
+    let mut manifest_dir = package.manifest.clone();
+    manifest_dir.pop();
+    env.set("CARGO_MANIFEST_DIR".into(), manifest_dir.as_os_str().to_string_lossy().into_owned());
+
+    // Not always right, but works for common cases.
+    env.set("CARGO".into(), "cargo".into());
+
+    env.set("CARGO_PKG_VERSION".into(), package.version.to_string());
+    env.set("CARGO_PKG_VERSION_MAJOR".into(), package.version.major.to_string());
+    env.set("CARGO_PKG_VERSION_MINOR".into(), package.version.minor.to_string());
+    env.set("CARGO_PKG_VERSION_PATCH".into(), package.version.patch.to_string());
+    env.set("CARGO_PKG_VERSION_PRE".into(), package.version.pre.to_string());
+
+    env.set("CARGO_PKG_AUTHORS".into(), String::new());
+
+    env.set("CARGO_PKG_NAME".into(), package.name.clone());
+    // FIXME: This isn't really correct (a package can have many crates with different names), but
+    // it's better than leaving the variable unset.
+    env.set("CARGO_CRATE_NAME".into(), CrateName::normalize_dashes(&package.name).to_string());
+    env.set("CARGO_PKG_DESCRIPTION".into(), String::new());
+    env.set("CARGO_PKG_HOMEPAGE".into(), String::new());
+    env.set("CARGO_PKG_REPOSITORY".into(), String::new());
+    env.set("CARGO_PKG_LICENSE".into(), String::new());
+
+    env.set("CARGO_PKG_LICENSE_FILE".into(), String::new());
+}
crates/rust-analyzer/src/cli/analysis_stats.rs

@@ -68,7 +68,6 @@ impl AnalysisStatsCmd {
         cargo_config.no_sysroot = self.no_sysroot;
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: self.enable_build_scripts,
-            wrap_rustc: false,
             with_proc_macro: self.enable_proc_macros,
             prefill_caches: false,
         };
crates/rust-analyzer/src/cli/diagnostics.rs

@@ -34,12 +34,8 @@ pub fn diagnostics(
     with_proc_macro: bool,
 ) -> Result<()> {
     let cargo_config = Default::default();
-    let load_cargo_config = LoadCargoConfig {
-        load_out_dirs_from_check,
-        with_proc_macro,
-        wrap_rustc: false,
-        prefill_caches: false,
-    };
+    let load_cargo_config =
+        LoadCargoConfig { load_out_dirs_from_check, with_proc_macro, prefill_caches: false };
     let (host, _vfs, _proc_macro) =
         load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {})?;
     let db = host.raw_database();
crates/rust-analyzer/src/cli/load_cargo.rs

@@ -8,7 +8,7 @@ use hir::db::DefDatabase;
 use ide::{AnalysisHost, Change};
 use ide_db::base_db::CrateGraph;
 use project_model::{
-    BuildDataCollector, CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace,
+    CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace, WorkspaceBuildScripts,
 };
 use vfs::{loader::Handle, AbsPath, AbsPathBuf};
 
@@ -16,7 +16,6 @@ use crate::reload::{ProjectFolders, SourceRootConfig};
 
 pub(crate) struct LoadCargoConfig {
     pub(crate) load_out_dirs_from_check: bool,
-    pub(crate) wrap_rustc: bool,
     pub(crate) with_proc_macro: bool,
     pub(crate) prefill_caches: bool,
 }
@@ -33,12 +32,13 @@ pub(crate) fn load_workspace_at(
     eprintln!("root = {:?}", root);
     let workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
 
-    load_workspace(workspace, load_config, progress)
+    load_workspace(workspace, cargo_config, load_config, progress)
 }
 
 fn load_workspace(
     ws: ProjectWorkspace,
-    config: &LoadCargoConfig,
+    cargo_config: &CargoConfig,
+    load_config: &LoadCargoConfig,
     progress: &dyn Fn(String),
 ) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroClient>)> {
     let (sender, receiver) = unbounded();
@@ -49,33 +49,29 @@ fn load_workspace(
         Box::new(loader)
     };
 
-    let proc_macro_client = if config.with_proc_macro {
+    let proc_macro_client = if load_config.with_proc_macro {
         let path = AbsPathBuf::assert(std::env::current_exe()?);
         Some(ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap())
     } else {
         None
     };
 
-    let build_data = if config.load_out_dirs_from_check {
-        let mut collector = BuildDataCollector::new(config.wrap_rustc);
-        ws.collect_build_data_configs(&mut collector);
-        Some(collector.collect(progress)?)
-    } else {
-        None
+    let build_scripts = match &ws {
+        ProjectWorkspace::Cargo { cargo, .. } if load_config.load_out_dirs_from_check => {
+            WorkspaceBuildScripts::run(cargo_config, cargo, progress)?
+        }
+        _ => WorkspaceBuildScripts::default(),
     };
 
-    let crate_graph = ws.to_crate_graph(
-        build_data.as_ref(),
-        proc_macro_client.as_ref(),
-        &mut |path: &AbsPath| {
+    let crate_graph =
+        ws.to_crate_graph(&build_scripts, proc_macro_client.as_ref(), &mut |path: &AbsPath| {
             let contents = loader.load_sync(path);
             let path = vfs::VfsPath::from(path.to_path_buf());
             vfs.set_file_contents(path.clone(), contents);
             vfs.file_id(&path)
-        },
-    );
+        });
 
-    let project_folders = ProjectFolders::new(&[ws], &[], build_data.as_ref());
+    let project_folders = ProjectFolders::new(&[ws], &[build_scripts], &[]);
     loader.set_config(vfs::loader::Config {
         load: project_folders.load,
         watch: vec![],
@@ -86,7 +82,7 @@ fn load_workspace(
     let host =
         load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
 
-    if config.prefill_caches {
+    if load_config.prefill_caches {
         host.analysis().prime_caches(|_| {})?;
     }
     Ok((host, vfs, proc_macro_client))
@@ -146,10 +142,9 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
         let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let cargo_config = Default::default();
+        let cargo_config = CargoConfig::default();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: false,
-            wrap_rustc: false,
             with_proc_macro: false,
             prefill_caches: false,
         };
crates/rust-analyzer/src/cli/ssr.rs

@@ -5,13 +5,13 @@ use crate::cli::{
     Result,
 };
 use ide_ssr::{MatchFinder, SsrPattern, SsrRule};
+use project_model::CargoConfig;
 
 pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
     use ide_db::base_db::SourceDatabaseExt;
-    let cargo_config = Default::default();
+    let cargo_config = CargoConfig::default();
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
-        wrap_rustc: false,
         with_proc_macro: true,
         prefill_caches: false,
     };
@@ -39,10 +39,9 @@ pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
 pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<String>) -> Result<()> {
     use ide_db::base_db::SourceDatabaseExt;
     use ide_db::symbol_index::SymbolsDatabase;
-    let cargo_config = Default::default();
+    let cargo_config = CargoConfig::default();
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
-        wrap_rustc: true,
         with_proc_macro: true,
         prefill_caches: false,
     };
crates/rust-analyzer/src/config.rs

@@ -628,9 +628,6 @@ impl Config {
     pub fn run_build_scripts(&self) -> bool {
         self.data.cargo_runBuildScripts || self.data.procMacro_enable
     }
-    pub fn wrap_rustc(&self) -> bool {
-        self.data.cargo_useRustcWrapperForBuildScripts
-    }
     pub fn cargo(&self) -> CargoConfig {
         let rustc_source = self.data.rustcSource.as_ref().map(|rustc_src| {
             if rustc_src == "discover" {
@@ -648,6 +645,7 @@ impl Config {
             rustc_source,
             no_sysroot: self.data.cargo_noSysroot,
             unset_test_crates: self.data.cargo_unsetTest.clone(),
+            wrap_rustc_in_build_scripts: self.data.cargo_useRustcWrapperForBuildScripts,
         }
     }
crates/rust-analyzer/src/global_state.rs

@@ -12,7 +12,7 @@ use ide_db::base_db::{CrateId, VfsPath};
 use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
 use project_model::{
-    BuildDataCollector, BuildDataResult, CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target,
+    CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target, WorkspaceBuildScripts,
 };
 use rustc_hash::FxHashMap;
 use vfs::AnchoredPathBuf;
@@ -79,12 +79,15 @@ pub(crate) struct GlobalState {
     /// fetch.
     ///
     /// If the fetch (partially) fails, we do not update the values.
+    ///
+    /// Invariant: workspaces.len() == workspace_build_data
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
-    pub(crate) fetch_workspaces_queue: OpQueue<(), Vec<anyhow::Result<ProjectWorkspace>>>,
-    pub(crate) workspace_build_data: Option<BuildDataResult>,
-    pub(crate) fetch_build_data_queue:
-        OpQueue<BuildDataCollector, Option<anyhow::Result<BuildDataResult>>>,
-    pub(crate) prime_caches_queue: OpQueue<(), ()>,
+    pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
+    pub(crate) workspace_build_data: Vec<WorkspaceBuildScripts>,
+    pub(crate) fetch_build_data_queue: OpQueue<Vec<anyhow::Result<WorkspaceBuildScripts>>>,
+
+    pub(crate) prime_caches_queue: OpQueue<()>,
 
     latest_requests: Arc<RwLock<LatestRequests>>,
 }
@@ -146,7 +149,7 @@ impl GlobalState {
 
             workspaces: Arc::new(Vec::new()),
             fetch_workspaces_queue: OpQueue::default(),
-            workspace_build_data: None,
+            workspace_build_data: Vec::new(),
             prime_caches_queue: OpQueue::default(),
 
             fetch_build_data_queue: OpQueue::default(),
crates/rust-analyzer/src/integrated_benchmarks.rs

@@ -17,6 +17,7 @@ use ide_db::helpers::{
     insert_use::{ImportGranularity, InsertUseConfig},
     SnippetCap,
 };
+use project_model::CargoConfig;
 use test_utils::project_root;
 use vfs::{AbsPathBuf, VfsPath};
 
@@ -32,10 +33,9 @@ fn integrated_highlighting_benchmark() {
     let workspace_to_load = project_root();
     let file = "./crates/ide_db/src/apply_change.rs";
 
-    let cargo_config = Default::default();
+    let cargo_config = CargoConfig::default();
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
-        wrap_rustc: false,
         with_proc_macro: false,
         prefill_caches: false,
     };
@@ -87,10 +87,9 @@ fn integrated_completion_benchmark() {
     let workspace_to_load = project_root();
     let file = "./crates/hir/src/lib.rs";
 
-    let cargo_config = Default::default();
+    let cargo_config = CargoConfig::default();
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
-        wrap_rustc: false,
         with_proc_macro: false,
         prefill_caches: true,
     };
crates/rust-analyzer/src/main_loop.rs

@@ -12,7 +12,6 @@ use ide::{FileId, PrimeCachesProgress};
 use ide_db::base_db::VfsPath;
 use lsp_server::{Connection, Notification, Request, Response};
 use lsp_types::notification::Notification as _;
-use project_model::BuildDataCollector;
 use vfs::ChangeKind;
 
 use crate::{
@@ -236,12 +235,7 @@ impl GlobalState {
                     let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
 
                     if self.config.run_build_scripts() && workspaces_updated {
-                        let mut collector =
-                            BuildDataCollector::new(self.config.wrap_rustc());
-                        for ws in self.workspaces.iter() {
-                            ws.collect_build_data_configs(&mut collector);
-                        }
-                        self.fetch_build_data_request(collector)
+                        self.fetch_build_data_request()
                     }
 
                     (Progress::End, None)
@@ -719,23 +713,21 @@ impl GlobalState {
         self.maybe_update_diagnostics();
 
         // Ensure that only one cache priming task can run at a time
-        self.prime_caches_queue.request_op(());
-        if self.prime_caches_queue.should_start_op().is_none() {
-            return;
-        }
-
-        self.task_pool.handle.spawn_with_sender({
-            let snap = self.snapshot();
-            move |sender| {
-                let cb = |progress| {
-                    sender.send(Task::PrimeCaches(progress)).unwrap();
-                };
-                match snap.analysis.prime_caches(cb) {
-                    Ok(()) => (),
-                    Err(_canceled) => (),
+        self.prime_caches_queue.request_op();
+        if self.prime_caches_queue.should_start_op() {
+            self.task_pool.handle.spawn_with_sender({
+                let snap = self.snapshot();
+                move |sender| {
+                    let cb = |progress| {
+                        sender.send(Task::PrimeCaches(progress)).unwrap();
+                    };
+                    match snap.analysis.prime_caches(cb) {
+                        Ok(()) => (),
+                        Err(_canceled) => (),
+                    }
                 }
-            }
-        });
+            });
+        }
     }
     fn maybe_update_diagnostics(&mut self) {
         let subscriptions = self
@@ -1,28 +1,29 @@
 //! Bookkeeping to make sure only one long-running operation is being executed
 //! at a time.
 
-pub(crate) struct OpQueue<Args, Output> {
-    op_requested: Option<Args>,
+pub(crate) struct OpQueue<Output> {
+    op_requested: bool,
     op_in_progress: bool,
     last_op_result: Output,
 }
 
-impl<Args, Output: Default> Default for OpQueue<Args, Output> {
+impl<Output: Default> Default for OpQueue<Output> {
     fn default() -> Self {
-        Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
+        Self { op_requested: false, op_in_progress: false, last_op_result: Default::default() }
     }
 }
 
-impl<Args, Output> OpQueue<Args, Output> {
-    pub(crate) fn request_op(&mut self, data: Args) {
-        self.op_requested = Some(data);
+impl<Output> OpQueue<Output> {
+    pub(crate) fn request_op(&mut self) {
+        self.op_requested = true;
     }
-    pub(crate) fn should_start_op(&mut self) -> Option<Args> {
+    pub(crate) fn should_start_op(&mut self) -> bool {
         if self.op_in_progress {
-            return None;
+            return false;
         }
-        self.op_in_progress = self.op_requested.is_some();
-        self.op_requested.take()
+        self.op_in_progress = self.op_requested;
+        self.op_requested = false;
+        self.op_in_progress
     }
     pub(crate) fn op_completed(&mut self, result: Output) {
         assert!(self.op_in_progress);
@@ -30,7 +31,6 @@ impl<Args, Output> OpQueue<Args, Output> {
         self.last_op_result = result;
     }
 
-    #[allow(unused)]
     pub(crate) fn last_op_result(&self) -> &Output {
         &self.last_op_result
     }
@@ -38,6 +38,6 @@ impl<Args, Output> OpQueue<Args, Output> {
         self.op_in_progress
     }
     pub(crate) fn op_requested(&self) -> bool {
-        self.op_requested.is_some()
+        self.op_requested
    }
 }
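Note: the rewritten OpQueue above drops the unused `Args` type parameter, so a pending operation is now just a boolean flag sitting next to the last result. Below is a minimal, self-contained sketch of how such a queue is driven; the struct is re-declared here only for illustration, and both the `main` driver and the body of `op_completed` are assumptions rather than part of the change.

    // Sketch only: a trimmed-down OpQueue<Output> mirroring the diff above.
    pub struct OpQueue<Output> {
        op_requested: bool,
        op_in_progress: bool,
        last_op_result: Output,
    }

    impl<Output> OpQueue<Output> {
        pub fn request_op(&mut self) {
            self.op_requested = true;
        }
        pub fn should_start_op(&mut self) -> bool {
            if self.op_in_progress {
                return false;
            }
            self.op_in_progress = self.op_requested;
            self.op_requested = false;
            self.op_in_progress
        }
        pub fn op_completed(&mut self, result: Output) {
            assert!(self.op_in_progress);
            self.op_in_progress = false; // assumed: mark the operation as finished
            self.last_op_result = result;
        }
    }

    fn main() {
        let mut queue = OpQueue { op_requested: false, op_in_progress: false, last_op_result: 0u32 };
        queue.request_op();              // something asked for the operation
        if queue.should_start_op() {     // at most one run is in flight
            queue.op_completed(42);      // record the result once it finishes
        }
        assert_eq!(queue.last_op_result, 42);
    }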
@@ -1,11 +1,12 @@
 //! Project loading & configuration updates
 use std::{mem, sync::Arc};
 
+use always_assert::always;
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::db::DefDatabase;
 use ide::Change;
 use ide_db::base_db::{CrateGraph, SourceRoot, VfsPath};
-use project_model::{BuildDataCollector, BuildDataResult, ProcMacroClient, ProjectWorkspace};
+use project_model::{ProcMacroClient, ProjectWorkspace, WorkspaceBuildScripts};
 use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
 
 use crate::{
@@ -26,7 +27,7 @@ pub(crate) enum ProjectWorkspaceProgress {
 pub(crate) enum BuildDataProgress {
     Begin,
     Report(String),
-    End(anyhow::Result<BuildDataResult>),
+    End(Vec<anyhow::Result<WorkspaceBuildScripts>>),
 }
 
 impl GlobalState {
@@ -144,10 +145,10 @@ impl GlobalState {
     }
 
     pub(crate) fn fetch_workspaces_request(&mut self) {
-        self.fetch_workspaces_queue.request_op(())
+        self.fetch_workspaces_queue.request_op()
     }
     pub(crate) fn fetch_workspaces_if_needed(&mut self) {
-        if self.fetch_workspaces_queue.should_start_op().is_none() {
+        if !self.fetch_workspaces_queue.should_start_op() {
             return;
         }
         log::info!("will fetch workspaces");
@@ -207,14 +208,16 @@ impl GlobalState {
         self.fetch_workspaces_queue.op_completed(workspaces)
     }
 
-    pub(crate) fn fetch_build_data_request(&mut self, build_data_collector: BuildDataCollector) {
-        self.fetch_build_data_queue.request_op(build_data_collector);
+    pub(crate) fn fetch_build_data_request(&mut self) {
+        self.fetch_build_data_queue.request_op();
     }
     pub(crate) fn fetch_build_data_if_needed(&mut self) {
-        let mut build_data_collector = match self.fetch_build_data_queue.should_start_op() {
-            Some(it) => it,
-            None => return,
-        };
+        if !self.fetch_build_data_queue.should_start_op() {
+            return;
+        }
+
+        let workspaces = Arc::clone(&self.workspaces);
+        let config = self.config.cargo();
         self.task_pool.handle.spawn_with_sender(move |sender| {
             sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
 
@@ -224,15 +227,25 @@ impl GlobalState {
                     sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                 }
             };
-            let res = build_data_collector.collect(&progress);
+            let mut res = Vec::new();
+            for ws in workspaces.iter() {
+                let ws = match ws {
+                    ProjectWorkspace::Cargo { cargo, .. } => cargo,
+                    ProjectWorkspace::DetachedFiles { .. } | ProjectWorkspace::Json { .. } => {
+                        res.push(Ok(WorkspaceBuildScripts::default()));
+                        continue;
+                    }
+                };
+                res.push(WorkspaceBuildScripts::run(&config, ws, &progress))
+            }
             sender.send(Task::FetchBuildData(BuildDataProgress::End(res))).unwrap();
         });
     }
     pub(crate) fn fetch_build_data_completed(
         &mut self,
-        build_data: anyhow::Result<BuildDataResult>,
+        build_data: Vec<anyhow::Result<WorkspaceBuildScripts>>,
     ) {
-        self.fetch_build_data_queue.op_completed(Some(build_data))
+        self.fetch_build_data_queue.op_completed(build_data)
     }
 
     pub(crate) fn switch_workspaces(&mut self) {
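For reference, the new fetch_build_data_if_needed body collects one WorkspaceBuildScripts result per workspace, pushing a default for workspace kinds that have no build scripts. Below is a hedged, self-contained sketch of that shape using stand-in types; `Workspace`, `BuildScripts`, and `run_build_scripts` are illustrative names, not the real project-model API.

    // Illustrative stand-ins; the real types are ProjectWorkspace and WorkspaceBuildScripts.
    #[derive(Debug)]
    enum Workspace {
        Cargo { name: &'static str },
        Json,
        DetachedFiles,
    }

    #[derive(Debug, Default)]
    struct BuildScripts {
        out_dirs: Vec<String>,
    }

    // Stand-in for running a Cargo workspace's build scripts and collecting their output.
    fn run_build_scripts(name: &str) -> Result<BuildScripts, String> {
        Ok(BuildScripts { out_dirs: vec![format!("target/{}/out", name)] })
    }

    fn main() {
        let workspaces = vec![
            Workspace::Cargo { name: "rust-analyzer" },
            Workspace::Json,
            Workspace::DetachedFiles,
        ];

        // One result per workspace, index-aligned with `workspaces`; non-Cargo
        // workspaces simply get a default value, mirroring the loop in the diff.
        let mut res: Vec<Result<BuildScripts, String>> = Vec::new();
        for ws in &workspaces {
            match ws {
                Workspace::Cargo { name } => res.push(run_build_scripts(name)),
                Workspace::Json | Workspace::DetachedFiles => {
                    res.push(Ok(BuildScripts::default()))
                }
            }
        }

        assert_eq!(res.len(), workspaces.len());
        println!("{:?}", res);
    }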
@@ -257,12 +270,22 @@ impl GlobalState {
             .filter_map(|res| res.as_ref().ok().cloned())
             .collect::<Vec<_>>();
 
-        let workspace_build_data = match self.fetch_build_data_queue.last_op_result() {
-            Some(Ok(it)) => Some(it.clone()),
-            None | Some(Err(_)) => None,
-        };
+        let mut build_scripts = self
+            .fetch_build_data_queue
+            .last_op_result()
+            .iter()
+            .map(|res| res.as_ref().ok().cloned().unwrap_or_default())
+            .collect::<Vec<_>>();
 
-        if *self.workspaces == workspaces && self.workspace_build_data == workspace_build_data {
+        // FIXME: This is not even remotely correct. I do hope that this is
+        // eventually consistent though. We need to figure a better way to map
+        // `cargo metadata` to `cargo check` in the future.
+        //
+        // I *think* what we need here is an extra field on `ProjectWorkspace`,
+        // and a workflow to set it, once build data is ready.
+        build_scripts.resize_with(workspaces.len(), WorkspaceBuildScripts::default);
+
+        if *self.workspaces == workspaces && self.workspace_build_data == build_scripts {
             return;
         }
 
@@ -271,7 +294,8 @@ impl GlobalState {
             let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                 watchers: workspaces
                     .iter()
-                    .flat_map(|it| it.to_roots(workspace_build_data.as_ref()))
+                    .zip(&build_scripts)
+                    .flat_map(|(ws, bs)| ws.to_roots(bs))
                     .filter(|it| it.is_member)
                     .flat_map(|root| {
                         root.include.into_iter().flat_map(|it| {
@@ -304,7 +328,7 @@ impl GlobalState {
 
         let files_config = self.config.files();
         let project_folders =
-            ProjectFolders::new(&workspaces, &files_config.exclude, workspace_build_data.as_ref());
+            ProjectFolders::new(&workspaces, &build_scripts, &files_config.exclude);
 
         if self.proc_macro_client.is_none() {
             self.proc_macro_client = match self.config.proc_macro_srv() {
@@ -353,9 +377,9 @@ impl GlobalState {
            }
            res
        };
-        for ws in workspaces.iter() {
+        for (ws, bs) in workspaces.iter().zip(&build_scripts) {
             crate_graph.extend(ws.to_crate_graph(
-                workspace_build_data.as_ref(),
+                bs,
                 self.proc_macro_client.as_ref(),
                 &mut load,
             ));
@@ -367,7 +391,7 @@ impl GlobalState {
 
         self.source_root_config = project_folders.source_root_config;
         self.workspaces = Arc::new(workspaces);
-        self.workspace_build_data = workspace_build_data;
+        self.workspace_build_data = build_scripts;
 
         self.analysis_host.apply_change(change);
         self.process_changes();
@@ -392,13 +416,19 @@ impl GlobalState {
     }
 
     fn build_data_error(&self) -> Option<String> {
-        match self.fetch_build_data_queue.last_op_result() {
-            Some(Err(err)) => {
-                Some(format!("rust-analyzer failed to fetch build data: {:#}\n", err))
+        let mut buf = String::new();
+
+        for ws in self.fetch_build_data_queue.last_op_result() {
+            if let Err(err) = ws {
+                stdx::format_to!(buf, "rust-analyzer failed to run custom build: {:#}\n", err);
             }
-            Some(Ok(data)) => data.error(),
-            None => None,
         }
+
+        if buf.is_empty() {
+            return None;
+        }
+
+        Some(buf)
     }
 
     fn reload_flycheck(&mut self) {
@@ -451,14 +481,15 @@ pub(crate) struct ProjectFolders {
 impl ProjectFolders {
     pub(crate) fn new(
         workspaces: &[ProjectWorkspace],
+        build_scripts: &[WorkspaceBuildScripts],
         global_excludes: &[AbsPathBuf],
-        build_data: Option<&BuildDataResult>,
     ) -> ProjectFolders {
+        always!(workspaces.len() == build_scripts.len());
         let mut res = ProjectFolders::default();
         let mut fsc = FileSetConfig::builder();
         let mut local_filesets = vec![];
 
-        for root in workspaces.iter().flat_map(|it| it.to_roots(build_data)) {
+        for root in workspaces.iter().zip(build_scripts).flat_map(|(ws, bs)| ws.to_roots(bs)) {
             let file_set_roots: Vec<VfsPath> =
                 root.include.iter().cloned().map(VfsPath::from).collect();
 
@@ -62,6 +62,12 @@ impl<T, V> std::ops::Index<Idx<V>> for ArenaMap<Idx<V>, T> {
     }
 }
 
+impl<T, V> std::ops::IndexMut<Idx<V>> for ArenaMap<Idx<V>, T> {
+    fn index_mut(&mut self, idx: Idx<V>) -> &mut T {
+        self.v[Self::to_idx(idx)].as_mut().unwrap()
+    }
+}
+
 impl<T, V> Default for ArenaMap<Idx<V>, T> {
     fn default() -> Self {
         ArenaMap { v: Vec::new(), _ty: PhantomData }
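The last hunk adds an IndexMut impl so entries of an ArenaMap can be updated in place through `map[idx]`. Below is a small self-contained sketch of the Index/IndexMut pair over a `Vec<Option<T>>`; the types here are simplified stand-ins, while the real map is keyed by the arena's `Idx` type and converts it with `Self::to_idx`.

    use std::marker::PhantomData;
    use std::ops::{Index, IndexMut};

    // Simplified stand-ins for the arena types; the real Idx wraps a raw u32 index.
    #[derive(Clone, Copy)]
    struct Idx<V>(usize, PhantomData<V>);

    struct ArenaMap<V, T> {
        v: Vec<Option<T>>,
        _ty: PhantomData<V>,
    }

    impl<V, T> Default for ArenaMap<V, T> {
        fn default() -> Self {
            ArenaMap { v: Vec::new(), _ty: PhantomData }
        }
    }

    impl<V, T> ArenaMap<V, T> {
        fn insert(&mut self, idx: Idx<V>, t: T) {
            if self.v.len() <= idx.0 {
                self.v.resize_with(idx.0 + 1, || None);
            }
            self.v[idx.0] = Some(t);
        }
    }

    impl<V, T> Index<Idx<V>> for ArenaMap<V, T> {
        type Output = T;
        fn index(&self, idx: Idx<V>) -> &T {
            self.v[idx.0].as_ref().unwrap()
        }
    }

    // The capability added by the diff: mutable access through the same indexing syntax.
    impl<V, T> IndexMut<Idx<V>> for ArenaMap<V, T> {
        fn index_mut(&mut self, idx: Idx<V>) -> &mut T {
            self.v[idx.0].as_mut().unwrap()
        }
    }

    #[derive(Clone, Copy)]
    struct Package;

    fn main() {
        let idx: Idx<Package> = Idx(0, PhantomData);
        let mut counts: ArenaMap<Package, u32> = ArenaMap::default();
        counts.insert(idx, 1);
        counts[idx] += 1; // only possible with IndexMut
        assert_eq!(counts[idx], 2);
    }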