mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-25 11:17:13 +00:00)
Merge pull request #20315 from Veykril/push-pvmslwwouzzx
internal: Fix lockfile temp dir usage and use it for build scripts as well
commit 68810295f2
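The change, in brief: instead of letting cargo lock or rewrite the workspace's real Cargo.lock, rust-analyzer copies it into a temporary directory and points cargo at the copy via the unstable `--lockfile-path` flag; this PR moves that logic into a shared `make_lockfile_copy` helper and reuses it for the build-script/proc-macro build command. A minimal sketch of the idea follows (the helper name `copy_lockfile_to_temp`, the `main` driver, and the hard-coded `cargo check` invocation are illustrative only, not the PR's code; it assumes the `temp_dir` crate that the real code uses):

// Illustrative sketch, not the actual rust-analyzer implementation.
use std::path::{Path, PathBuf};
use std::process::Command;

fn copy_lockfile_to_temp(lockfile: &Path) -> Option<(temp_dir::TempDir, PathBuf)> {
    // Fresh temp dir; returning it keeps it alive until the caller drops it.
    let dir = temp_dir::TempDir::with_prefix("rust-analyzer").ok()?;
    let copy = dir.path().join("Cargo.lock");
    match std::fs::copy(lockfile, &copy) {
        // Copied, so cargo will read/update the copy instead of the real lockfile.
        Ok(_) => Some((dir, copy)),
        // No lockfile yet: cargo can simply create one at the copy's path.
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => Some((dir, copy)),
        Err(_) => None,
    }
}

fn main() {
    if let Some((_guard, lockfile_copy)) = copy_lockfile_to_temp(Path::new("Cargo.lock")) {
        // `_guard` must outlive the command; dropping it deletes the temp dir.
        let mut cmd = Command::new("cargo");
        cmd.args(["check", "--workspace", "-Zunstable-options", "--lockfile-path"])
            .arg(&lockfile_copy);
        // Spawning the command is omitted in this sketch.
    }
}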
@@ -30,7 +30,8 @@ pub type ProcMacroPaths =
 pub enum ProcMacroLoadingError {
     Disabled,
     FailedToBuild,
-    MissingDylibPath,
+    ExpectedProcMacroArtifact,
+    MissingDylibPath(Box<[String]>),
     NotYetBuilt,
     NoProcMacros,
     ProcMacroSrvError(Box<str>),
@@ -39,8 +40,9 @@ impl ProcMacroLoadingError {
     pub fn is_hard_error(&self) -> bool {
         match self {
             ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
-            ProcMacroLoadingError::FailedToBuild
-            | ProcMacroLoadingError::MissingDylibPath
+            ProcMacroLoadingError::ExpectedProcMacroArtifact
+            | ProcMacroLoadingError::FailedToBuild
+            | ProcMacroLoadingError::MissingDylibPath(_)
             | ProcMacroLoadingError::NoProcMacros
             | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
         }
@@ -51,10 +53,23 @@ impl Error for ProcMacroLoadingError {}
 impl fmt::Display for ProcMacroLoadingError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
+            ProcMacroLoadingError::ExpectedProcMacroArtifact => {
+                write!(f, "proc-macro crate did not build proc-macro artifact")
+            }
             ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
             ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
-            ProcMacroLoadingError::MissingDylibPath => {
-                write!(f, "proc-macro crate build data is missing a dylib path")
+            ProcMacroLoadingError::MissingDylibPath(candidates) if candidates.is_empty() => {
+                write!(
+                    f,
+                    "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system."
+                )
             }
+            ProcMacroLoadingError::MissingDylibPath(candidates) => {
+                write!(
+                    f,
+                    "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system. Candidates not considered due to not having a dynamic library extension: {}",
+                    candidates.join(", ")
+                )
+            }
             ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
             ProcMacroLoadingError::NoProcMacros => {
@@ -16,11 +16,13 @@ use la_arena::ArenaMap;
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::Deserialize as _;
 use stdx::never;
 use toolchain::Tool;

 use crate::{
     CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot,
-    TargetKind, utf8_stdout,
+    TargetKind, cargo_config_file::make_lockfile_copy,
+    cargo_workspace::MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH, utf8_stdout,
 };

 /// Output of the build script and proc-macro building steps for a workspace.
@@ -30,6 +32,15 @@ pub struct WorkspaceBuildScripts {
     error: Option<String>,
 }

+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub enum ProcMacroDylibPath {
+    Path(AbsPathBuf),
+    DylibNotFound(Box<[Utf8PathBuf]>),
+    NotProcMacro,
+    #[default]
+    NotBuilt,
+}
+
 /// Output of the build script and proc-macro building step for a concrete package.
 #[derive(Debug, Clone, Default, PartialEq, Eq)]
 pub(crate) struct BuildScriptOutput {
@@ -43,7 +54,7 @@ pub(crate) struct BuildScriptOutput {
     /// Directory where a build script might place its output.
     pub(crate) out_dir: Option<AbsPathBuf>,
     /// Path to the proc-macro library file if this package exposes proc-macros.
-    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+    pub(crate) proc_macro_dylib_path: ProcMacroDylibPath,
 }

 impl BuildScriptOutput {
@@ -51,7 +62,7 @@ impl BuildScriptOutput {
         self.cfgs.is_empty()
             && self.envs.is_empty()
             && self.out_dir.is_none()
-            && self.proc_macro_dylib_path.is_none()
+            && self.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt
     }
 }

@@ -67,7 +78,7 @@ impl WorkspaceBuildScripts {
         let current_dir = workspace.workspace_root();

         let allowed_features = workspace.workspace_features();
-        let cmd = Self::build_command(
+        let (_guard, cmd) = Self::build_command(
             config,
             &allowed_features,
             workspace.manifest_path(),
@@ -88,7 +99,7 @@
     ) -> io::Result<Vec<WorkspaceBuildScripts>> {
         assert_eq!(config.invocation_strategy, InvocationStrategy::Once);

-        let cmd = Self::build_command(
+        let (_guard, cmd) = Self::build_command(
            config,
            &Default::default(),
            // This is not gonna be used anyways, so just construct a dummy here
@@ -126,6 +137,8 @@
            |package, cb| {
                if let Some(&(package, workspace)) = by_id.get(package) {
                    cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
+                } else {
+                    never!("Received compiler message for unknown package: {}", package);
                }
            },
            progress,
@@ -140,12 +153,9 @@
        if tracing::enabled!(tracing::Level::INFO) {
            for (idx, workspace) in workspaces.iter().enumerate() {
                for package in workspace.packages() {
-                    let package_build_data = &mut res[idx].outputs[package];
+                    let package_build_data: &mut BuildScriptOutput = &mut res[idx].outputs[package];
                    if !package_build_data.is_empty() {
-                        tracing::info!(
-                            "{}: {package_build_data:?}",
-                            workspace[package].manifest.parent(),
-                        );
+                        tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
                    }
                }
            }
@@ -198,10 +208,33 @@
                    let path = dir_entry.path();
                    let extension = path.extension()?;
                    if extension == std::env::consts::DLL_EXTENSION {
-                        let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
-                        let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?)
-                            .ok()?;
-                        return Some((name, path));
+                        let name = path
+                            .file_stem()?
+                            .to_str()?
+                            .split_once('-')?
+                            .0
+                            .trim_start_matches("lib")
+                            .to_owned();
+                        let path = match Utf8PathBuf::from_path_buf(path) {
+                            Ok(path) => path,
+                            Err(path) => {
+                                tracing::warn!(
+                                    "Proc-macro dylib path contains non-UTF8 characters: {:?}",
+                                    path.display()
+                                );
+                                return None;
+                            }
+                        };
+                        return match AbsPathBuf::try_from(path) {
+                            Ok(path) => Some((name, path)),
+                            Err(path) => {
+                                tracing::error!(
+                                    "proc-macro dylib path is not absolute: {:?}",
+                                    path
+                                );
+                                None
+                            }
+                        };
                    }
                }
                None
@@ -209,28 +242,24 @@
            .collect();
            for p in rustc.packages() {
                let package = &rustc[p];
-                if package
-                    .targets
-                    .iter()
-                    .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }))
-                {
-                    if let Some((_, path)) = proc_macro_dylibs
-                        .iter()
-                        .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
-                    {
-                        bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
+                bs.outputs[p].proc_macro_dylib_path =
+                    if package.targets.iter().any(|&it| {
+                        matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })
+                    }) {
+                        match proc_macro_dylibs.iter().find(|(name, _)| *name == package.name) {
+                            Some((_, path)) => ProcMacroDylibPath::Path(path.clone()),
+                            _ => ProcMacroDylibPath::DylibNotFound(Box::default()),
+                        }
+                    } else {
+                        ProcMacroDylibPath::NotProcMacro
                    }
-                }
            }

            if tracing::enabled!(tracing::Level::INFO) {
                for package in rustc.packages() {
                    let package_build_data = &bs.outputs[package];
                    if !package_build_data.is_empty() {
-                        tracing::info!(
-                            "{}: {package_build_data:?}",
-                            rustc[package].manifest.parent(),
-                        );
+                        tracing::info!("{}: {package_build_data:?}", rustc[package].manifest,);
                    }
                }
            }
@@ -263,6 +292,12 @@
            |package, cb| {
                if let Some(&package) = by_id.get(package) {
                    cb(&workspace[package].name, &mut outputs[package]);
+                } else {
+                    never!(
+                        "Received compiler message for unknown package: {}\n {}",
+                        package,
+                        by_id.keys().join(", ")
+                    );
                }
            },
            progress,
@@ -272,10 +307,7 @@
            for package in workspace.packages() {
                let package_build_data = &outputs[package];
                if !package_build_data.is_empty() {
-                    tracing::info!(
-                        "{}: {package_build_data:?}",
-                        workspace[package].manifest.parent(),
-                    );
+                    tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
                }
            }
        }
@@ -348,15 +380,21 @@
                        progress(format!(
                            "building compile-time-deps: proc-macro {name} built"
                        ));
+                        if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt {
+                            data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro;
+                        }
                        if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
                        {
-                            // Skip rmeta file
-                            if let Some(filename) =
-                                message.filenames.iter().find(|file| is_dylib(file))
-                            {
-                                let filename = AbsPath::assert(filename);
-                                data.proc_macro_dylib_path = Some(filename.to_owned());
-                            }
+                            data.proc_macro_dylib_path =
+                                match message.filenames.iter().find(|file| is_dylib(file)) {
+                                    Some(filename) => {
+                                        let filename = AbsPath::assert(filename);
+                                        ProcMacroDylibPath::Path(filename.to_owned())
+                                    }
+                                    None => ProcMacroDylibPath::DylibNotFound(
+                                        message.filenames.clone().into_boxed_slice(),
+                                    ),
+                                };
                        }
                    });
                }
@@ -393,14 +431,15 @@
        current_dir: &AbsPath,
        sysroot: &Sysroot,
        toolchain: Option<&semver::Version>,
-    ) -> io::Result<Command> {
+    ) -> io::Result<(Option<temp_dir::TempDir>, Command)> {
        match config.run_build_script_command.as_deref() {
            Some([program, args @ ..]) => {
                let mut cmd = toolchain::command(program, current_dir, &config.extra_env);
                cmd.args(args);
-                Ok(cmd)
+                Ok((None, cmd))
            }
            _ => {
+                let mut requires_unstable_options = false;
                let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);

                cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
@@ -416,7 +455,19 @@
                if let Some(target) = &config.target {
                    cmd.args(["--target", target]);
                }

+                let mut temp_dir_guard = None;
+                if toolchain
+                    .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
+                {
+                    let lockfile_path =
+                        <_ as AsRef<Utf8Path>>::as_ref(manifest_path).with_extension("lock");
+                    if let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile_path) {
+                        temp_dir_guard = Some(temp_dir);
+                        cmd.arg("--lockfile-path");
+                        cmd.arg(target_lockfile.as_str());
+                        requires_unstable_options = true;
+                    }
+                }
                match &config.features {
                    CargoFeatures::All => {
                        cmd.arg("--all-features");
@@ -438,6 +489,7 @@
                }

                if manifest_path.is_rust_manifest() {
+                    requires_unstable_options = true;
                    cmd.arg("-Zscript");
                }

@@ -447,7 +499,7 @@
                // available in current toolchain's cargo, use it to build compile time deps only.
                const COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION: semver::Version = semver::Version {
                    major: 1,
-                    minor: 89,
+                    minor: 189,
                    patch: 0,
                    pre: semver::Prerelease::EMPTY,
                    build: semver::BuildMetadata::EMPTY,
@@ -457,8 +509,7 @@
                    toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);

                if cargo_comp_time_deps_available {
-                    cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
-                    cmd.arg("-Zunstable-options");
+                    requires_unstable_options = true;
                    cmd.arg("--compile-time-deps");
                    // we can pass this unconditionally, because we won't actually build the
                    // binaries, and as such, this will succeed even on targets without libtest
@@ -481,7 +532,11 @@
                        cmd.env("RA_RUSTC_WRAPPER", "1");
                    }
                }
-                Ok(cmd)
+                if requires_unstable_options {
+                    cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
+                    cmd.arg("-Zunstable-options");
+                }
+                Ok((temp_dir_guard, cmd))
            }
        }
    }
@@ -1,4 +1,5 @@
 //! Read `.cargo/config.toml` as a JSON object
+use paths::{Utf8Path, Utf8PathBuf};
 use rustc_hash::FxHashMap;
 use toolchain::Tool;

@@ -32,3 +33,24 @@ pub(crate) fn read(

     Some(json)
 }
+
+pub(crate) fn make_lockfile_copy(
+    lockfile_path: &Utf8Path,
+) -> Option<(temp_dir::TempDir, Utf8PathBuf)> {
+    let temp_dir = temp_dir::TempDir::with_prefix("rust-analyzer").ok()?;
+    let target_lockfile = temp_dir.path().join("Cargo.lock").try_into().ok()?;
+    match std::fs::copy(lockfile_path, &target_lockfile) {
+        Ok(_) => {
+            tracing::debug!("Copied lock file from `{}` to `{}`", lockfile_path, target_lockfile);
+            Some((temp_dir, target_lockfile))
+        }
+        // lockfile does not yet exist, so we can just create a new one in the temp dir
+        Err(e) if e.kind() == std::io::ErrorKind::NotFound => Some((temp_dir, target_lockfile)),
+        Err(e) => {
+            tracing::warn!(
+                "Failed to copy lock file from `{lockfile_path}` to `{target_lockfile}`: {e}",
+            );
+            None
+        }
+    }
+}
@@ -15,16 +15,18 @@ use span::Edition;
 use stdx::process::spawn_with_streaming_output;
 use toolchain::Tool;

+use crate::cargo_config_file::make_lockfile_copy;
 use crate::{CfgOverrides, InvocationStrategy};
 use crate::{ManifestPath, Sysroot};

-const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version {
-    major: 1,
-    minor: 82,
-    patch: 0,
-    pre: semver::Prerelease::EMPTY,
-    build: semver::BuildMetadata::EMPTY,
-};
+pub(crate) const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version =
+    semver::Version {
+        major: 1,
+        minor: 82,
+        patch: 0,
+        pre: semver::Prerelease::EMPTY,
+        build: semver::BuildMetadata::EMPTY,
+    };

 /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
 /// workspace. It pretty closely mirrors `cargo metadata` output.
@@ -245,7 +247,7 @@ pub enum TargetKind {
 }

 impl TargetKind {
-    fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
+    pub fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
        for kind in kinds {
            return match kind {
                cargo_metadata::TargetKind::Bin => TargetKind::Bin,
@@ -552,8 +554,10 @@ impl CargoWorkspace {

 pub(crate) struct FetchMetadata {
     command: cargo_metadata::MetadataCommand,
+    #[expect(dead_code)]
     manifest_path: ManifestPath,
     lockfile_path: Option<Utf8PathBuf>,
+    #[expect(dead_code)]
     kind: &'static str,
     no_deps: bool,
     no_deps_result: anyhow::Result<cargo_metadata::Metadata>,
@@ -604,12 +608,12 @@ impl FetchMetadata {
            // but nothing else
            let mut extra_args = config.extra_args.iter();
            while let Some(arg) = extra_args.next() {
-                if arg == "-Z" {
-                    if let Some(arg) = extra_args.next() {
-                        needs_nightly = true;
-                        other_options.push("-Z".to_owned());
-                        other_options.push(arg.to_owned());
-                    }
+                if arg == "-Z"
+                    && let Some(arg) = extra_args.next()
+                {
+                    needs_nightly = true;
+                    other_options.push("-Z".to_owned());
+                    other_options.push(arg.to_owned());
                }
            }

@@ -634,7 +638,7 @@
            command.other_options(other_options.clone());

            if needs_nightly {
-                command.env("RUSTC_BOOTSTRAP", "1");
+                command.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
            }

            // Pre-fetch basic metadata using `--no-deps`, which:
@@ -681,11 +685,12 @@
        locked: bool,
        progress: &dyn Fn(String),
    ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
+        _ = target_dir;
        let Self {
            mut command,
-            manifest_path,
+            manifest_path: _,
            lockfile_path,
-            kind,
+            kind: _,
            no_deps,
            no_deps_result,
            mut other_options,
|
||||
}
|
||||
|
||||
let mut using_lockfile_copy = false;
|
||||
let mut _temp_dir_guard = None;
|
||||
// The manifest is a rust file, so this means its a script manifest
|
||||
if let Some(lockfile) = lockfile_path {
|
||||
_temp_dir_guard = temp_dir::TempDir::with_prefix("rust-analyzer").ok();
|
||||
let target_lockfile = _temp_dir_guard
|
||||
.and_then(|tmp| tmp.path().join("Cargo.lock").try_into().ok())
|
||||
.unwrap_or_else(|| {
|
||||
// When multiple workspaces share the same target dir, they might overwrite into a
|
||||
// single lockfile path.
|
||||
// See https://github.com/rust-lang/rust-analyzer/issues/20189#issuecomment-3073520255
|
||||
let manifest_path_hash = std::hash::BuildHasher::hash_one(
|
||||
&std::hash::BuildHasherDefault::<rustc_hash::FxHasher>::default(),
|
||||
&manifest_path,
|
||||
);
|
||||
let disambiguator = format!(
|
||||
"{}_{manifest_path_hash}",
|
||||
manifest_path.components().nth_back(1).map_or("", |c| c.as_str())
|
||||
);
|
||||
|
||||
target_dir
|
||||
.join("rust-analyzer")
|
||||
.join("metadata")
|
||||
.join(kind)
|
||||
.join(disambiguator)
|
||||
.join("Cargo.lock")
|
||||
});
|
||||
match std::fs::copy(&lockfile, &target_lockfile) {
|
||||
Ok(_) => {
|
||||
using_lockfile_copy = true;
|
||||
other_options.push("--lockfile-path".to_owned());
|
||||
other_options.push(target_lockfile.to_string());
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
// There exists no lockfile yet
|
||||
using_lockfile_copy = true;
|
||||
other_options.push("--lockfile-path".to_owned());
|
||||
other_options.push(target_lockfile.to_string());
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
|
||||
);
|
||||
}
|
||||
}
|
||||
let mut _temp_dir_guard;
|
||||
if let Some(lockfile) = lockfile_path
|
||||
&& let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile)
|
||||
{
|
||||
_temp_dir_guard = temp_dir;
|
||||
other_options.push("--lockfile-path".to_owned());
|
||||
other_options.push(target_lockfile.to_string());
|
||||
using_lockfile_copy = true;
|
||||
}
|
||||
if using_lockfile_copy {
|
||||
command.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
|
||||
other_options.push("-Zunstable-options".to_owned());
|
||||
command.env("RUSTC_BOOTSTRAP", "1");
|
||||
}
|
||||
// No need to lock it if we copied the lockfile, we won't modify the original after all/
|
||||
// This way cargo cannot error out on us if the lockfile requires updating.
|
||||
@ -752,13 +721,11 @@ impl FetchMetadata {
|
||||
}
|
||||
command.other_options(other_options);
|
||||
|
||||
// FIXME: Fetching metadata is a slow process, as it might require
|
||||
// calling crates.io. We should be reporting progress here, but it's
|
||||
// unclear whether cargo itself supports it.
|
||||
progress("cargo metadata: started".to_owned());
|
||||
|
||||
let res = (|| -> anyhow::Result<(_, _)> {
|
||||
let mut errored = false;
|
||||
tracing::debug!("Running `{:?}`", command.cargo_command());
|
||||
let output =
|
||||
spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| {
|
||||
errored = errored || line.starts_with("error") || line.starts_with("warning");
|
||||
|
@@ -59,7 +59,7 @@ use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashSet;

 pub use crate::{
-    build_dependencies::WorkspaceBuildScripts,
+    build_dependencies::{ProcMacroDylibPath, WorkspaceBuildScripts},
     cargo_workspace::{
         CargoConfig, CargoFeatures, CargoMetadataConfig, CargoWorkspace, Package, PackageData,
         PackageDependency, RustLibSource, Target, TargetData, TargetKind,
@@ -139,21 +139,22 @@ impl ProjectManifest {
     }

     fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> {
-        if path.file_name().unwrap_or_default() == target_file_name {
-            if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) {
-                return Some(manifest);
-            }
+        if path.file_name().unwrap_or_default() == target_file_name
+            && let Ok(manifest) = ManifestPath::try_from(path.to_path_buf())
+        {
+            return Some(manifest);
         }

         let mut curr = Some(path);

         while let Some(path) = curr {
             let candidate = path.join(target_file_name);
-            if fs::metadata(&candidate).is_ok() {
-                if let Ok(manifest) = ManifestPath::try_from(candidate) {
-                    return Some(manifest);
-                }
+            if fs::metadata(&candidate).is_ok()
+                && let Ok(manifest) = ManifestPath::try_from(candidate)
+            {
+                return Some(manifest);
             }

             curr = path.parent();
         }

@@ -143,12 +143,11 @@ impl Sysroot {
            Some(root) => {
                // special case rustc, we can look that up directly in the sysroot's bin folder
                // as it should never invoke another cargo binary
-                if let Tool::Rustc = tool {
-                    if let Some(path) =
+                if let Tool::Rustc = tool
+                    && let Some(path) =
                        probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
-                    {
-                        return toolchain::command(path, current_dir, envs);
-                    }
+                {
+                    return toolchain::command(path, current_dir, envs);
                }

                let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs);
@@ -291,29 +290,26 @@

    pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
        self.workspace = workspace;
-        if self.error.is_none() {
-            if let Some(src_root) = &self.rust_lib_src_root {
-                let has_core = match &self.workspace {
-                    RustLibSrcWorkspace::Workspace(ws) => {
-                        ws.packages().any(|p| ws[p].name == "core")
-                    }
-                    RustLibSrcWorkspace::Json(project_json) => project_json
-                        .crates()
-                        .filter_map(|(_, krate)| krate.display_name.clone())
-                        .any(|name| name.canonical_name().as_str() == "core"),
-                    RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
-                    RustLibSrcWorkspace::Empty => true,
+        if self.error.is_none()
+            && let Some(src_root) = &self.rust_lib_src_root
+        {
+            let has_core = match &self.workspace {
+                RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
+                RustLibSrcWorkspace::Json(project_json) => project_json
+                    .crates()
+                    .filter_map(|(_, krate)| krate.display_name.clone())
+                    .any(|name| name.canonical_name().as_str() == "core"),
+                RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
+                RustLibSrcWorkspace::Empty => true,
            };
-                if !has_core {
-                    let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
-                        " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
-                    } else {
-                        ", try running `rustup component add rust-src` to possibly fix this"
-                    };
+            if !has_core {
+                let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
+                    " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
+                } else {
+                    ", try running `rustup component add rust-src` to possibly fix this"
+                };
-                    self.error = Some(format!(
-                        "sysroot at `{src_root}` is missing a `core` library{var_note}",
-                    ));
-                }
+                self.error =
+                    Some(format!("sysroot at `{src_root}` is missing a `core` library{var_note}",));
            }
        }
    }
@@ -24,7 +24,7 @@ use crate::{
     CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
     ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
     WorkspaceBuildScripts,
-    build_dependencies::BuildScriptOutput,
+    build_dependencies::{BuildScriptOutput, ProcMacroDylibPath},
     cargo_config_file,
     cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
     env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
@@ -424,12 +424,12 @@ impl ProjectWorkspace {
            sysroot.set_workspace(loaded_sysroot);
        }

-        if !cargo.requires_rustc_private() {
-            if let Err(e) = &mut rustc {
-                // We don't need the rustc sources here,
-                // so just discard the error.
-                _ = e.take();
-            }
+        if !cargo.requires_rustc_private()
+            && let Err(e) = &mut rustc
+        {
+            // We don't need the rustc sources here,
+            // so just discard the error.
+            _ = e.take();
        }

        Ok(ProjectWorkspace {
@@ -1163,17 +1163,15 @@ fn project_json_to_crate_graph(
                    crate = display_name.as_ref().map(|name| name.canonical_name().as_str()),
                    "added root to crate graph"
                );
-                if *is_proc_macro {
-                    if let Some(path) = proc_macro_dylib_path.clone() {
-                        let node = Ok((
-                            display_name
-                                .as_ref()
-                                .map(|it| it.canonical_name().as_str().to_owned())
-                                .unwrap_or_else(|| format!("crate{}", idx.0)),
-                            path,
-                        ));
-                        proc_macros.insert(crate_graph_crate_id, node);
-                    }
+                if *is_proc_macro && let Some(path) = proc_macro_dylib_path.clone() {
+                    let node = Ok((
+                        display_name
+                            .as_ref()
+                            .map(|it| it.canonical_name().as_str().to_owned())
+                            .unwrap_or_else(|| format!("crate{}", idx.0)),
+                        path,
+                    ));
+                    proc_macros.insert(crate_graph_crate_id, node);
                }
                (idx, crate_graph_crate_id)
            },
@@ -1319,14 +1317,12 @@ fn cargo_to_crate_graph(

            // Add dep edge of all targets to the package's lib target
            if let Some((to, name)) = lib_tgt.clone() {
-                if to != from && kind != TargetKind::BuildScript {
-                    // (build script can not depend on its library target)
-
-                    // For root projects with dashes in their name,
-                    // cargo metadata does not do any normalization,
-                    // so we do it ourselves currently
-                    let name = CrateName::normalize_dashes(&name);
-                    add_dep(crate_graph, from, name, to);
+                match to != from && kind != TargetKind::BuildScript {
+                    true => {
+                        let name = CrateName::normalize_dashes(&name);
+                        add_dep(crate_graph, from, name, to);
+                    }
+                    false => (),
                }
            }
        }
@@ -1638,9 +1634,21 @@ fn add_target_crate_root(
    let proc_macro = match build_data {
        Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
            match proc_macro_dylib_path {
-                Some(path) => Ok((cargo_name.to_owned(), path.clone())),
-                None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
-                None => Err(ProcMacroLoadingError::MissingDylibPath),
+                ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())),
+                ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt),
+                ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound(_)
+                    if has_errors =>
+                {
+                    Err(ProcMacroLoadingError::FailedToBuild)
+                }
+                ProcMacroDylibPath::NotProcMacro => {
+                    Err(ProcMacroLoadingError::ExpectedProcMacroArtifact)
+                }
+                ProcMacroDylibPath::DylibNotFound(candidates) => {
+                    Err(ProcMacroLoadingError::MissingDylibPath(
+                        candidates.iter().map(ToString::to_string).collect(),
+                    ))
+                }
            }
        }
        None => Err(ProcMacroLoadingError::NotYetBuilt),