Add --build-plan for 'cargo build'
With 'cargo build --build-plan', cargo does not actually run any commands, but instead prints out what it would have done in the form of a JSON data structure. Fixes #3815
Parent: 9e53ac6e65
Commit: 72e6b9d326
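For orientation, the plan is printed as a single JSON object whose field set comes from the `SerializedBuildPlan` and `Invocation` structs added below (compare the expected values in tests/testsuite/build_plan.rs). The invocation and trimmed output below are illustrative only; concrete paths are elided with the test suite's `[..]` wildcard:

    $ cargo build --build-plan -Z unstable-options
    {
      "inputs": ["[..]/foo/Cargo.toml"],
      "invocations": [
        {
          "package_name": "foo",
          "package_version": "0.5.0",
          "target_kind": ["bin"],
          "kind": "Host",
          "deps": [],
          "outputs": ["[..]/foo/target/debug/foo"],
          "links": {},
          "program": "rustc",
          "args": ["[..]"],
          "env": {},
          "cwd": "[..]/foo"
        }
      ]
    }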
@@ -132,6 +132,10 @@ pub trait AppExt: Sized {
         )
     }
 
+    fn arg_build_plan(self) -> Self {
+        self._arg(opt("build-plan", "Output the build plan in JSON"))
+    }
+
     fn arg_new_opts(self) -> Self {
         self._arg(
             opt(
@@ -275,6 +279,12 @@ pub trait ArgMatchesExt {
         let mut build_config = BuildConfig::new(config, self.jobs()?, &self.target(), mode)?;
         build_config.message_format = message_format;
         build_config.release = self._is_present("release");
+        build_config.build_plan = self._is_present("build-plan");
+        if build_config.build_plan && !config.cli_unstable().unstable_options {
+            Err(format_err!(
+                "`--build-plan` flag is unstable, pass `-Z unstable-options` to enable it"
+            ))?;
+        };
 
         let opts = CompileOptions {
             config,
@@ -31,6 +31,7 @@ pub fn cli() -> App {
         .arg(opt("out-dir", "Copy final artifacts to this directory").value_name("PATH"))
         .arg_manifest_path()
         .arg_message_format()
+        .arg_build_plan()
         .after_help(
             "\
If the --package argument is given, then SPEC is a package id specification
@@ -14,6 +14,8 @@ pub struct BuildConfig {
     pub mode: CompileMode,
     /// Whether to print std output in json format (for machine reading)
     pub message_format: MessageFormat,
+    /// Output a build plan to stdout instead of actually compiling.
+    pub build_plan: bool,
 }
 
 impl BuildConfig {
@@ -87,6 +89,7 @@ impl BuildConfig {
             release: false,
             mode,
             message_format: MessageFormat::Human,
+            build_plan: false,
         })
     }
 
@@ -229,6 +229,18 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
         }
         None
     }
+
+    /// Return the list of filenames read by cargo to generate the BuildContext
+    /// (all Cargo.toml, etc).
+    pub fn inputs(&self) -> CargoResult<Vec<PathBuf>> {
+        let mut inputs = Vec::new();
+        for id in self.packages.package_ids() {
+            let pkg = self.get_package(id)?;
+            inputs.push(pkg.manifest_path().to_path_buf());
+        }
+        inputs.sort();
+        Ok(inputs)
+    }
 }
 
 /// Information required to build for a target
src/cargo/core/compiler/build_plan.rs (new file, 158 lines)
@@ -0,0 +1,158 @@
+//! A graph-like structure used to represent the rustc commands to build the project and the
+//! interdependencies between them.
+//!
+//! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be
+//! shared with an external build system. Each Invocation in the BuildPlan comprises a single
+//! subprocess and defines the build environment, the outputs produced by the subprocess, and the
+//! dependencies on other Invocations.
+
+use std::collections::BTreeMap;
+
+use core::TargetKind;
+use super::{Context, Kind, Unit};
+use super::context::OutputFile;
+use util::{internal, CargoResult, ProcessBuilder};
+use std::sync::Arc;
+use std::path::PathBuf;
+use serde_json;
+use semver;
+
+#[derive(Debug, Serialize)]
+struct Invocation {
+    package_name: String,
+    package_version: semver::Version,
+    target_kind: TargetKind,
+    kind: Kind,
+    deps: Vec<usize>,
+    outputs: Vec<PathBuf>,
+    links: BTreeMap<PathBuf, PathBuf>,
+    program: String,
+    args: Vec<String>,
+    env: BTreeMap<String, String>,
+    cwd: Option<PathBuf>,
+}
+
+#[derive(Debug)]
+pub struct BuildPlan {
+    invocation_map: BTreeMap<String, usize>,
+    plan: SerializedBuildPlan,
+}
+
+#[derive(Debug, Serialize)]
+struct SerializedBuildPlan {
+    invocations: Vec<Invocation>,
+    inputs: Vec<PathBuf>,
+}
+
+impl Invocation {
+    pub fn new(unit: &Unit, deps: Vec<usize>) -> Invocation {
+        let id = unit.pkg.package_id();
+        Invocation {
+            package_name: id.name().to_string(),
+            package_version: id.version().clone(),
+            kind: unit.kind,
+            target_kind: unit.target.kind().clone(),
+            deps: deps,
+            outputs: Vec::new(),
+            links: BTreeMap::new(),
+            program: String::new(),
+            args: Vec::new(),
+            env: BTreeMap::new(),
+            cwd: None,
+        }
+    }
+
+    pub fn add_output(&mut self, path: &PathBuf, link: &Option<PathBuf>) {
+        self.outputs.push(path.clone());
+        if let Some(ref link) = *link {
+            self.links.insert(link.clone(), path.clone());
+        }
+    }
+
+    pub fn update_cmd(&mut self, cmd: ProcessBuilder) -> CargoResult<()> {
+        self.program = cmd.get_program()
+            .to_str()
+            .ok_or_else(|| format_err!("unicode program string required"))?
+            .to_string()
+            .clone();
+        self.cwd = Some(cmd.get_cwd().unwrap().to_path_buf());
+        for arg in cmd.get_args().iter() {
+            self.args.push(
+                arg.to_str()
+                    .ok_or_else(|| format_err!("unicode argument string required"))?
+                    .to_string()
+                    .clone(),
+            );
+        }
+        for var in cmd.get_envs().keys() {
+            let value = cmd.get_env(var).unwrap_or_default();
+            self.env.insert(
+                var.clone(),
+                value
+                    .to_str()
+                    .ok_or_else(|| format_err!("unicode environment value required"))?
+                    .to_string(),
+            );
+        }
+        Ok(())
+    }
+}
+
+impl BuildPlan {
+    pub fn new() -> BuildPlan {
+        BuildPlan {
+            invocation_map: BTreeMap::new(),
+            plan: SerializedBuildPlan::new(),
+        }
+    }
+
+    pub fn add(&mut self, cx: &Context, unit: &Unit) -> CargoResult<()> {
+        let id = self.plan.invocations.len();
+        self.invocation_map.insert(unit.buildkey(), id);
+        let deps = cx.dep_targets(&unit)
+            .iter()
+            .map(|dep| self.invocation_map[&dep.buildkey()])
+            .collect();
+        let invocation = Invocation::new(unit, deps);
+        self.plan.invocations.push(invocation);
+        Ok(())
+    }
+
+    pub fn update(
+        &mut self,
+        invocation_name: String,
+        cmd: ProcessBuilder,
+        outputs: Arc<Vec<OutputFile>>,
+    ) -> CargoResult<()> {
+        let id = self.invocation_map[&invocation_name];
+        let invocation = self.plan
+            .invocations
+            .get_mut(id)
+            .ok_or_else(|| internal(format!("couldn't find invocation for {}", invocation_name)))?;
+
+        invocation.update_cmd(cmd)?;
+        for output in outputs.iter() {
+            invocation.add_output(&output.path, &output.hardlink);
+        }
+
+        Ok(())
+    }
+
+    pub fn set_inputs(&mut self, inputs: Vec<PathBuf>) {
+        self.plan.inputs = inputs;
+    }
+
+    pub fn output_plan(self) {
+        let encoded = serde_json::to_string(&self.plan).unwrap();
+        println!("{}", encoded);
+    }
+}
+
+impl SerializedBuildPlan {
+    pub fn new() -> SerializedBuildPlan {
+        SerializedBuildPlan {
+            invocations: Vec::new(),
+            inputs: Vec::new(),
+        }
+    }
+}
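The module docs above describe the plan as something to hand off to an external build system: each Invocation is one subprocess, and its `deps` are indexes into the `invocations` array, which is ordered so that dependencies always precede their dependents (`compile` recurses into dependencies before calling `BuildPlan::add`). A minimal, hypothetical consumer is sketched below; it is not part of this commit, assumes the serde, serde_derive, and serde_json crates, and only mirrors the fields it uses (serde ignores the rest):

    // Hypothetical consumer of the build plan JSON; illustrative only, not part of this commit.
    extern crate serde;
    #[macro_use]
    extern crate serde_derive;
    extern crate serde_json;

    use std::io::Read;

    /// Mirrors a subset of the serialized `Invocation` fields; the remaining
    /// fields (kind, links, env, cwd, ...) are simply not deserialized here.
    #[derive(Deserialize)]
    struct Invocation {
        package_name: String,
        package_version: String,
        deps: Vec<usize>,
        outputs: Vec<String>,
        program: String,
        args: Vec<String>,
    }

    /// Mirrors `SerializedBuildPlan`.
    #[derive(Deserialize)]
    struct BuildPlan {
        invocations: Vec<Invocation>,
        inputs: Vec<String>,
    }

    fn main() {
        // `output_plan()` prints the whole plan as a single JSON object on stdout,
        // so pipe `cargo build --build-plan -Z unstable-options` into this program.
        let mut json = String::new();
        std::io::stdin().read_to_string(&mut json).unwrap();
        let plan: BuildPlan = serde_json::from_str(&json).unwrap();

        println!("{} manifest input(s)", plan.inputs.len());
        // Invocations are pushed only after their dependencies (see BuildPlan::add),
        // so every index in `deps` points at an earlier entry and running the list
        // front to back respects the dependency graph.
        for (i, inv) in plan.invocations.iter().enumerate() {
            println!(
                "#{} {} v{}: {} ({} args, {} outputs, deps {:?})",
                i, inv.package_name, inv.package_version,
                inv.program, inv.args.len(), inv.outputs.len(), inv.deps
            );
        }
    }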
@@ -3,26 +3,28 @@ use std::collections::{HashMap, HashSet};
 use std::fmt::Write;
 use std::path::PathBuf;
 use std::sync::Arc;
+use std::cmp::Ordering;
 
 use jobserver::Client;
 
 use core::{Package, PackageId, Resolve, Target};
 use core::profiles::Profile;
 use util::errors::{CargoResult, CargoResultExt};
-use util::{internal, profile, Config};
+use util::{internal, profile, Config, short_hash};
 
 use super::custom_build::{self, BuildDeps, BuildScripts, BuildState};
 use super::fingerprint::Fingerprint;
 use super::job_queue::JobQueue;
 use super::layout::Layout;
 use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind};
+use super::build_plan::BuildPlan;
 
 mod unit_dependencies;
 use self::unit_dependencies::build_unit_dependencies;
 
 mod compilation_files;
-pub use self::compilation_files::Metadata;
-use self::compilation_files::{CompilationFiles, OutputFile};
+pub use self::compilation_files::{Metadata, OutputFile};
+use self::compilation_files::CompilationFiles;
 
 /// All information needed to define a Unit.
 ///
@@ -62,6 +64,24 @@ pub struct Unit<'a> {
     pub mode: CompileMode,
 }
 
+impl<'a> Unit<'a> {
+    pub fn buildkey(&self) -> String {
+        format!("{}-{}", self.pkg.name(), short_hash(self))
+    }
+}
+
+impl<'a> Ord for Unit<'a> {
+    fn cmp(&self, other: &Unit) -> Ordering {
+        self.buildkey().cmp(&other.buildkey())
+    }
+}
+
+impl<'a> PartialOrd for Unit<'a> {
+    fn partial_cmp(&self, other: &Unit) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
 pub struct Context<'a, 'cfg: 'a> {
     pub bcx: &'a BuildContext<'a, 'cfg>,
     pub compilation: Compilation<'cfg>,
@@ -121,6 +141,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         exec: &Arc<Executor>,
     ) -> CargoResult<Compilation<'cfg>> {
         let mut queue = JobQueue::new(self.bcx);
+        let mut plan = BuildPlan::new();
+        let build_plan = self.bcx.build_config.build_plan;
         self.prepare_units(export_dir, units)?;
         self.prepare()?;
         custom_build::build_map(&mut self, units)?;
@@ -131,11 +153,16 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
             // part of this, that's all done next as part of the `execute`
             // function which will run everything in order with proper
             // parallelism.
-            super::compile(&mut self, &mut queue, unit, exec)?;
+            super::compile(&mut self, &mut queue, &mut plan, unit, exec)?;
         }
 
         // Now that we've figured out everything that we're going to do, do it!
-        queue.execute(&mut self)?;
+        queue.execute(&mut self, &mut plan)?;
+
+        if build_plan {
+            plan.set_inputs(self.bcx.inputs()?);
+            plan.output_plan();
+        }
 
         for unit in units.iter() {
             for output in self.outputs(unit)?.iter() {
@@ -366,7 +393,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                 return Vec::new();
             }
         }
-        self.unit_dependencies[unit].clone()
+        let mut deps = self.unit_dependencies[unit].clone();
+        deps.sort();
+        deps
     }
 
     pub fn incremental_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
@@ -94,12 +94,16 @@ pub fn prepare<'a, 'cfg>(
         build_work(cx, unit)?
     };
 
+    if cx.bcx.build_config.build_plan {
+        Ok((work_dirty, work_fresh, Freshness::Dirty))
+    } else {
         // Now that we've prep'd our work, build the work needed to manage the
         // fingerprint and then start returning that upwards.
         let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
 
         Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
     }
+}
 
 fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> {
     assert!(unit.mode.is_run_custom_build());
@@ -111,6 +115,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
         .expect("running a script not depending on an actual script");
     let script_output = cx.files().build_script_dir(build_script_unit);
     let build_output = cx.files().build_script_out_dir(unit);
+    let build_plan = bcx.build_config.build_plan;
+    let invocation_name = unit.buildkey();
 
     // Building the command to execute
     let to_exec = script_output.join(unit.target.name());
@@ -269,7 +275,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
         // along to this custom build command. We're also careful to augment our
         // dynamic library search path in case the build script depended on any
        // native dynamic libraries.
-        {
+        if !build_plan {
            let build_state = build_state.outputs.lock().unwrap();
            for (name, id) in lib_deps {
                let key = (id.clone(), kind);
@@ -294,6 +300,9 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
        }
 
        // And now finally, run the build command itself!
+        if build_plan {
+            state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new()));
+        } else {
            state.running(&cmd);
            let output = cmd.exec_with_streaming(
                &mut |out_line| {
@@ -340,8 +349,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
                    env: &parsed_output.env,
                });
            }
 
            build_state.insert(id, kind, parsed_output);
+        }
        Ok(())
    });
 
@@ -4,6 +4,7 @@ use std::fmt;
 use std::io;
 use std::mem;
 use std::sync::mpsc::{channel, Receiver, Sender};
+use std::sync::Arc;
 
 use crossbeam::{self, Scope};
 use jobserver::{Acquired, HelperThread};
@@ -15,7 +16,8 @@ use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
 use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
 
 use super::job::Job;
-use super::{BuildContext, CompileMode, Context, Kind, Unit};
+use super::{BuildContext, BuildPlan, CompileMode, Context, Kind, Unit};
+use super::context::OutputFile;
 
 /// A management structure of the entire dependency graph to compile.
 ///
@@ -58,6 +60,7 @@ pub struct JobState<'a> {
 
 enum Message<'a> {
     Run(String),
+    BuildPlanMsg(String, ProcessBuilder, Arc<Vec<OutputFile>>),
     Stdout(String),
     Stderr(String),
     Token(io::Result<Acquired>),
@@ -69,6 +72,16 @@ impl<'a> JobState<'a> {
         let _ = self.tx.send(Message::Run(cmd.to_string()));
     }
 
+    pub fn build_plan(
+        &self,
+        module_name: String,
+        cmd: ProcessBuilder,
+        filenames: Arc<Vec<OutputFile>>,
+    ) {
+        let _ = self.tx
+            .send(Message::BuildPlanMsg(module_name, cmd, filenames));
+    }
+
     pub fn stdout(&self, out: &str) {
         let _ = self.tx.send(Message::Stdout(out.to_string()));
     }
@@ -115,7 +128,7 @@ impl<'a> JobQueue<'a> {
     /// This function will spawn off `config.jobs()` workers to build all of the
     /// necessary dependencies, in order. Freshness is propagated as far as
     /// possible along each dependency chain.
-    pub fn execute(&mut self, cx: &mut Context) -> CargoResult<()> {
+    pub fn execute(&mut self, cx: &mut Context, plan: &mut BuildPlan) -> CargoResult<()> {
         let _p = profile::start("executing the job graph");
         self.queue.queue_finished();
 
@@ -141,17 +154,19 @@ impl<'a> JobQueue<'a> {
             })
             .chain_err(|| "failed to create helper thread for jobserver management")?;
 
-        crossbeam::scope(|scope| self.drain_the_queue(cx, scope, &helper))
+        crossbeam::scope(|scope| self.drain_the_queue(cx, plan, scope, &helper))
     }
 
     fn drain_the_queue(
         &mut self,
         cx: &mut Context,
+        plan: &mut BuildPlan,
         scope: &Scope<'a>,
         jobserver_helper: &HelperThread,
     ) -> CargoResult<()> {
         let mut tokens = Vec::new();
         let mut queue = Vec::new();
+        let build_plan = cx.bcx.build_config.build_plan;
         trace!("queue: {:#?}", self.queue);
 
         // Iteratively execute the entire dependency graph. Each turn of the
@@ -192,7 +207,7 @@ impl<'a> JobQueue<'a> {
             // we're able to perform some parallel work.
             while error.is_none() && self.active < tokens.len() + 1 && !queue.is_empty() {
                 let (key, job, fresh) = queue.remove(0);
-                self.run(key, fresh, job, cx.bcx.config, scope)?;
+                self.run(key, fresh, job, cx.bcx.config, scope, build_plan)?;
             }
 
             // If after all that we're not actually running anything then we're
@@ -215,6 +230,9 @@ impl<'a> JobQueue<'a> {
                         .shell()
                         .verbose(|c| c.status("Running", &cmd))?;
                 }
+                Message::BuildPlanMsg(module_name, cmd, filenames) => {
+                    plan.update(module_name, cmd, filenames)?;
+                }
                 Message::Stdout(out) => {
                     if cx.bcx.config.extra_verbose() {
                         println!("{}", out);
@@ -303,7 +321,9 @@ impl<'a> JobQueue<'a> {
                 "{} [{}] target(s) in {}",
                 build_type, opt_type, time_elapsed
             );
+            if !build_plan {
             cx.bcx.config.shell().status("Finished", message)?;
+            }
             Ok(())
         } else if let Some(e) = error {
             Err(e)
@@ -322,6 +342,7 @@ impl<'a> JobQueue<'a> {
         job: Job,
         config: &Config,
         scope: &Scope<'a>,
+        build_plan: bool,
     ) -> CargoResult<()> {
         info!("start: {:?}", key);
 
@@ -340,8 +361,10 @@ impl<'a> JobQueue<'a> {
             }
         }
 
+        if !build_plan {
         // Print out some nice progress information
         self.note_working_on(config, &key, fresh)?;
+        }
 
         Ok(())
     }
@@ -16,6 +16,7 @@ use util::paths;
 use util::{self, machine_message, Freshness, ProcessBuilder};
 use util::{internal, join_paths, profile};
 
+use self::build_plan::BuildPlan;
 use self::job::{Job, Work};
 use self::job_queue::JobQueue;
 
@@ -30,6 +31,7 @@ pub use self::layout::is_bad_artifact_name;
 
 mod build_config;
 mod build_context;
+mod build_plan;
 mod compilation;
 mod context;
 mod custom_build;
@@ -42,7 +44,7 @@ mod output_depinfo;
 /// Whether an object is for the host arch, or the target arch.
 ///
 /// These will be the same unless cross-compiling.
-#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)]
+#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)]
 pub enum Kind {
     Host,
     Target,
@@ -93,10 +95,12 @@ impl Executor for DefaultExecutor {}
 fn compile<'a, 'cfg: 'a>(
     cx: &mut Context<'a, 'cfg>,
     jobs: &mut JobQueue<'a>,
+    plan: &mut BuildPlan,
     unit: &Unit<'a>,
     exec: &Arc<Executor>,
 ) -> CargoResult<()> {
     let bcx = cx.bcx;
+    let build_plan = bcx.build_config.build_plan;
     if !cx.compiled.insert(*unit) {
         return Ok(());
     }
@@ -112,6 +116,12 @@ fn compile<'a, 'cfg: 'a>(
     } else if unit.mode == CompileMode::Doctest {
         // we run these targets later, so this is just a noop for now
         (Work::noop(), Work::noop(), Freshness::Fresh)
+    } else if build_plan {
+        (
+            rustc(cx, unit, &exec.clone())?,
+            Work::noop(),
+            Freshness::Dirty,
+        )
     } else {
         let (mut freshness, dirty, fresh) = fingerprint::prepare_target(cx, unit)?;
         let work = if unit.mode.is_doc() {
@@ -134,7 +144,10 @@ fn compile<'a, 'cfg: 'a>(
 
     // Be sure to compile all dependencies of this target as well.
     for unit in cx.dep_targets(unit).iter() {
-        compile(cx, jobs, unit, exec)?;
+        compile(cx, jobs, plan, unit, exec)?;
+    }
+    if build_plan {
+        plan.add(cx, unit)?;
     }
 
     Ok(())
@@ -146,8 +159,10 @@ fn rustc<'a, 'cfg>(
     exec: &Arc<Executor>,
 ) -> CargoResult<Work> {
     let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
+    let build_plan = cx.bcx.build_config.build_plan;
 
     let name = unit.pkg.name().to_string();
+    let buildkey = unit.buildkey();
 
     // If this is an upstream dep we don't want warnings from, turn off all
     // lints.
@@ -209,6 +224,7 @@ fn rustc<'a, 'cfg>(
             // previous build scripts, we include them in the rustc invocation.
             if let Some(build_deps) = build_deps {
                 let build_state = build_state.outputs.lock().unwrap();
+                if !build_plan {
                 add_native_deps(
                     &mut rustc,
                     &build_state,
@@ -217,6 +233,7 @@ fn rustc<'a, 'cfg>(
                     &current_id,
                 )?;
                 add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
+                }
                 add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
             }
 
@@ -268,6 +285,8 @@ fn rustc<'a, 'cfg>(
                 Ok(())
             },
         ).chain_err(|| format!("Could not compile `{}`.", name))?;
+        } else if build_plan {
+            state.build_plan(buildkey, rustc.clone(), outputs.clone());
         } else {
             exec.exec(rustc, &package_id, &target)
                 .map_err(Internal::new)
tests/testsuite/build_plan.rs (new file, 207 lines)
@@ -0,0 +1,207 @@
+use cargotest::ChannelChanger;
+use cargotest::support::{basic_bin_manifest, execs, main_file, project};
+use hamcrest::{assert_that, existing_file, is_not};
+
+#[test]
+fn cargo_build_plan_simple() {
+    let p = project("foo")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    assert_that(
+        p.cargo("build")
+            .masquerade_as_nightly_cargo()
+            .arg("--build-plan")
+            .arg("-Zunstable-options"),
+        execs().with_status(0).with_json(
+            r#"
+    {
+        "inputs": [
+            "[..][/]foo[/]Cargo.toml"
+        ],
+        "invocations": [
+            {
+                "args": "{...}",
+                "cwd": "[..][/]target[/]cit[/][..][/]foo",
+                "deps": [],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": "{...}",
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["bin"]
+            }
+        ]
+    }
+    "#,
+        ),
+    );
+    assert_that(&p.bin("foo"), is_not(existing_file()));
+}
+
+#[test]
+fn cargo_build_plan_single_dep() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.5.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            pub fn foo() { bar::bar(); }
+
+            #[test]
+            fn test() { foo(); }
+        "#,
+        )
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+        "#,
+        )
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+    assert_that(
+        p.cargo("build")
+            .masquerade_as_nightly_cargo()
+            .arg("--build-plan")
+            .arg("-Zunstable-options"),
+        execs().with_status(0).with_json(
+            r#"
+    {
+        "inputs": [
+            "[..][/]foo[/]Cargo.toml",
+            "[..][/]foo[/]bar[/]Cargo.toml"
+        ],
+        "invocations": [
+            {
+                "args": "{...}",
+                "cwd": "[..][/]target[/]cit[/][..][/]foo",
+                "deps": [],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [
+                    "[..][/]foo[/]target[/]debug[/]deps[/]libbar-[..].rlib"
+                ],
+                "package_name": "bar",
+                "package_version": "0.0.1",
+                "program": "rustc",
+                "target_kind": ["lib"]
+            },
+            {
+                "args": "{...}",
+                "cwd": "[..][/]target[/]cit[/][..][/]foo",
+                "deps": [0],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [
+                    "[..][/]foo[/]target[/]debug[/]deps[/]libfoo-[..].rlib"
+                ],
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["lib"]
+            }
+        ]
+    }
+    "#,
+        ),
+    );
+}
+
+#[test]
+fn cargo_build_plan_build_script() {
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+        "#,
+        )
+        .file("src/main.rs", r#"fn main() {}"#)
+        .file("build.rs", r#"fn main() {}"#)
+        .build();
+
+    assert_that(
+        p.cargo("build")
+            .masquerade_as_nightly_cargo()
+            .arg("--build-plan")
+            .arg("-Zunstable-options"),
+        execs().with_status(0).with_json(
+            r#"
+    {
+        "inputs": [
+            "[..][/]foo[/]Cargo.toml"
+        ],
+        "invocations": [
+            {
+                "args": "{...}",
+                "cwd": "[..][/]target[/]cit[/][..][/]foo",
+                "deps": [],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [
+                    "[..][/]foo[/]target[/]debug[/]build[/][..][/]build_script_build-[..]"
+                ],
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["custom-build"]
+            },
+            {
+                "args": "{...}",
+                "cwd": "[..][/]target[/]cit[/][..][/]foo",
+                "deps": [0],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [],
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "[..][/]build-script-build",
+                "target_kind": ["custom-build"]
+            },
+            {
+                "args": "{...}",
+                "cwd": "[..][/]target[/]cit[/][..][/]foo",
+                "deps": [1],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": "{...}",
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["bin"]
+            }
+        ]
+    }
+    "#,
+        ),
+    );
+}
@@ -30,6 +30,7 @@ mod bench;
 mod build_auth;
 mod build_lib;
 mod build;
+mod build_plan;
 mod build_script_env;
 mod build_script;
 mod cargo_alias_config;