Merge remote-tracking branch 'origin/master' into custom-profile-pr-rfc

Dan Aloni 2019-08-09 11:50:55 +03:00
commit 23e613d552
102 changed files with 2071 additions and 940 deletions

View File

@ -5,10 +5,20 @@
### Added
- Cargo build pipelining has been enabled by default to leverage more idle CPU
parallelism during builds.
[#7143](https://github.com/rust-lang/cargo/pull/7143)
- The `--message-format` option to Cargo can now be specified multiple times and
accepts a comma-separated list of values. In addition to the previous values
it also now accepts `json-diagnostic-short` and
`json-diagnostic-rendered-ansi` which configures the output coming from rustc
in `json` message mode.
[#7214](https://github.com/rust-lang/cargo/pull/7214)
### Changed
### Fixed
- (Nightly only): Fixed exponential blowup when using CARGO_BUILD_PIPELINING.
- (Nightly only): Fixed exponential blowup when using `CARGO_BUILD_PIPELINING`.
[#7062](https://github.com/rust-lang/cargo/pull/7062)
- Fixed using the wrong directory when updating git repositories when using
the `git-fetch-with-cli` config option, and the `GIT_DIR` environment

View File

@ -48,7 +48,7 @@ num_cpus = "1.0"
opener = "0.4"
percent-encoding = "2.0"
remove_dir_all = "0.5.2"
rustfix = "0.4.4"
rustfix = "0.4.6"
same-file = "1"
semver = { version = "0.9.0", features = ["serde"] }
serde = { version = "1.0.82", features = ['derive'] }

View File

@ -8,7 +8,7 @@ use std::rc::Rc;
use std::time::Instant;
use cargo::core::dependency::Kind;
use cargo::core::resolver::{self, Method};
use cargo::core::resolver::{self, ResolveOpts};
use cargo::core::source::{GitReference, SourceId};
use cargo::core::Resolve;
use cargo::core::{Dependency, PackageId, Registry, Summary};
@ -175,10 +175,10 @@ pub fn resolve_with_config_raw(
false,
)
.unwrap();
let method = Method::Everything;
let opts = ResolveOpts::everything();
let start = Instant::now();
let resolve = resolver::resolve(
&[(summary, method)],
&[(summary, opts)],
&[],
&mut registry,
&HashSet::new(),

View File

@ -129,7 +129,10 @@ impl BuildConfig {
/// Whether or not the *user* wants JSON output. Whether or not rustc
/// actually uses JSON is decided in `add_error_format`.
pub fn emit_json(&self) -> bool {
self.message_format == MessageFormat::Json
match self.message_format {
MessageFormat::Json { .. } => true,
_ => false,
}
}
pub fn profile_name(&self) -> &str {
@ -144,7 +147,17 @@ impl BuildConfig {
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MessageFormat {
Human,
Json,
Json {
/// Whether rustc diagnostics are rendered by cargo or included into the
/// output stream.
render_diagnostics: bool,
/// Whether the `rendered` field of rustc diagnostics are using the
/// "short" rendering.
short: bool,
/// Whether the `rendered` field of rustc diagnostics embed ansi color
/// codes.
ansi: bool,
},
Short,
}
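As a rough illustration (not part of this diff), the comma-separated `--message-format` values from the changelog entry earlier in this commit plausibly map onto the new variant like so; the value driving `render_diagnostics` is an assumption, since the changelog excerpt only names the two `json-diagnostic-*` values:

```rust
// Hypothetical mapping for:
//   --message-format json-diagnostic-short,json-diagnostic-rendered-ansi
// (the actual CLI parsing lives outside this hunk)
let format = MessageFormat::Json {
    render_diagnostics: false, // assumed default when no "render" value is given
    short: true,               // from `json-diagnostic-short`
    ansi: true,                // from `json-diagnostic-rendered-ansi`
};
```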

View File

@ -16,6 +16,11 @@ mod target_info;
pub use self::target_info::{FileFlavor, TargetInfo};
/// The build context, containing all information about a build task.
///
/// It is intended that this is mostly static information. Stuff that mutates
/// during the build can be found in the parent `Context`. (I say mostly,
/// because this has internal caching, but nothing that should be observable
/// or require &mut.)
pub struct BuildContext<'a, 'cfg> {
/// The workspace the build is for.
pub ws: &'a Workspace<'cfg>,
@ -183,6 +188,17 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
self.extra_compiler_args.get(unit)
}
/// If a build script is overridden, this returns the `BuildOutput` to use.
///
/// `lib_name` is the `links` library name and `kind` is whether it is for
/// Host or Target.
pub fn script_override(&self, lib_name: &str, kind: Kind) -> Option<&BuildOutput> {
match kind {
Kind::Host => self.host_config.overrides.get(lib_name),
Kind::Target => self.target_config.overrides.get(lib_name),
}
}
}
/// Information required to build for a target.
@ -192,7 +208,11 @@ pub struct TargetConfig {
pub ar: Option<PathBuf>,
/// The path of the linker for this target.
pub linker: Option<PathBuf>,
/// Special build options for any necessary input files (filename -> options).
/// Build script override for the given library name.
///
/// Any package with a `links` value for the given library name will skip
/// running its build script and instead use the given output from the
/// config file.
pub overrides: HashMap<String, BuildOutput>,
}
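For context, these overrides come from entries like the following in a Cargo config file. This is a sketch: the target triple and paths are placeholders, but the `[target.<triple>.<links-name>]` layout and the `rustc-link-*` keys follow Cargo's documented build-script override syntax:

```toml
# Any package declaring `links = "foo"` skips its build script and
# uses these values instead.
[target.x86_64-unknown-linux-gnu.foo]
rustc-link-search = ["/opt/foo/lib"]
rustc-link-lib = ["foo"]
```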

View File

@ -34,6 +34,7 @@ pub struct TargetInfo {
pub rustflags: Vec<String>,
/// Extra flags to pass to `rustdoc`, see `env_args`.
pub rustdocflags: Vec<String>,
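/// Whether or not rustc supports the `--json=artifacts` flag needed for
/// build pipelining. Only probed for the host compiler; `None` otherwise.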
pub supports_pipelining: Option<bool>,
}
/// Kind of each file generated by a Unit, part of `FileType`.
@ -52,14 +53,17 @@ pub struct FileType {
/// The kind of file.
pub flavor: FileFlavor,
/// The suffix for the file (for example, `.rlib`).
/// This is an empty string for executables on Unix-like platforms.
suffix: String,
/// The prefix for the file (for example, `lib`).
/// This is an empty string for things like executables.
prefix: String,
// Wasm bin target will generate two files in deps such as
// "web-stuff.js" and "web_stuff.wasm". Note the different usages of
// "-" and "_". should_replace_hyphens is a flag to indicate that
// we need to convert the stem "web-stuff" to "web_stuff", so we
// won't miss "web_stuff.wasm".
/// Flag to convert hyphen to underscore.
///
/// wasm bin targets will generate two files in deps such as
/// "web-stuff.js" and "web_stuff.wasm". Note the different usages of "-"
/// and "_". This flag indicates that the stem "web-stuff" should be
/// converted to "web_stuff".
should_replace_hyphens: bool,
}
@ -98,6 +102,18 @@ impl TargetInfo {
.args(&rustflags)
.env_remove("RUSTC_LOG");
// NOTE: set this unconditionally to `true` once support for `--json`
// rides to stable.
//
// Also note that we only learn about this functionality for the host
// compiler since the host/target rustc are always the same.
let mut pipelining_test = process.clone();
pipelining_test.args(&["--error-format=json", "--json=artifacts"]);
let supports_pipelining = match kind {
Kind::Host => Some(rustc.cached_output(&pipelining_test).is_ok()),
Kind::Target => None,
};
let target_triple = requested_target
.as_ref()
.map(|s| s.as_str())
@ -179,6 +195,7 @@ impl TargetInfo {
"RUSTDOCFLAGS",
)?,
cfg,
supports_pipelining,
})
}

View File

@ -22,6 +22,7 @@ pub struct Doctest {
/// A structure returning the result of a compilation.
pub struct Compilation<'cfg> {
/// An array of all tests created during this compilation.
/// `(package, target, path_to_test_exe)`
pub tests: Vec<(Package, Target, PathBuf)>,
/// An array of all binaries created.

View File

@ -145,7 +145,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
/// target.
pub fn out_dir(&self, unit: &Unit<'a>) -> PathBuf {
if unit.mode.is_doc() {
self.layout(unit.kind).root().parent().unwrap().join("doc")
self.layout(unit.kind).doc().to_path_buf()
} else if unit.mode.is_doc_test() {
panic!("doc tests do not have an out dir");
} else if unit.target.is_custom_build() {
@ -169,11 +169,6 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
}
}
/// Returns the root of the build output tree for the target
pub fn target_root(&self) -> &Path {
self.target.as_ref().unwrap_or(&self.host).dest()
}
/// Returns the root of the build output tree for the host
pub fn host_root(&self) -> &Path {
self.host.dest()
@ -261,8 +256,8 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
/// (eg a dependent lib).
fn link_stem(&self, unit: &Unit<'a>) -> Option<(PathBuf, String)> {
let out_dir = self.out_dir(unit);
let bin_stem = self.bin_stem(unit);
let file_stem = self.file_stem(unit);
let bin_stem = self.bin_stem(unit); // Stem without metadata.
let file_stem = self.file_stem(unit); // Stem with metadata.
// We currently only lift files up from the `deps` directory. If
// it was compiled into something like `example/` or `doc/` then

View File

@ -3,7 +3,7 @@ use std::collections::{HashMap, HashSet};
use std::ffi::OsStr;
use std::fmt::Write;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::{Arc, Mutex};
use filetime::FileTime;
use jobserver::Client;
@ -15,7 +15,7 @@ use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::{internal, profile, Config};
use super::build_plan::BuildPlan;
use super::custom_build::{self, BuildDeps, BuildScripts, BuildState};
use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
use super::fingerprint::Fingerprint;
use super::job_queue::JobQueue;
use super::layout::Layout;
@ -28,21 +28,45 @@ mod compilation_files;
use self::compilation_files::CompilationFiles;
pub use self::compilation_files::{Metadata, OutputFile};
/// Collection of all the stuff that is needed to perform a build.
pub struct Context<'a, 'cfg> {
/// Mostly static information about the build task.
pub bcx: &'a BuildContext<'a, 'cfg>,
/// A large collection of information about the result of the entire compilation.
pub compilation: Compilation<'cfg>,
pub build_state: Arc<BuildState>,
pub build_script_overridden: HashSet<(PackageId, Kind)>,
/// Output from build scripts, updated after each build script runs.
pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
/// Dependencies (like rerun-if-changed) declared by a build script.
/// This is *only* populated from the output from previous runs.
/// If the build script hasn't ever been run, then it must be run.
pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
/// Fingerprints used to detect if a unit is out-of-date.
pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
/// Cache of file mtimes to reduce filesystem hits.
pub mtime_cache: HashMap<PathBuf, FileTime>,
/// A set used to track which units have been compiled.
/// A unit may appear in the job graph multiple times as a dependency of
/// multiple packages, but it only needs to run once.
pub compiled: HashSet<Unit<'a>>,
/// Linking information for each `Unit`.
/// See `build_map` for details.
pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
/// Used to check that the `links` field in the manifest is not duplicated
/// and is used correctly.
pub links: Links,
/// Job server client to manage concurrency with other processes.
pub jobserver: Client,
/// "Primary" packages are the ones the user selected on the command-line
/// with `-p` flags. If no flags are specified, then it is the defaults
/// based on the current directory and the default workspace members.
primary_packages: HashSet<PackageId>,
/// The dependency graph of units to compile.
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
/// An abstraction of the files and directories that will be generated by
/// the compilation. This is `None` until after `unit_dependencies` has
/// been computed.
files: Option<CompilationFiles<'a, 'cfg>>,
/// Cache of packages, populated when they are downloaded.
package_cache: HashMap<PackageId, &'a Package>,
/// A flag indicating whether pipelining is enabled for this compilation
@ -77,12 +101,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
.config
.get_bool("build.pipelining")?
.map(|t| t.val)
.unwrap_or(false);
.unwrap_or(bcx.host_info.supports_pipelining.unwrap());
Ok(Self {
bcx,
compilation: Compilation::new(bcx)?,
build_state: Arc::new(BuildState::new(&bcx.host_config, &bcx.target_config)),
build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
fingerprints: HashMap::new(),
mtime_cache: HashMap::new(),
compiled: HashSet::new(),
@ -90,8 +114,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
build_explicit_deps: HashMap::new(),
links: Links::new(),
jobserver,
build_script_overridden: HashSet::new(),
primary_packages: HashSet::new(),
unit_dependencies: HashMap::new(),
files: None,
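For reference, the `build.pipelining` key read above comes from the user's Cargo config; a sketch (setting it explicitly overrides the new probed default):

```toml
[build]
pipelining = false   # force-disable even when the probe says rustc supports it
```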
@ -228,7 +250,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
super::output_depinfo(&mut self, unit)?;
}
for (&(ref pkg, _), output) in self.build_state.outputs.lock().unwrap().iter() {
for (&(ref pkg, _), output) in self.build_script_outputs.lock().unwrap().iter() {
self.compilation
.cfgs
.entry(pkg.clone())
@ -335,22 +357,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
//
// TODO: this ideally should be `-> &[Unit<'a>]`.
pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
// If this build script's execution has been overridden then we don't
// actually depend on anything, we've reached the end of the dependency
// chain as we've got all the info we're gonna get.
//
// Note there's a subtlety about this piece of code! The
// `build_script_overridden` map here is populated in
// `custom_build::build_map` which you need to call before inspecting
// dependencies. However, that code itself calls this method and
// gets a full pre-filtered set of dependencies. This is not super
// obvious or clear, but it does work at the moment.
if unit.target.is_custom_build() {
let key = (unit.pkg.package_id(), unit.kind);
if self.build_script_overridden.contains(&key) {
return Vec::new();
}
}
self.unit_dependencies[unit].clone()
}

View File

@ -258,6 +258,13 @@ fn compute_deps_custom_build<'a, 'cfg>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>,
) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
if let Some(links) = unit.pkg.manifest().links() {
if bcx.script_override(links, unit.kind).is_some() {
// Overridden build scripts don't have any dependencies.
return Ok(Vec::new());
}
}
// When not overridden, then the dependencies to run a build script are:
//
// 1. Compiling the build script itself.

View File

@ -3,7 +3,7 @@ use std::collections::{BTreeSet, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use std::str;
use std::sync::{Arc, Mutex};
use std::sync::Arc;
use crate::core::compiler::job_queue::JobState;
use crate::core::{profiles::ProfileRoot, PackageId};
@ -13,7 +13,7 @@ use crate::util::Cfg;
use crate::util::{self, internal, paths, profile};
use super::job::{Freshness, Job, Work};
use super::{fingerprint, Context, Kind, TargetConfig, Unit};
use super::{fingerprint, Context, Kind, Unit};
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
@ -39,48 +39,57 @@ pub struct BuildOutput {
pub warnings: Vec<String>,
}
/// Map of packages to build info.
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
/// Build info and overrides.
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
overrides: HashMap<(String, Kind), BuildOutput>,
}
/// Map of packages to build script output.
///
/// This initially starts out as empty. Overridden build scripts get
/// inserted during `build_map`. The rest of the entries are added
/// immediately after each build script runs.
pub type BuildScriptOutputs = HashMap<(PackageId, Kind), BuildOutput>;
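A minimal sketch of the access pattern the rest of this diff uses for this shared map; `id`, `kind`, and `output` stand in for the values in scope at the real call sites:

```rust
use std::sync::{Arc, Mutex};

let outputs = Arc::new(Mutex::new(BuildScriptOutputs::default()));

// After a build script runs (or when it is overridden), record its output:
outputs.lock().unwrap().insert((id, kind), output);

// Consumers take the lock and look the entry up, as in `build_work` below:
if let Some(out) = outputs.lock().unwrap().get(&(id, kind)) {
    // read out.metadata, out.env, out.library_paths, ...
}
```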
/// Linking information for a `Unit`.
///
/// See `build_map` for more details.
#[derive(Default)]
pub struct BuildScripts {
// Cargo will use this `to_link` vector to add `-L` flags to compiles as we
// propagate them upwards towards the final build. Note, however, that we
// need to preserve the ordering of `to_link` to be topologically sorted.
// This will ensure that build scripts which print their paths properly will
// correctly pick up the files they generated (if there are duplicates
// elsewhere).
//
// To preserve this ordering, the (id, kind) is stored in two places, once
// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
// this as we're building interactively below to ensure that the memory
// usage here doesn't blow up too much.
//
// For more information, see #2354.
/// Cargo will use this `to_link` vector to add `-L` flags to compiles as we
/// propagate them upwards towards the final build. Note, however, that we
/// need to preserve the ordering of `to_link` to be topologically sorted.
/// This will ensure that build scripts which print their paths properly will
/// correctly pick up the files they generated (if there are duplicates
/// elsewhere).
///
/// To preserve this ordering, the (id, kind) is stored in two places, once
/// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
/// this as we're building interactively below to ensure that the memory
/// usage here doesn't blow up too much.
///
/// For more information, see #2354.
pub to_link: Vec<(PackageId, Kind)>,
/// This is only used while constructing `to_link` to avoid duplicates.
seen_to_link: HashSet<(PackageId, Kind)>,
/// Host-only dependencies that have build scripts.
///
/// This is the set of transitive dependencies that are host-only
/// (proc-macro, plugin, build-dependency) that contain a build script.
/// Any `BuildOutput::library_paths` path relative to `target` will be
/// added to LD_LIBRARY_PATH so that the compiler can find any dynamic
/// libraries a build script may have generated.
pub plugins: BTreeSet<PackageId>,
}
/// Dependency information as declared by a build script.
#[derive(Debug)]
pub struct BuildDeps {
/// Absolute path to the file in the target directory that stores the
/// output of the build script.
pub build_script_output: PathBuf,
/// Files that trigger a rebuild if they change.
pub rerun_if_changed: Vec<PathBuf>,
/// Environment variables that trigger a rebuild if they change.
pub rerun_if_env_changed: Vec<String>,
}
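As a concrete example, the fields of `BuildDeps` and `BuildOutput` are parsed from standard `cargo:` directives printed by a build script (paths and names below are placeholders):

```rust
// build.rs (sketch)
fn main() {
    println!("cargo:rerun-if-changed=src/native/wrapper.h"); // -> rerun_if_changed
    println!("cargo:rerun-if-env-changed=CC");               // -> rerun_if_env_changed
    println!("cargo:rustc-link-lib=static=foo");             // -> BuildOutput link info
    println!("cargo:rustc-link-search=native=/opt/foo/lib");
}
```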
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
let _p = profile::start(format!(
"build script prepare: {}/{}",
@ -90,7 +99,8 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRe
let key = (unit.pkg.package_id(), unit.kind);
if cx.build_script_overridden.contains(&key) {
if cx.build_script_outputs.lock().unwrap().contains_key(&key) {
// The output is already set, thus the build script is overridden.
fingerprint::prepare_target(cx, unit, false)
} else {
build_work(cx, unit)
@ -232,7 +242,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
.collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = Arc::clone(&cx.build_state);
let build_script_outputs = Arc::clone(&cx.build_script_outputs);
let id = unit.pkg.package_id();
let output_file = script_run_dir.join("output");
let err_file = script_run_dir.join("stderr");
@ -241,11 +251,11 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
let all = (
id,
pkg_name.clone(),
Arc::clone(&build_state),
Arc::clone(&build_script_outputs),
output_file.clone(),
script_out_dir.clone(),
);
let build_scripts = super::load_build_deps(cx, unit);
let build_scripts = cx.build_scripts.get(unit).cloned();
let kind = unit.kind;
let json_messages = bcx.build_config.emit_json();
let extra_verbose = bcx.config.extra_verbose();
@ -278,17 +288,17 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
if !build_plan {
let build_state = build_state.outputs.lock().unwrap();
let build_script_outputs = build_script_outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id, kind);
let state = build_state.get(&key).ok_or_else(|| {
let script_output = build_script_outputs.get(&key).ok_or_else(|| {
internal(format!(
"failed to locate build state for env \
vars: {}/{:?}",
id, kind
))
})?;
let data = &state.metadata;
let data = &script_output.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(
&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
@ -297,7 +307,12 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
}
}
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &host_target_root)?;
super::add_plugin_deps(
&mut cmd,
&build_script_outputs,
&build_scripts,
&host_target_root,
)?;
}
}
@ -345,7 +360,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
if json_messages {
emit_build_output(state, &parsed_output, id);
}
build_state.insert(id, kind, parsed_output);
build_script_outputs
.lock()
.unwrap()
.insert((id, kind), parsed_output);
Ok(())
});
@ -353,7 +371,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |state| {
let (id, pkg_name, build_state, output_file, script_out_dir) = all;
let (id, pkg_name, build_script_outputs, output_file, script_out_dir) = all;
let output = match prev_output {
Some(output) => output,
None => BuildOutput::parse_file(
@ -368,7 +386,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
emit_build_output(state, &output, id);
}
build_state.insert(id, kind, output);
build_script_outputs
.lock()
.unwrap()
.insert((id, kind), output);
Ok(())
});
@ -385,25 +406,6 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
Ok(job)
}
impl BuildState {
pub fn new(host_config: &TargetConfig, target_config: &TargetConfig) -> BuildState {
let mut overrides = HashMap::new();
let i1 = host_config.overrides.iter().map(|p| (p, Kind::Host));
let i2 = target_config.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
overrides.insert((name.clone(), kind), output.clone());
}
BuildState {
outputs: Mutex::new(HashMap::new()),
overrides,
}
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
}
impl BuildOutput {
pub fn parse_file(
path: &Path,
@ -470,6 +472,7 @@ impl BuildOutput {
script_out_dir.to_str().unwrap(),
);
// Keep in sync with TargetConfig::new.
match key {
"rustc-flags" => {
let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?;
@ -596,14 +599,21 @@ impl BuildDeps {
}
}
/// Computes the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
/// Computes several maps in `Context`:
/// - `build_scripts`: A map that tracks which build scripts each package
/// depends on.
/// - `build_explicit_deps`: Dependency statements emitted by build scripts
/// from a previous run.
/// - `build_script_outputs`: Pre-populates this with any overridden build
/// scripts.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
/// The important one here is `build_scripts`, which for each `(package,
/// kind)` stores a `BuildScripts` object which contains a list of
/// dependencies with build scripts that the unit should consider when
/// linking. For example this lists all dependencies' `-L` flags which need to
/// be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// The given set of units to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
let mut ret = HashMap::new();
@ -627,20 +637,15 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
return Ok(&out[unit]);
}
let key = unit
.pkg
.manifest()
.links()
.map(|l| (l.to_string(), unit.kind));
let build_state = &cx.build_state;
if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
let key = (unit.pkg.package_id(), unit.kind);
cx.build_script_overridden.insert(key);
build_state
.outputs
.lock()
.unwrap()
.insert(key, output.clone());
// If there is a build script override, pre-fill the build output.
if let Some(links) = unit.pkg.manifest().links() {
if let Some(output) = cx.bcx.script_override(links, unit.kind) {
let key = (unit.pkg.package_id(), unit.kind);
cx.build_script_outputs
.lock()
.unwrap()
.insert(key, output.clone());
}
}
let mut ret = BuildScripts::default();
@ -649,6 +654,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
}
// Load any dependency declarations from a previous run.
if unit.mode.is_run_custom_build() {
parse_previous_explicit_deps(cx, unit)?;
}
@ -657,16 +663,16 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
// to rustc invocation caching schemes, so be sure to generate the same
// set of build script dependency orderings via sorting the targets that
// come out of the `Context`.
let mut targets = cx.dep_targets(unit);
targets.sort_by_key(|u| u.pkg.package_id());
let mut dependencies = cx.dep_targets(unit);
dependencies.sort_by_key(|u| u.pkg.package_id());
for unit in targets.iter() {
let dep_scripts = build(out, cx, unit)?;
for dep_unit in dependencies.iter() {
let dep_scripts = build(out, cx, dep_unit)?;
if unit.target.for_host() {
if dep_unit.target.for_host() {
ret.plugins
.extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
} else if unit.target.linkable() {
} else if dep_unit.target.linkable() {
for &(pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind);
}

View File

@ -286,12 +286,12 @@ pub fn prepare_target<'a, 'cfg>(
// build script's fingerprint after it's executed. We do this by
// using the `build_script_local_fingerprints` function which returns a
// thunk we can invoke on a foreign thread to calculate this.
let state = Arc::clone(&cx.build_state);
let build_script_outputs = Arc::clone(&cx.build_script_outputs);
let key = (unit.pkg.package_id(), unit.kind);
let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit);
let output_path = cx.build_explicit_deps[unit].build_script_output.clone();
Work::new(move |_| {
let outputs = state.outputs.lock().unwrap();
let outputs = build_script_outputs.lock().unwrap();
let outputs = &outputs[&key];
let deps = BuildDeps::new(&output_path, Some(outputs));
@ -1264,8 +1264,11 @@ fn build_script_override_fingerprint<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> Option<LocalFingerprint> {
let state = cx.build_state.outputs.lock().unwrap();
let output = state.get(&(unit.pkg.package_id(), unit.kind))?;
// Build script output is only populated at this stage when it is
// overridden.
let build_script_outputs = cx.build_script_outputs.lock().unwrap();
// Returns None if it is not overridden.
let output = build_script_outputs.get(&(unit.pkg.package_id(), unit.kind))?;
let s = format!(
"overridden build state with hash: {}",
util::hash_u64(output)
@ -1578,8 +1581,11 @@ pub fn translate_dep_info(
for file in deps {
// The path may be absolute or relative, canonical or not. Make sure
// it is canonicalized so we are comparing the same kinds of paths.
let canon_file = rustc_cwd.join(file).canonicalize()?;
let abs_file = rustc_cwd.join(file);
// If canonicalization fails, just use the abs path. There is currently
// a bug where --remap-path-prefix is affecting .d files, causing them
// to point to non-existent paths.
let canon_file = abs_file.canonicalize().unwrap_or_else(|_| abs_file.clone());
let (ty, path) = if let Ok(stripped) = canon_file.strip_prefix(&target_root) {
(DepInfoPathType::TargetRootRelative, stripped)

View File

@ -535,9 +535,9 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
unit: &Unit<'a>,
cx: &mut Context<'_, '_>,
) -> CargoResult<()> {
let output = cx.build_state.outputs.lock().unwrap();
let outputs = cx.build_script_outputs.lock().unwrap();
let bcx = &mut cx.bcx;
if let Some(output) = output.get(&(unit.pkg.package_id(), unit.kind)) {
if let Some(output) = outputs.get(&(unit.pkg.package_id(), unit.kind)) {
if !output.warnings.is_empty() {
if let Some(msg) = msg {
writeln!(bcx.config.shell().err(), "{}\n", msg)?;

View File

@ -3,76 +3,128 @@
//! The directory layout is a little tricky at times, hence a separate file to
//! house this logic. The current layout looks like this:
//!
//! ```ignore
//! ```text
//! # This is the root directory for all output, the top-level package
//! # places all of its output here.
//! target/
//!
//! # This is the root directory for all output of *dependencies*
//! deps/
//! # Cache of `rustc -Vv` output for performance.
//! .rustc-info.json
//!
//! # Root directory for all compiled examples
//! examples/
//! # All final artifacts are linked into this directory from `deps`.
//! debug/ # or release/
//!
//! # File used to lock the directory to prevent multiple cargo processes
//! # from using it at the same time.
//! .cargo-lock
//!
//! # Hidden directory that holds all of the fingerprint files for all
//! # packages
//! .fingerprint/
//! # Each package is in a separate directory.
//! $pkgname-$META/
//! # Set of source filenames for this package.
//! dep-lib-$pkgname-$META
//! # Timestamp when this package was last built.
//! invoked.timestamp
//! # The fingerprint hash.
//! lib-$pkgname-$META
//! # Detailed information used for logging the reason why
//! # something is being recompiled.
//! lib-$pkgname-$META.json
//!
//! # This is the root directory for all rustc artifacts except build
//! # scripts, examples, and test and bench executables. Almost every
//! # artifact should have a metadata hash added to its filename to
//! # prevent collisions. One notable exception is dynamic libraries.
//! deps/
//!
//! # Root directory for all compiled examples.
//! examples/
//!
//! # Directory used to store incremental data for the compiler (when
//! # incremental is enabled).
//! incremental/
//!
//! # This is the location at which the output of all custom build
//! # commands are rooted
//! # commands are rooted.
//! build/
//!
//! # Each package gets its own directory where its build script and
//! # script output are placed
//! $pkg1/
//! $pkg2/
//! $pkg3/
//! $pkgname-$META/ # For the build script itself.
//! # The build script executable (name may be changed by user).
//! build-script-build-$META
//! # Hard link to build-script-build-$META.
//! build-script-build
//! # Dependency information generated by rustc.
//! build-script-build-$META.d
//! # Debug information, depending on platform and profile
//! # settings.
//! <debug symbols>
//!
//! # Each package directory has an `out` directory where output
//! # is placed.
//! # The package shows up twice with two different metadata hashes.
//! $pkgname-$META/ # For the output of the build script.
//! # Timestamp when the build script was last executed.
//! invoked.timestamp
//! # Directory where script can output files ($OUT_DIR).
//! out/
//! # Output from the build script.
//! output
//! # Path to `out`, used to help when the target directory is
//! # moved.
//! root-output
//! # Stderr output from the build script.
//! stderr
//!
//! # This is the location at which the output of all old custom build
//! # commands are rooted
//! native/
//! # Output from rustdoc
//! doc/
//!
//! # Each package gets its own directory where its output is
//! # placed. We can't track exactly what's getting put in here, so
//! # we just assume that all relevant output is in these
//! # directories.
//! $pkg1/
//! $pkg2/
//! $pkg3/
//! # Used by `cargo package` and `cargo publish` to build a `.crate` file.
//! package/
//!
//! # Directory used to store incremental data for the compiler (when
//! # incremental is enabled).
//! incremental/
//!
//! # Hidden directory that holds all of the fingerprint files for all
//! # packages
//! .fingerprint/
//! # Experimental feature for generated build scripts.
//! .metabuild/
//! ```
//!
//! When cross-compiling, the layout is the same, except it appears in
//! `target/$TRIPLE`.
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use crate::core::Workspace;
use crate::util::{CargoResult, Config, FileLock, Filesystem};
use crate::util::{CargoResult, FileLock};
/// Contains the paths of all target output locations.
///
/// See module docs for more information.
pub struct Layout {
/// The root directory: `/path/to/target`.
/// If cross compiling: `/path/to/target/$TRIPLE`.
root: PathBuf,
/// The final artifact destination: `$root/debug` (or `release`).
dest: PathBuf,
/// The directory with rustc artifacts: `$dest/deps`
deps: PathBuf,
native: PathBuf,
/// The directory for build scripts: `$dest/build`
build: PathBuf,
/// The directory for incremental files: `$dest/incremental`
incremental: PathBuf,
/// The directory for fingerprints: `$dest/.fingerprint`
fingerprint: PathBuf,
/// The directory for examples: `$dest/examples`
examples: PathBuf,
/// The lock file for a build, will be unlocked when this struct is `drop`ped.
/// The directory for rustdoc output: `$root/doc`
doc: PathBuf,
/// The lockfile for a build (`.cargo-lock`). Will be unlocked when this
/// struct is `drop`ped.
_lock: FileLock,
}
pub fn is_bad_artifact_name(name: &str) -> bool {
["deps", "examples", "build", "native", "incremental"]
["deps", "examples", "build", "incremental"]
.iter()
.any(|&reserved| reserved == name)
}
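Usage sketch; note that `native` is no longer reserved now that its directory is gone:

```rust
assert!(is_bad_artifact_name("deps"));
assert!(!is_bad_artifact_name("native")); // freed up by removing the native/ dir
```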
@ -82,89 +134,57 @@ impl Layout {
///
/// This function will block if the directory is already locked.
///
/// Differs from `at` in that this calculates the root path from the workspace target directory,
/// adding the target triple and the profile (debug, release, ...).
/// `dest` should be the final artifact directory name. Currently either
/// "debug" or "release".
pub fn new(ws: &Workspace<'_>, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
let mut path = ws.target_dir();
let mut root = ws.target_dir();
// Flexible target specifications often point at json files, so interpret
// the target triple as a Path and then just use the file stem as the
// component for the directory name in that case.
if let Some(triple) = triple {
let triple = Path::new(triple);
if triple.extension().and_then(|s| s.to_str()) == Some("json") {
path.push(
root.push(
triple
.file_stem()
.ok_or_else(|| failure::format_err!("invalid target"))?,
);
} else {
path.push(triple);
root.push(triple);
}
}
path.push(dest);
Layout::at(ws.config(), path)
}
let dest = root.join(dest);
// If the destination directory doesn't already exist, go ahead and create it
// here. Use this opportunity to exclude it from backups as well if the
// system supports it since this is a freshly created folder.
if !dest.as_path_unlocked().exists() {
dest.create_dir()?;
exclude_from_backups(dest.as_path_unlocked());
}
/// Calculate the paths for build output, lock the build directory, and return as a Layout.
///
/// This function will block if the directory is already locked.
pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
// For now we don't do any more finer-grained locking on the artifact
// directory, so just lock the entire thing for the duration of this
// compile.
let lock = root.open_rw(".cargo-lock", config, "build directory")?;
let lock = dest.open_rw(".cargo-lock", ws.config(), "build directory")?;
let root = root.into_path_unlocked();
let dest = dest.into_path_unlocked();
Ok(Layout {
deps: root.join("deps"),
native: root.join("native"),
build: root.join("build"),
incremental: root.join("incremental"),
fingerprint: root.join(".fingerprint"),
examples: root.join("examples"),
deps: dest.join("deps"),
build: dest.join("build"),
incremental: dest.join("incremental"),
fingerprint: dest.join(".fingerprint"),
examples: dest.join("examples"),
doc: root.join("doc"),
root,
dest,
_lock: lock,
})
}
#[cfg(not(target_os = "macos"))]
fn exclude_from_backups(&self, _: &Path) {}
#[cfg(target_os = "macos")]
/// Marks files or directories as excluded from Time Machine on macOS
///
/// This is recommended to prevent derived/temporary files from bloating backups.
fn exclude_from_backups(&self, path: &Path) {
use core_foundation::base::TCFType;
use core_foundation::{number, string, url};
use std::ptr;
// For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
let path = url::CFURL::from_path(path, false);
if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) {
unsafe {
url::CFURLSetResourcePropertyForKey(
path.as_concrete_TypeRef(),
is_excluded_key.as_concrete_TypeRef(),
number::kCFBooleanTrue as *const _,
ptr::null_mut(),
);
}
}
// Errors are ignored, since it's an optional feature and failure
// doesn't prevent Cargo from working
}
/// Makes sure all directories stored in the Layout exist on the filesystem.
pub fn prepare(&mut self) -> io::Result<()> {
if fs::metadata(&self.root).is_err() {
fs::create_dir_all(&self.root)?;
self.exclude_from_backups(&self.root);
}
mkdir(&self.deps)?;
mkdir(&self.native)?;
mkdir(&self.incremental)?;
mkdir(&self.fingerprint)?;
mkdir(&self.examples)?;
@ -180,9 +200,9 @@ impl Layout {
}
}
/// Fetch the root path.
/// Fetch the destination path for final artifacts (`/…/target/debug`).
pub fn dest(&self) -> &Path {
&self.root
&self.dest
}
/// Fetch the deps path.
pub fn deps(&self) -> &Path {
@ -192,7 +212,11 @@ impl Layout {
pub fn examples(&self) -> &Path {
&self.examples
}
/// Fetch the root path.
/// Fetch the doc path.
pub fn doc(&self) -> &Path {
&self.doc
}
/// Fetch the root path (`/…/target`).
pub fn root(&self) -> &Path {
&self.root
}
@ -204,8 +228,37 @@ impl Layout {
pub fn fingerprint(&self) -> &Path {
&self.fingerprint
}
/// Fetch the build path.
/// Fetch the build script path.
pub fn build(&self) -> &Path {
&self.build
}
}
#[cfg(not(target_os = "macos"))]
fn exclude_from_backups(_: &Path) {}
#[cfg(target_os = "macos")]
/// Marks files or directories as excluded from Time Machine on macOS
///
/// This is recommended to prevent derived/temporary files from bloating backups.
fn exclude_from_backups(path: &Path) {
use core_foundation::base::TCFType;
use core_foundation::{number, string, url};
use std::ptr;
// For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
let path = url::CFURL::from_path(path, false);
if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) {
unsafe {
url::CFURLSetResourcePropertyForKey(
path.as_concrete_TypeRef(),
is_excluded_key.as_concrete_TypeRef(),
number::kCFBooleanTrue as *const _,
ptr::null_mut(),
);
}
}
// Errors are ignored, since it's an optional feature and failure
// doesn't prevent Cargo from working
}

View File

@ -18,7 +18,7 @@ use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use failure::{bail, Error};
use failure::Error;
use lazycell::LazyCell;
use log::debug;
use same_file::is_same_file;
@ -29,7 +29,7 @@ pub use self::build_context::{BuildContext, FileFlavor, TargetConfig, TargetInfo
use self::build_plan::BuildPlan;
pub use self::compilation::{Compilation, Doctest};
pub use self::context::Context;
pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts};
pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts};
pub use self::job::Freshness;
use self::job::{Job, Work};
use self::job_queue::{JobQueue, JobState};
@ -192,9 +192,9 @@ fn rustc<'a, 'cfg>(
let kind = unit.kind;
// Prepare the native lib state (extra `-L` and `-l` flags).
let build_state = cx.build_state.clone();
let build_script_outputs = Arc::clone(&cx.build_script_outputs);
let current_id = unit.pkg.package_id();
let build_deps = load_build_deps(cx, unit);
let build_scripts = cx.build_scripts.get(unit).cloned();
// If we are a binary and the package also contains a library, then we
// don't pass the `-l` flags.
@ -242,20 +242,20 @@ fn rustc<'a, 'cfg>(
// located somewhere in there.
// Finally, if custom environment variables have been produced by
// previous build scripts, we include them in the rustc invocation.
if let Some(build_deps) = build_deps {
let build_state = build_state.outputs.lock().unwrap();
if let Some(build_scripts) = build_scripts {
let script_outputs = build_script_outputs.lock().unwrap();
if !build_plan {
add_native_deps(
&mut rustc,
&build_state,
&build_deps,
&script_outputs,
&build_scripts,
pass_l_flag,
pass_cdylib_link_args,
current_id,
)?;
add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?;
}
add_custom_env(&mut rustc, &build_state, current_id, kind)?;
add_custom_env(&mut rustc, &script_outputs, current_id, kind)?;
}
for output in outputs.iter() {
@ -341,16 +341,16 @@ fn rustc<'a, 'cfg>(
// present in `state`) to the command provided.
fn add_native_deps(
rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_script_outputs: &BuildScriptOutputs,
build_scripts: &BuildScripts,
pass_l_flag: bool,
pass_cdylib_link_args: bool,
current_id: PackageId,
) -> CargoResult<()> {
for key in build_scripts.to_link.iter() {
let output = build_state.get(key).ok_or_else(|| {
let output = build_script_outputs.get(key).ok_or_else(|| {
internal(format!(
"couldn't find build state for {}/{:?}",
"couldn't find build script output for {}/{:?}",
key.0, key.1
))
})?;
@ -381,12 +381,12 @@ fn rustc<'a, 'cfg>(
// been put there by one of the `build_scripts`) to the command provided.
fn add_custom_env(
rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_script_outputs: &BuildScriptOutputs,
current_id: PackageId,
kind: Kind,
) -> CargoResult<()> {
let key = (current_id, kind);
if let Some(output) = build_state.get(&key) {
if let Some(output) = build_script_outputs.get(&key) {
for &(ref name, ref value) in output.env.iter() {
rustc.env(name, value);
}
@ -480,6 +480,7 @@ fn link_targets<'a, 'cfg>(
}))
}
/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it.
fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> {
debug!("linking {} to {}", src.display(), dst.display());
if is_same_file(src, dst).unwrap_or(false) {
@ -522,16 +523,12 @@ fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> {
Ok(())
}
fn load_build_deps(cx: &Context<'_, '_>, unit: &Unit<'_>) -> Option<Arc<BuildScripts>> {
cx.build_scripts.get(unit).cloned()
}
// For all plugin dependencies, add their -L paths (now calculated and
// present in `state`) to the dynamic library load path for the command to
// execute.
// For all plugin dependencies, add their -L paths (now calculated and present
// in `build_script_outputs`) to the dynamic library load path for the command
// to execute.
fn add_plugin_deps(
rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_script_outputs: &BuildScriptOutputs,
build_scripts: &BuildScripts,
root_output: &PathBuf,
) -> CargoResult<()> {
@ -539,7 +536,7 @@ fn add_plugin_deps(
let search_path = rustc.get_env(var).unwrap_or_default();
let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
for &id in build_scripts.plugins.iter() {
let output = build_state
let output = build_script_outputs
.get(&(id, Kind::Host))
.ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
search_path.append(&mut filter_dynamic_search_path(
@ -614,7 +611,6 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
add_path_args(bcx, unit, &mut rustdoc);
add_cap_lints(bcx, unit, &mut rustdoc);
add_color(bcx, &mut rustdoc);
if unit.kind != Kind::Host {
if let Some(ref target) = bcx.build_config.requested_target {
@ -635,7 +631,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
}
add_error_format(cx, &mut rustdoc, false, false)?;
add_error_format_and_color(cx, &mut rustdoc, false)?;
if let Some(args) = bcx.extra_args_for(unit) {
rustdoc.args(args);
@ -646,14 +642,14 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
rustdoc.args(bcx.rustdocflags_args(unit));
let name = unit.pkg.name().to_string();
let build_state = cx.build_state.clone();
let build_script_outputs = Arc::clone(&cx.build_script_outputs);
let key = (unit.pkg.package_id(), unit.kind);
let package_id = unit.pkg.package_id();
let target = unit.target.clone();
let mut output_options = OutputOptions::new(cx, unit);
Ok(Work::new(move |state| {
if let Some(output) = build_state.outputs.lock().unwrap().get(&key) {
if let Some(output) = build_script_outputs.lock().unwrap().get(&key) {
for cfg in output.cfgs.iter() {
rustdoc.arg("--cfg").arg(cfg);
}
@ -722,39 +718,20 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessB
}
}
fn add_color(bcx: &BuildContext<'_, '_>, cmd: &mut ProcessBuilder) {
let shell = bcx.config.shell();
let color = if shell.supports_color() {
"always"
} else {
"never"
};
cmd.args(&["--color", color]);
}
/// Add error-format flags to the command.
///
/// This is rather convoluted right now. The general overview is:
/// - If -Zcache-messages or `build.pipelining` is enabled, Cargo always uses
/// JSON output. This has several benefits, such as being easier to parse,
/// handles changing formats (for replaying cached messages), ensures
/// atomic output (so messages aren't interleaved), etc.
/// - `supports_termcolor` is a temporary flag. rustdoc does not yet support
/// the `--json-rendered` flag, but it is intended to fix that soon.
/// - `short` output is not yet supported for JSON output. We haven't yet
/// decided how this problem will be resolved. Probably either adding
/// "short" to the JSON output, or more ambitiously moving diagnostic
/// rendering to an external library that Cargo can share with rustc.
/// This is somewhat odd right now, but the general overview is that if
/// `-Zcache-messages` or `pipelined` is enabled then Cargo always uses JSON
/// output. This has several benefits, such as being easier to parse, handles
/// changing formats (for replaying cached messages), ensures atomic output (so
/// messages aren't interleaved), etc.
///
/// It is intended in the future that Cargo *always* uses the JSON output, and
/// this function can be simplified. The above issues need to be resolved, the
/// flags need to be stabilized, and we need more testing to ensure there
/// aren't any regressions.
fn add_error_format(
/// It is intended in the future that Cargo *always* uses the JSON output (by
/// turning on cache-messages by default), and this function can be simplified.
fn add_error_format_and_color(
cx: &Context<'_, '_>,
cmd: &mut ProcessBuilder,
pipelined: bool,
supports_termcolor: bool,
) -> CargoResult<()> {
// If this unit is producing a required rmeta file then we need to know
// when the rmeta file is ready so we can signal to the rest of Cargo that
@ -769,36 +746,52 @@ fn add_error_format(
// internally understand that we should extract the `rendered` field and
// present it if we can.
if cx.bcx.build_config.cache_messages() || pipelined {
cmd.arg("--error-format=json").arg("-Zunstable-options");
if supports_termcolor {
cmd.arg("--json-rendered=termcolor");
}
if cx.bcx.build_config.message_format == MessageFormat::Short {
// FIXME(rust-lang/rust#60419): right now we have no way of
// turning on JSON messages from the compiler and also asking
// the rendered field to be in the `short` format.
bail!(
"currently `--message-format short` is incompatible with {}",
if pipelined {
"pipelined compilation"
} else {
"cached output"
}
);
}
cmd.arg("--error-format=json");
let mut json = String::from("--json=diagnostic-rendered-ansi");
if pipelined {
cmd.arg("-Zemit-artifact-notifications");
json.push_str(",artifacts");
}
match cx.bcx.build_config.message_format {
MessageFormat::Short | MessageFormat::Json { short: true, .. } => {
json.push_str(",diagnostic-short");
}
_ => {}
}
cmd.arg(json);
} else {
let mut color = true;
match cx.bcx.build_config.message_format {
MessageFormat::Human => (),
MessageFormat::Json => {
MessageFormat::Json {
ansi,
short,
render_diagnostics,
} => {
cmd.arg("--error-format").arg("json");
// If ansi is explicitly requested, enable it. If we're
// rendering diagnostics ourselves then also enable it because
// we'll figure out what to do with the colors later.
if ansi || render_diagnostics {
cmd.arg("--json=diagnostic-rendered-ansi");
}
if short {
cmd.arg("--json=diagnostic-short");
}
color = false;
}
MessageFormat::Short => {
cmd.arg("--error-format").arg("short");
}
}
if color {
let color = if cx.bcx.config.shell().supports_color() {
"always"
} else {
"never"
};
cmd.args(&["--color", color]);
}
}
Ok(())
}
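A worked example of the argument assembled in the JSON branch above, assuming `pipelined == true` and `--message-format short`:

```rust
// The pieces concatenate, comma-separated, in this order:
let json_arg = "--json=diagnostic-rendered-ansi,artifacts,diagnostic-short";
```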
@ -829,8 +822,7 @@ fn build_base_args<'a, 'cfg>(
cmd.arg("--crate-name").arg(&unit.target.crate_name());
add_path_args(bcx, unit, cmd);
add_color(bcx, cmd);
add_error_format(cx, cmd, cx.rmeta_required(unit), true)?;
add_error_format_and_color(cx, cmd, cx.rmeta_required(unit))?;
if !test {
for crate_type in crate_types.iter() {
@ -1119,9 +1111,8 @@ impl Kind {
}
struct OutputOptions {
/// Get the `"rendered"` field from the JSON output and display it on
/// stderr instead of the JSON message.
extract_rendered_messages: bool,
/// What format we're emitting from Cargo itself.
format: MessageFormat,
/// Look for JSON message that indicates .rmeta file is available for
/// pipelined compilation.
look_for_metadata_directive: bool,
@ -1135,7 +1126,6 @@ struct OutputOptions {
impl OutputOptions {
fn new<'a>(cx: &Context<'a, '_>, unit: &Unit<'a>) -> OutputOptions {
let extract_rendered_messages = cx.bcx.build_config.message_format != MessageFormat::Json;
let look_for_metadata_directive = cx.rmeta_required(unit);
let color = cx.bcx.config.shell().supports_color();
let cache_cell = if cx.bcx.build_config.cache_messages() {
@ -1147,7 +1137,7 @@ impl OutputOptions {
None
};
OutputOptions {
extract_rendered_messages,
format: cx.bcx.build_config.message_format,
look_for_metadata_directive,
color,
cache_cell,
@ -1204,55 +1194,66 @@ fn on_stderr_line(
}
};
// In some modes of compilation Cargo switches the compiler to JSON mode
// but the user didn't request that so we still want to print pretty rustc
// colorized diagnostics. In those cases (`extract_rendered_messages`) we
// take a look at the JSON blob we got, see if it's a relevant diagnostic,
// and if so forward just that diagnostic for us to print.
if options.extract_rendered_messages {
#[derive(serde::Deserialize)]
struct CompilerMessage {
rendered: String,
}
if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
// state.stderr will add a newline
if error.rendered.ends_with('\n') {
error.rendered.pop();
// Depending on what we're emitting from Cargo itself, we figure out what to
// do with this JSON message.
match options.format {
// In the "human" output formats (human/short) or if diagnostic messages
// from rustc aren't being included in the output of Cargo's JSON
// messages then we extract the diagnostic (if present) here and handle
// it ourselves.
MessageFormat::Human
| MessageFormat::Short
| MessageFormat::Json {
render_diagnostics: true,
..
} => {
#[derive(serde::Deserialize)]
struct CompilerMessage {
rendered: String,
}
let rendered = if options.color {
error.rendered
} else {
// Strip only fails if the Writer fails, which is Cursor
// on a Vec, which should never fail.
strip_ansi_escapes::strip(&error.rendered)
if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
// state.stderr will add a newline
if error.rendered.ends_with('\n') {
error.rendered.pop();
}
let rendered = if options.color {
error.rendered
} else {
// Strip only fails if the Writer fails, which is Cursor
// on a Vec, which should never fail.
strip_ansi_escapes::strip(&error.rendered)
.map(|v| String::from_utf8(v).expect("utf8"))
.expect("strip should never fail")
};
state.stderr(rendered);
return Ok(());
}
}
// Remove color information from the rendered string. When pipelining is
// enabled and/or when cached messages are enabled we're always asking
// for ANSI colors from rustc, so unconditionally postprocess here and
// remove ansi color codes.
MessageFormat::Json { ansi: false, .. } => {
#[derive(serde::Deserialize, serde::Serialize)]
struct CompilerMessage {
rendered: String,
#[serde(flatten)]
other: std::collections::BTreeMap<String, serde_json::Value>,
}
if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
error.rendered = strip_ansi_escapes::strip(&error.rendered)
.map(|v| String::from_utf8(v).expect("utf8"))
.expect("strip should never fail")
};
state.stderr(rendered);
return Ok(());
}
} else {
// Remove color information from the rendered string. rustc has not
// included color in the past, so to avoid breaking anything, strip it
// out when --json-rendered=termcolor is used. This runs
// unconditionally under the assumption that Cargo will eventually
// move to this as the default mode. Perhaps in the future, cargo
// could allow the user to enable/disable color (such as with a
// `--json-rendered` or `--color` or `--message-format` flag).
#[derive(serde::Deserialize, serde::Serialize)]
struct CompilerMessage {
rendered: String,
#[serde(flatten)]
other: std::collections::BTreeMap<String, serde_json::Value>,
}
if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
error.rendered = strip_ansi_escapes::strip(&error.rendered)
.map(|v| String::from_utf8(v).expect("utf8"))
.unwrap_or(error.rendered);
let new_line = serde_json::to_string(&error)?;
let new_msg: Box<serde_json::value::RawValue> = serde_json::from_str(&new_line)?;
compiler_message = new_msg;
.unwrap_or(error.rendered);
let new_line = serde_json::to_string(&error)?;
let new_msg: Box<serde_json::value::RawValue> = serde_json::from_str(&new_line)?;
compiler_message = new_msg;
}
}
// If ansi colors are desired then we should be good to go! We can just
// pass through this message as-is.
MessageFormat::Json { ansi: true, .. } => {}
}
// In some modes of execution we will execute rustc with `-Z
@ -1303,14 +1304,8 @@ fn replay_output_cache(
color: bool,
) -> Work {
let target = target.clone();
let extract_rendered_messages = match format {
MessageFormat::Human => true,
MessageFormat::Json => false,
// FIXME: short not supported.
MessageFormat::Short => false,
};
let mut options = OutputOptions {
extract_rendered_messages,
format,
look_for_metadata_directive: false,
color,
cache_cell: None,

View File

@ -59,7 +59,7 @@ fn add_deps_for_unit<'a, 'b>(
// Add rerun-if-changed dependencies
let key = (unit.pkg.package_id(), unit.kind);
if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) {
if let Some(output) = context.build_script_outputs.lock().unwrap().get(&key) {
for path in &output.rerun_if_changed {
deps.insert(path.into());
}

View File

@ -21,7 +21,7 @@
//! 3. To actually perform the feature gate, you'll want to have code that looks
//! like:
//!
//! ```rust,ignore
//! ```rust,compile_fail
//! use core::{Feature, Features};
//!
//! let feature = Feature::launch_into_space();

View File

@ -761,6 +761,7 @@ impl Target {
pub fn documented(&self) -> bool {
self.doc
}
// A plugin, proc-macro, or build-script.
pub fn for_host(&self) -> bool {
self.for_host
}

View File

@ -13,7 +13,7 @@ use crate::util::CargoResult;
use crate::util::Graph;
use super::dep_cache::RegistryQueryer;
use super::types::{ConflictMap, FeaturesSet, Method};
use super::types::{ConflictMap, FeaturesSet, ResolveOpts};
pub use super::encode::Metadata;
pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
@ -103,11 +103,11 @@ impl Context {
/// cased `summary` to get activated. This may not be present for the root
/// crate, for example.
///
/// Returns `true` if this summary with the given method is already activated.
/// Returns `true` if this summary with the given features is already activated.
pub fn flag_activated(
&mut self,
summary: &Summary,
method: &Method,
opts: &ResolveOpts,
parent: Option<(&Summary, &Dependency)>,
) -> CargoResult<bool> {
let id = summary.package_id();
@ -158,25 +158,21 @@ impl Context {
}
}
debug!("checking if {} is already activated", summary.package_id());
let (features, use_default) = match method {
Method::Everything
| Method::Required {
all_features: true, ..
} => return Ok(false),
Method::Required {
features,
uses_default_features,
..
} => (features, uses_default_features),
};
if opts.all_features {
return Ok(false);
}
let has_default_feature = summary.features().contains_key("default");
Ok(match self.resolve_features.get(&id) {
Some(prev) => {
features.is_subset(prev)
&& (!use_default || prev.contains("default") || !has_default_feature)
opts.features.is_subset(prev)
&& (!opts.uses_default_features
|| prev.contains("default")
|| !has_default_feature)
}
None => {
opts.features.is_empty() && (!opts.uses_default_features || !has_default_feature)
}
None => features.is_empty() && (!use_default || !has_default_feature),
})
}
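// Worked example of the rule above, with hypothetical values: if a package
// was previously activated with prev = {"default", "serde"} and a new
// request arrives with opts.features = {"serde"} and
// opts.uses_default_features = true, then `opts.features.is_subset(prev)`
// holds and `prev` contains "default", so the package counts as already
// activated and resolution can skip it.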

View File

@ -20,7 +20,7 @@ use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry,
use crate::util::errors::CargoResult;
use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet};
use crate::core::resolver::{ActivateResult, Method};
use crate::core::resolver::{ActivateResult, ResolveOpts};
pub struct RegistryQueryer<'a> {
pub registry: &'a mut (dyn Registry + 'a),
@ -34,7 +34,7 @@ pub struct RegistryQueryer<'a> {
registry_cache: HashMap<Dependency, Rc<Vec<Summary>>>,
/// a cache of `Dependency`s that are required for a `Summary`
summary_cache: HashMap<
(Option<PackageId>, Summary, Method),
(Option<PackageId>, Summary, ResolveOpts),
Rc<(HashSet<InternedString>, Rc<Vec<DepInfo>>)>,
>,
/// all the cases we ended up using a supplied replacement
@ -192,20 +192,20 @@ impl<'a> RegistryQueryer<'a> {
}
/// Find out what dependencies will be added by activating `candidate`,
/// with features described in `method`. Then look up in the `registry`
/// with features described in `opts`. Then look up in the `registry`
/// the candidates that will fulfil each of these dependencies, as it is the
/// next obvious question.
pub fn build_deps(
&mut self,
parent: Option<PackageId>,
candidate: &Summary,
method: &Method,
opts: &ResolveOpts,
) -> ActivateResult<Rc<(HashSet<InternedString>, Rc<Vec<DepInfo>>)>> {
// if we have calculated a result before, then we can just return it,
// as it is a "pure" query of its arguments.
if let Some(out) = self
.summary_cache
.get(&(parent, candidate.clone(), method.clone()))
.get(&(parent, candidate.clone(), opts.clone()))
.cloned()
{
return Ok(out);
@ -213,7 +213,7 @@ impl<'a> RegistryQueryer<'a> {
// First, figure out our set of dependencies based on the requested set
// of features. This also calculates what features we're going to enable
// for our own dependencies.
let (used_features, deps) = resolve_features(parent, candidate, method)?;
let (used_features, deps) = resolve_features(parent, candidate, opts)?;
// Next, transform all dependencies into a list of possible candidates
// which can satisfy that dependency.
@ -236,7 +236,7 @@ impl<'a> RegistryQueryer<'a> {
// If we succeed we add the result to the cache so we can use it again next time.
// We don't cache the failure cases as they don't impl Clone.
self.summary_cache
.insert((parent, candidate.clone(), method.clone()), out.clone());
.insert((parent, candidate.clone(), opts.clone()), out.clone());
Ok(out)
}
@ -247,18 +247,13 @@ impl<'a> RegistryQueryer<'a> {
pub fn resolve_features<'b>(
parent: Option<PackageId>,
s: &'b Summary,
method: &'b Method,
opts: &'b ResolveOpts,
) -> ActivateResult<(HashSet<InternedString>, Vec<(Dependency, FeaturesSet)>)> {
let dev_deps = match *method {
Method::Everything => true,
Method::Required { dev_deps, .. } => dev_deps,
};
// First, filter by dev-dependencies.
let deps = s.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps);
let reqs = build_requirements(s, method)?;
let reqs = build_requirements(s, opts)?;
let mut ret = Vec::new();
let mut used_features = HashSet::new();
let default_dep = (false, BTreeSet::new());
@ -336,52 +331,34 @@ pub fn resolve_features<'b>(
Ok((reqs.into_used(), ret))
}
/// Takes requested features for a single package from the input `Method` and
/// Takes requested features for a single package from the input `ResolveOpts` and
/// recurses to find all requested features, dependencies and requested
/// dependency features in a `Requirements` object, returning it to the resolver.
fn build_requirements<'a, 'b: 'a>(
s: &'a Summary,
method: &'b Method,
opts: &'b ResolveOpts,
) -> CargoResult<Requirements<'a>> {
let mut reqs = Requirements::new(s);
match method {
Method::Everything
| Method::Required {
all_features: true, ..
} => {
for key in s.features().keys() {
reqs.require_feature(*key)?;
}
for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
reqs.require_dependency(dep.name_in_toml());
}
if opts.all_features {
for key in s.features().keys() {
reqs.require_feature(*key)?;
}
Method::Required {
all_features: false,
features: requested,
..
} => {
for &f in requested.iter() {
reqs.require_value(&FeatureValue::new(f, s))?;
}
for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
reqs.require_dependency(dep.name_in_toml());
}
} else {
for &f in opts.features.iter() {
reqs.require_value(&FeatureValue::new(f, s))?;
}
}
match *method {
Method::Everything
| Method::Required {
uses_default_features: true,
..
} => {
if s.features().contains_key("default") {
reqs.require_feature(InternedString::new("default"))?;
}
if opts.uses_default_features {
if s.features().contains_key("default") {
reqs.require_feature(InternedString::new("default"))?;
}
Method::Required {
uses_default_features: false,
..
} => {}
}
Ok(reqs)
}
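// Putting the branches above together: `--all-features` requires every
// declared feature and every optional dependency; otherwise only the
// explicitly requested `opts.features` are required, plus "default" when
// `opts.uses_default_features` is set and the summary declares such a
// feature. So a plain `cargo build` requires just {"default"}, while
// `cargo build --features foo --no-default-features` requires just {"foo"}.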

View File

@ -105,6 +105,7 @@ use crate::util::{internal, Graph};
use super::{Resolve, ResolveVersion};
/// The `Cargo.lock` structure.
#[derive(Serialize, Deserialize, Debug)]
pub struct EncodableResolve {
package: Option<Vec<EncodableDependency>>,
@ -123,6 +124,14 @@ struct Patch {
pub type Metadata = BTreeMap<String, String>;
impl EncodableResolve {
/// Convert a `Cargo.lock` to a Resolve.
///
/// Note that this `Resolve` is not "complete". For example, the
/// dependencies do not know the difference between regular/dev/build
/// dependencies, so they are not filled in. It also does not include
/// `features`. Care should be taken when using this Resolve. One of the
/// primary uses is to be used with `resolve_with_previous` to guide the
/// resolver to create a complete Resolve.
pub fn into_resolve(self, ws: &Workspace<'_>) -> CargoResult<Resolve> {
let path_deps = build_path_deps(ws);
let mut checksums = HashMap::new();

View File

@ -203,100 +203,115 @@ pub(super) fn activation_error(
};
candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
let mut msg = if !candidates.is_empty() {
let versions = {
let mut versions = candidates
.iter()
.take(3)
.map(|cand| cand.version().to_string())
.collect::<Vec<_>>();
if candidates.len() > 3 {
versions.push("...".into());
}
versions.join(", ")
};
let mut msg = format!(
"failed to select a version for the requirement `{} = \"{}\"`\n \
candidate versions found which didn't match: {}\n \
location searched: {}\n",
dep.package_name(),
dep.version_req(),
versions,
registry.describe_source(dep.source_id()),
);
msg.push_str("required by ");
msg.push_str(&describe_path(
&cx.parents.path_to_bottom(&parent.package_id()),
));
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') {
msg.push_str(
"\nconsider running `cargo update` to update \
a path dependency's locked version",
);
}
if registry.is_replaced(dep.source_id()) {
msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?");
}
msg
} else {
// Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
// was meant. So we try asking the registry for a `fuzzy` search for suggestions.
let mut candidates = Vec::new();
if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.name()), true) {
return to_resolve_err(e);
};
candidates.sort_unstable();
candidates.dedup();
let mut candidates: Vec<_> = candidates
.iter()
.map(|n| (lev_distance(&*new_dep.package_name(), &*n), n))
.filter(|&(d, _)| d < 4)
.collect();
candidates.sort_by_key(|o| o.0);
let mut msg = format!(
"no matching package named `{}` found\n\
location searched: {}\n",
dep.package_name(),
dep.source_id()
);
let mut msg =
if !candidates.is_empty() {
let mut names = candidates
.iter()
.take(3)
.map(|c| c.1.as_str())
.collect::<Vec<_>>();
let versions = {
let mut versions = candidates
.iter()
.take(3)
.map(|cand| cand.version().to_string())
.collect::<Vec<_>>();
if candidates.len() > 3 {
names.push("...");
if candidates.len() > 3 {
versions.push("...".into());
}
versions.join(", ")
};
let mut msg = format!(
"failed to select a version for the requirement `{} = \"{}\"`\n \
candidate versions found which didn't match: {}\n \
location searched: {}\n",
dep.package_name(),
dep.version_req(),
versions,
registry.describe_source(dep.source_id()),
);
msg.push_str("required by ");
msg.push_str(&describe_path(
&cx.parents.path_to_bottom(&parent.package_id()),
));
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') {
msg.push_str(
"\nconsider running `cargo update` to update \
a path dependency's locked version",
);
}
msg.push_str("perhaps you meant: ");
msg.push_str(&names.iter().enumerate().fold(
String::default(),
|acc, (i, el)| match i {
0 => acc + el,
i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el,
_ => acc + ", " + el,
},
));
msg.push_str("\n");
}
msg.push_str("required by ");
msg.push_str(&describe_path(
&cx.parents.path_to_bottom(&parent.package_id()),
));
if registry.is_replaced(dep.source_id()) {
msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?");
}
msg
};
msg
} else {
// Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
// was meant. So we try asking the registry for a `fuzzy` search for suggestions.
let mut candidates = Vec::new();
if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) {
return to_resolve_err(e);
};
candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name()));
candidates.dedup_by(|a, b| a.name() == b.name());
let mut candidates: Vec<_> = candidates
.iter()
.map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n))
.filter(|&(d, _)| d < 4)
.collect();
candidates.sort_by_key(|o| o.0);
let mut msg = format!(
"no matching package named `{}` found\n\
location searched: {}\n",
dep.package_name(),
dep.source_id()
);
if !candidates.is_empty() {
// If the dependency package name is equal to the name of the candidate here,
// it may be a prerelease package which hasn't been specified correctly
if dep.package_name() == candidates[0].1.name()
&& candidates[0].1.package_id().version().is_prerelease()
{
msg.push_str("prerelease package needs to be specified explicitly\n");
msg.push_str(&format!(
"{name} = {{ version = \"{version}\" }}",
name = candidates[0].1.name(),
version = candidates[0].1.package_id().version()
));
} else {
let mut names = candidates
.iter()
.take(3)
.map(|c| c.1.name().as_str())
.collect::<Vec<_>>();
if candidates.len() > 3 {
names.push("...");
}
msg.push_str("perhaps you meant: ");
msg.push_str(&names.iter().enumerate().fold(
String::default(),
|acc, (i, el)| match i {
0 => acc + el,
i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el,
_ => acc + ", " + el,
},
));
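// Note: the fold above renders up to three suggestions as `a, b or c`;
// when the list was truncated, the trailing "..." entry is joined with a
// comma instead, yielding `a, b, c, ...`.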
}
msg.push_str("\n");
}
msg.push_str("required by ");
msg.push_str(&describe_path(
&cx.parents.path_to_bottom(&parent.package_id()),
));
msg
};
if let Some(config) = config {
if config.offline() {

View File

@ -69,7 +69,7 @@ pub use self::encode::Metadata;
pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
pub use self::errors::{ActivateError, ActivateResult, ResolveError};
pub use self::resolve::{Resolve, ResolveVersion};
pub use self::types::Method;
pub use self::types::ResolveOpts;
mod conflict_cache;
mod context;
@ -120,7 +120,7 @@ mod types;
/// When we have a decision for how to implement it without breaking existing
/// functionality, this flag can be removed.
pub fn resolve(
summaries: &[(Summary, Method)],
summaries: &[(Summary, ResolveOpts)],
replacements: &[(PackageIdSpec, Dependency)],
registry: &mut dyn Registry,
try_to_use: &HashSet<PackageId>,
@ -169,7 +169,7 @@ pub fn resolve(
fn activate_deps_loop(
mut cx: Context,
registry: &mut RegistryQueryer<'_>,
summaries: &[(Summary, Method)],
summaries: &[(Summary, ResolveOpts)],
config: Option<&Config>,
) -> CargoResult<Context> {
let mut backtrack_stack = Vec::new();
@ -180,9 +180,9 @@ fn activate_deps_loop(
let mut past_conflicting_activations = conflict_cache::ConflictCache::new();
// Activate all the initial summaries to kick off some work.
for &(ref summary, ref method) in summaries {
for &(ref summary, ref opts) in summaries {
debug!("initial activation: {}", summary.package_id());
let res = activate(&mut cx, registry, None, summary.clone(), method.clone());
let res = activate(&mut cx, registry, None, summary.clone(), opts.clone());
match res {
Ok(Some((frame, _))) => remaining_deps.push(frame),
Ok(None) => (),
@ -366,7 +366,7 @@ fn activate_deps_loop(
};
let pid = candidate.package_id();
let method = Method::Required {
let opts = ResolveOpts {
dev_deps: false,
features: Rc::clone(&features),
all_features: false,
@ -379,7 +379,7 @@ fn activate_deps_loop(
dep.package_name(),
candidate.version()
);
let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, method);
let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts);
let successfully_activated = match res {
// Success! We've now activated our `candidate` in our context
@ -583,7 +583,7 @@ fn activate_deps_loop(
/// Attempts to activate the summary `candidate` in the context `cx`.
///
/// This function will pull dependency summaries from the registry provided, and
/// the dependencies of the package will be determined by the `method` provided.
/// the dependencies of the package will be determined by the `opts` provided.
/// If `candidate` was activated, this function returns the dependency frame to
/// iterate through next.
fn activate(
@ -591,7 +591,7 @@ fn activate(
registry: &mut RegistryQueryer<'_>,
parent: Option<(&Summary, &Dependency)>,
candidate: Summary,
method: Method,
opts: ResolveOpts,
) -> ActivateResult<Option<(DepsFrame, Duration)>> {
let candidate_pid = candidate.package_id();
if let Some((parent, dep)) = parent {
@ -652,7 +652,7 @@ fn activate(
}
}
let activated = cx.flag_activated(&candidate, &method, parent)?;
let activated = cx.flag_activated(&candidate, &opts, parent)?;
let candidate = match registry.replacement_summary(candidate_pid) {
Some(replace) => {
@ -661,7 +661,7 @@ fn activate(
// does. TBH it basically cause panics in the test suite if
// `parent` is passed through here and `[replace]` is otherwise
// on life support so it's not critical to fix bugs anyway per se.
if cx.flag_activated(replace, &method, None)? && activated {
if cx.flag_activated(replace, &opts, None)? && activated {
return Ok(None);
}
trace!(
@ -682,7 +682,7 @@ fn activate(
let now = Instant::now();
let (used_features, deps) =
&*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &method)?;
&*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &opts)?;
// Record what list of features is active for this package.
if !used_features.is_empty() {

View File

@ -23,15 +23,34 @@ pub struct Resolve {
/// from `Cargo.toml`. We need a `Vec` here because the same package
/// might be present in both `[dependencies]` and `[build-dependencies]`.
graph: Graph<PackageId, Vec<Dependency>>,
/// Replacements from the `[replace]` table.
replacements: HashMap<PackageId, PackageId>,
/// Inverted version of `replacements`.
reverse_replacements: HashMap<PackageId, PackageId>,
/// An empty `HashSet` to avoid creating a new `HashSet` for every package
/// that does not have any features, and to avoid using `Option`, which
/// simplifies the API.
empty_features: HashSet<String>,
/// Features enabled for a given package.
features: HashMap<PackageId, HashSet<String>>,
/// Checksum for each package. A SHA256 hash of the `.crate` file used to
/// validate the correct crate file is used. This is `None` for sources
/// that do not use `.crate` files, like path or git dependencies.
checksums: HashMap<PackageId, Option<String>>,
/// "Unknown" metadata. This is a collection of extra, unrecognized data
/// found in the `[metadata]` section of `Cargo.lock`, preserved for
/// forwards compatibility.
metadata: Metadata,
/// `[patch]` entries that did not match anything, preserved in
/// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused
/// patches helps prevent Cargo from being forced to re-update the
/// registry every time it runs, and keeps the resolve in a locked state
/// so it doesn't re-resolve the unused entries.
unused_patches: Vec<PackageId>,
// A map from packages to a set of their public dependencies
/// A map from packages to a set of their public dependencies
public_dependencies: HashMap<PackageId, HashSet<PackageId>>,
/// Version of the `Cargo.lock` format, see
/// `cargo::core::resolver::encode` for more.
version: ResolveVersion,
}

View File

@ -99,19 +99,47 @@ impl ResolverProgress {
/// optimized comparison operators like `is_subset` at the interfaces.
pub type FeaturesSet = Rc<BTreeSet<InternedString>>;
#[derive(Clone, Eq, PartialEq, Hash)]
pub enum Method {
Everything, // equivalent to Required { dev_deps: true, all_features: true, .. }
Required {
dev_deps: bool,
features: FeaturesSet,
all_features: bool,
uses_default_features: bool,
},
/// Options for how the resolve should work.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct ResolveOpts {
/// Whether or not dev-dependencies should be included.
///
/// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`.
pub dev_deps: bool,
/// Set of features to enable (`--features=…`).
pub features: FeaturesSet,
/// Indicates *all* features should be enabled (`--all-features`).
pub all_features: bool,
/// Include the `default` feature (`--no-default-features` sets this false).
pub uses_default_features: bool,
}
impl Method {
pub fn split_features(features: &[String]) -> BTreeSet<InternedString> {
impl ResolveOpts {
/// Creates a ResolveOpts that resolves everything.
pub fn everything() -> ResolveOpts {
ResolveOpts {
dev_deps: true,
features: Rc::new(BTreeSet::new()),
all_features: true,
uses_default_features: true,
}
}
pub fn new(
dev_deps: bool,
features: &[String],
all_features: bool,
uses_default_features: bool,
) -> ResolveOpts {
ResolveOpts {
dev_deps,
features: Rc::new(ResolveOpts::split_features(features)),
all_features,
uses_default_features,
}
}
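// Example (hypothetical usage): the options corresponding to
// `cargo build --features serde --no-default-features` would be built as:
//
//     let opts = ResolveOpts::new(
//         /*dev_deps*/ true,
//         &["serde".to_string()],
//         /*all_features*/ false,
//         /*uses_default_features*/ false,
//     );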
fn split_features(features: &[String]) -> BTreeSet<InternedString> {
features
.iter()
.flat_map(|s| s.split_whitespace())

View File

@ -366,7 +366,7 @@ impl ColorChoice {
}
}
#[cfg(any(target_os = "linux", target_os = "macos"))]
#[cfg(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))]
mod imp {
use std::mem;
@ -377,7 +377,7 @@ mod imp {
pub fn stderr_width() -> Option<usize> {
unsafe {
let mut winsize: libc::winsize = mem::zeroed();
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 {
return None;
}
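// Note: `TIOCGWINSZ` is not the same integer type on every libc target
// (on FreeBSD it is narrower than ioctl's `c_ulong` request parameter),
// so the `.into()` above keeps the call portable.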
if winsize.ws_col > 0 {
@ -396,7 +396,10 @@ mod imp {
}
}
#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))]
#[cfg(all(
unix,
not(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))
))]
mod imp {
pub(super) use super::default_err_erase_line as err_erase_line;
@ -461,7 +464,13 @@ mod imp {
}
}
#[cfg(any(all(unix, not(any(target_os = "linux", target_os = "macos"))), windows,))]
#[cfg(any(
all(
unix,
not(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))
),
windows,
))]
fn default_err_erase_line(shell: &mut Shell) {
if let Some(max_width) = imp::stderr_width() {
let blank = " ".repeat(max_width);

View File

@ -283,7 +283,7 @@ impl<'cfg> Workspace<'cfg> {
.unwrap_or_else(|| Filesystem::new(self.root().join("target")))
}
/// Returns the root [replace] section of this workspace.
/// Returns the root `[replace]` section of this workspace.
///
/// This may be from a virtual crate or an actual crate.
pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] {
@ -293,7 +293,7 @@ impl<'cfg> Workspace<'cfg> {
}
}
/// Returns the root [patch] section of this workspace.
/// Returns the root `[patch]` section of this workspace.
///
/// This may be from a virtual crate or an actual crate.
pub fn root_patch(&self) -> &HashMap<Url, Vec<Dependency>> {

View File

@ -1,36 +1,37 @@
//! Cargo `compile` currently does the following steps.
//! The Cargo "compile" operation.
//!
//! All configurations are already injected as environment variables via the
//! main cargo command.
//! This module contains the entry point for starting the compilation process
//! for commands like `build`, `test`, `doc`, `rustc`, etc.
//!
//! 1. Read the manifest.
//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as
//! stdin.
//! The `compile` function will do all the work to compile a workspace. A
//! rough outline is:
//!
//! a. Shell out to `--do update` and `--do list` for each source.
//! b. Resolve dependencies and return a list of name/version/source.
//!
//! 3. Shell out to `--do download` for each source.
//! 4. Shell out to `--do get` for each source, and build up the list of paths
//! to pass to `rustc -L`.
//! 5. Call `cargo-rustc` with the results of the resolver zipped together with
//! the results of the `get`.
//!
//! a. Topologically sort the dependencies.
//! b. Compile each dependency in order, passing in the -L's pointing at each
//! previously compiled dependency.
//! - Resolve the dependency graph (see `ops::resolve`).
//! - Download any packages needed (see `PackageSet`).
//! - Generate a list of top-level "units" of work for the targets the user
//! requested on the command-line. Each `Unit` corresponds to a compiler
//! invocation. This is done in this module (`generate_targets`).
//! - Create a `Context` which will perform the following steps:
//! - Build the graph of `Unit` dependencies (see
//! `core::compiler::context::unit_dependencies`).
//! - Prepare the `target` directory (see `Layout`).
//! - Create a job queue (see `JobQueue`). The queue checks the
//! fingerprint of each `Unit` to determine if it should run or be
//! skipped.
//! - Execute the queue. Each leaf in the queue's dependency graph is
//! executed, and then removed from the graph when finished. This
//! repeats until the queue is empty.
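//!
//! A minimal sketch of the typical entry point from a command
//! implementation (assuming `CompileOptions::new`, which builds the
//! options described above):
//!
//! ```rust,ignore
//! let opts = CompileOptions::new(config, CompileMode::Build)?;
//! ops::compile(&ws, &opts)?;
//! ```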
use std::collections::{BTreeSet, HashMap, HashSet};
use std::iter::FromIterator;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
use crate::core::compiler::{CompileMode, Kind, Unit};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
use crate::core::profiles::{Profiles, UnitFor};
use crate::core::resolver::{Method, Resolve};
use crate::core::resolver::{Resolve, ResolveOpts};
use crate::core::{Package, Target};
use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace};
use crate::ops;
@ -297,14 +298,9 @@ pub fn compile_ws<'a>(
};
let specs = spec.to_package_id_specs(ws)?;
let features = Method::split_features(features);
let method = Method::Required {
dev_deps: ws.require_optional_deps() || filter.need_dev_deps(build_config.mode),
features: Rc::new(features),
all_features,
uses_default_features: !no_default_features,
};
let resolve = ops::resolve_ws_with_method(ws, method, &specs)?;
let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode);
let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features);
let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
let (packages, resolve_with_overrides) = resolve;
let to_build_ids = specs

View File

@ -5,6 +5,7 @@ use std::path::Path;
use failure::Fail;
use opener;
use crate::core::resolver::ResolveOpts;
use crate::core::Workspace;
use crate::ops;
use crate::util::CargoResult;
@ -21,13 +22,13 @@ pub struct DocOptions<'a> {
/// Main method for `cargo doc`.
pub fn doc(ws: &Workspace<'_>, options: &DocOptions<'_>) -> CargoResult<()> {
let specs = options.compile_opts.spec.to_package_id_specs(ws)?;
let resolve = ops::resolve_ws_precisely(
ws,
let opts = ResolveOpts::new(
/*dev_deps*/ true,
&options.compile_opts.features,
options.compile_opts.all_features,
options.compile_opts.no_default_features,
&specs,
)?;
!options.compile_opts.no_default_features,
);
let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
let (packages, resolve_with_overrides) = resolve;
let ids = specs

View File

@ -4,7 +4,7 @@ use log::debug;
use termcolor::Color::{self, Cyan, Green, Red};
use crate::core::registry::PackageRegistry;
use crate::core::resolver::Method;
use crate::core::resolver::ResolveOpts;
use crate::core::PackageId;
use crate::core::{Resolve, SourceId, Workspace};
use crate::ops;
@ -21,8 +21,15 @@ pub struct UpdateOptions<'a> {
pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> {
let mut registry = PackageRegistry::new(ws.config())?;
let resolve =
ops::resolve_with_previous(&mut registry, ws, Method::Everything, None, None, &[], true)?;
let resolve = ops::resolve_with_previous(
&mut registry,
ws,
ResolveOpts::everything(),
None,
None,
&[],
true,
)?;
ops::write_pkg_lockfile(ws, &resolve)?;
Ok(())
}
@ -57,7 +64,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
ops::resolve_with_previous(
&mut registry,
ws,
Method::Everything,
ResolveOpts::everything(),
None,
None,
&[],
@ -103,7 +110,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
let resolve = ops::resolve_with_previous(
&mut registry,
ws,
Method::Everything,
ResolveOpts::everything(),
Some(&previous_resolve),
Some(&to_avoid),
&[],

View File

@ -8,7 +8,7 @@ use tempfile::Builder as TempFileBuilder;
use crate::core::compiler::Freshness;
use crate::core::compiler::{DefaultExecutor, Executor};
use crate::core::resolver::Method;
use crate::core::resolver::ResolveOpts;
use crate::core::{Edition, PackageId, PackageIdSpec, Source, SourceId, Workspace};
use crate::ops;
use crate::ops::common_for_install_and_uninstall::*;
@ -486,10 +486,10 @@ fn check_yanked_install(ws: &Workspace<'_>) -> CargoResult<()> {
// It would be best if `source` could be passed in here to avoid a
// duplicate "Updating", but since `source` is taken by value, then it
// wouldn't be available for `compile_ws`.
let (pkg_set, resolve) = ops::resolve_ws_with_method(ws, Method::Everything, &specs)?;
let (pkg_set, resolve) = ops::resolve_ws_with_opts(ws, ResolveOpts::everything(), &specs)?;
let mut sources = pkg_set.sources_mut();
// Checking the yanked status invovles taking a look at the registry and
// Checking the yanked status involves taking a look at the registry and
// maybe updating files, so be sure to lock it here.
let _lock = ws.config().acquire_package_cache_lock()?;

View File

@ -4,7 +4,7 @@ use std::path::PathBuf;
use serde::ser;
use serde::Serialize;
use crate::core::resolver::Resolve;
use crate::core::resolver::{Resolve, ResolveOpts};
use crate::core::{Package, PackageId, Workspace};
use crate::ops::{self, Packages};
use crate::util::CargoResult;
@ -50,13 +50,13 @@ fn metadata_no_deps(ws: &Workspace<'_>, _opt: &OutputMetadataOptions) -> CargoRe
fn metadata_full(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
let specs = Packages::All.to_package_id_specs(ws)?;
let (package_set, resolve) = ops::resolve_ws_precisely(
ws,
let opts = ResolveOpts::new(
/*dev_deps*/ true,
&opt.features,
opt.all_features,
opt.no_default_features,
&specs,
)?;
!opt.no_default_features,
);
let (package_set, resolve) = ops::resolve_ws_with_opts(ws, opts, &specs)?;
let mut packages = HashMap::new();
for pkg in package_set.get_many(package_set.package_ids())? {
packages.insert(pkg.package_id(), pkg.clone());

View File

@ -14,7 +14,7 @@ use tar::{Archive, Builder, EntryType, Header};
use termcolor::Color;
use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
use crate::core::resolver::Method;
use crate::core::resolver::ResolveOpts;
use crate::core::Feature;
use crate::core::{
Package, PackageId, PackageIdSpec, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace,
@ -152,7 +152,8 @@ fn build_lock(ws: &Workspace<'_>) -> CargoResult<String> {
// Regenerate Cargo.lock using the old one as a guide.
let specs = vec![PackageIdSpec::from_package_id(new_pkg.package_id())];
let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
let (pkg_set, new_resolve) = ops::resolve_ws_with_method(&tmp_ws, Method::Everything, &specs)?;
let (pkg_set, new_resolve) =
ops::resolve_ws_with_opts(&tmp_ws, ResolveOpts::everything(), &specs)?;
if let Some(orig_resolve) = orig_resolve {
compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
@ -558,7 +559,7 @@ fn compare_resolve(
}
fn check_yanked(config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve) -> CargoResult<()> {
// Checking the yanked status invovles taking a look at the registry and
// Checking the yanked status involves taking a look at the registry and
// maybe updating files, so be sure to lock it here.
let _lock = config.acquire_package_cache_lock()?;

View File

@ -621,7 +621,7 @@ impl FixArgs {
ret.enabled_edition = Some(s[prefix.len()..].to_string());
continue;
}
if s.starts_with("--error-format=") || s.starts_with("--json-rendered=") {
if s.starts_with("--error-format=") || s.starts_with("--json=") {
// Cargo may add error-format in some cases, but `cargo
// fix` wants to add its own.
continue;

View File

@ -23,8 +23,7 @@ pub use self::registry::{http_handle, needs_custom_http_transport, registry_logi
pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
pub use self::registry::{publish, registry_configuration, RegistryConfig};
pub use self::resolve::{
add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_precisely,
resolve_ws_with_method,
add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_with_opts,
};
pub use self::vendor::{vendor, VendorOptions};

View File

@ -1,10 +1,22 @@
//! High-level APIs for executing the resolver.
//!
//! This module provides functions for running the resolver given a workspace.
//! There are roughly 3 main functions:
//!
//! - `resolve_ws`: A simple, high-level function with no options.
//! - `resolve_ws_with_opts`: A medium-level function with options like
//! user-provided features. This is the most appropriate function to use in
//! most cases.
//! - `resolve_with_previous`: A low-level function for running the resolver,
//! providing the most power and flexibility.
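//!
//! For example, a command that honors user-selected features would
//! typically do (a minimal sketch, mirroring `compile_ws`):
//!
//! ```rust,ignore
//! let opts = ResolveOpts::new(/*dev_deps*/ true, &features, all_features, !no_default_features);
//! let (pkg_set, resolve) = ops::resolve_ws_with_opts(ws, opts, &specs)?;
//! ```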
use std::collections::HashSet;
use std::rc::Rc;
use log::{debug, trace};
use crate::core::registry::PackageRegistry;
use crate::core::resolver::{self, Method, Resolve};
use crate::core::resolver::{self, Resolve, ResolveOpts};
use crate::core::Feature;
use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
use crate::ops;
@ -21,8 +33,12 @@ version. This may also occur with an optional dependency that is not enabled.";
/// Resolves all dependencies for the workspace using the previous
/// lock file as a guide if present.
///
/// This function will also write the result of resolution as a new
/// lock file.
/// This function will also write the result of resolution as a new lock file
/// (unless it is an ephemeral workspace such as `cargo install` or `cargo
/// package`).
///
/// This is a simple interface used by commands like `clean`, `fetch`, and
/// `package`, which don't specify any options or features.
pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> {
let mut registry = PackageRegistry::new(ws.config())?;
let resolve = resolve_with_registry(ws, &mut registry)?;
@ -32,30 +48,17 @@ pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolv
/// Resolves dependencies for some packages of the workspace,
/// taking into account `paths` overrides and activated features.
pub fn resolve_ws_precisely<'a>(
///
/// This function will also write the result of resolution as a new lock file
/// (unless `Workspace::require_optional_deps` is false, such as `cargo
/// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo
/// install` or `cargo package`).
///
/// `specs` may be empty, which indicates it should resolve all workspace
/// members. In this case, `opts.all_features` must be `true`.
pub fn resolve_ws_with_opts<'a>(
ws: &Workspace<'a>,
features: &[String],
all_features: bool,
no_default_features: bool,
specs: &[PackageIdSpec],
) -> CargoResult<(PackageSet<'a>, Resolve)> {
let features = Method::split_features(features);
let method = if all_features {
Method::Everything
} else {
Method::Required {
dev_deps: true,
features: Rc::new(features),
all_features: false,
uses_default_features: !no_default_features,
}
};
resolve_ws_with_method(ws, method, specs)
}
pub fn resolve_ws_with_method<'a>(
ws: &Workspace<'a>,
method: Method,
opts: ResolveOpts,
specs: &[PackageIdSpec],
) -> CargoResult<(PackageSet<'a>, Resolve)> {
let mut registry = PackageRegistry::new(ws.config())?;
@ -67,6 +70,7 @@ pub fn resolve_ws_with_method<'a>(
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let resolve = resolve_with_registry(ws, &mut registry)?;
// No need to add patches again, `resolve_with_registry` has done it.
add_patches = false;
// Second, resolve with precisely what we're doing. Filter out
@ -92,10 +96,10 @@ pub fn resolve_ws_with_method<'a>(
ops::load_pkg_lockfile(ws)?
};
let resolved_with_overrides = ops::resolve_with_previous(
let resolved_with_overrides = resolve_with_previous(
&mut registry,
ws,
method,
opts,
resolve.as_ref(),
None,
specs,
@ -115,7 +119,7 @@ fn resolve_with_registry<'cfg>(
let resolve = resolve_with_previous(
registry,
ws,
Method::Everything,
ResolveOpts::everything(),
prev.as_ref(),
None,
&[],
@ -137,15 +141,26 @@ fn resolve_with_registry<'cfg>(
///
/// The previous resolve normally comes from a lock file. This function does not
/// read or write lock files from the filesystem.
///
/// `specs` may be empty, which indicates it should resolve all workspace
/// members. In this case, `opts.all_features` must be `true`.
///
/// If `register_patches` is true, then entries from the `[patch]` table in
/// the manifest will be added to the given `PackageRegistry`.
pub fn resolve_with_previous<'cfg>(
registry: &mut PackageRegistry<'cfg>,
ws: &Workspace<'cfg>,
method: Method,
opts: ResolveOpts,
previous: Option<&Resolve>,
to_avoid: Option<&HashSet<PackageId>>,
specs: &[PackageIdSpec],
register_patches: bool,
) -> CargoResult<Resolve> {
assert!(
!specs.is_empty() || opts.all_features,
"no specs requires all_features"
);
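// Note: whole-workspace callers such as `resolve_with_registry` above pass
// `ResolveOpts::everything()` (which sets `all_features: true`) together
// with empty `specs`, satisfying this assertion by construction.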
// We only want one Cargo at a time resolving a crate graph since this can
// involve a lot of frobbing of the global caches.
let _lock = ws.config().acquire_package_cache_lock()?;
@ -228,85 +243,75 @@ pub fn resolve_with_previous<'cfg>(
let mut summaries = Vec::new();
if ws.config().cli_unstable().package_features {
let mut members = Vec::new();
match &method {
Method::Everything => members.extend(ws.members()),
Method::Required {
features,
all_features,
uses_default_features,
..
} => {
if specs.len() > 1 && !features.is_empty() {
failure::bail!("cannot specify features for more than one package");
}
members.extend(
ws.members()
.filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))),
);
// Edge case: running `cargo build -p foo`, where `foo` is not a member
// of current workspace. Add all packages from workspace to get `foo`
// into the resolution graph.
if members.is_empty() {
if !(features.is_empty() && !all_features && *uses_default_features) {
failure::bail!("cannot specify features for packages outside of workspace");
}
members.extend(ws.members());
if specs.is_empty() {
members.extend(ws.members());
} else {
if specs.len() > 1 && !opts.features.is_empty() {
failure::bail!("cannot specify features for more than one package");
}
members.extend(
ws.members()
.filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))),
);
// Edge case: running `cargo build -p foo`, where `foo` is not a member
// of current workspace. Add all packages from workspace to get `foo`
// into the resolution graph.
if members.is_empty() {
if !(opts.features.is_empty() && !opts.all_features && opts.uses_default_features) {
failure::bail!("cannot specify features for packages outside of workspace");
}
members.extend(ws.members());
}
}
for member in members {
let summary = registry.lock(member.summary().clone());
summaries.push((summary, method.clone()))
summaries.push((summary, opts.clone()))
}
} else {
for member in ws.members() {
let method_to_resolve = match method {
// When everything for a workspace we want to be sure to resolve all
// members in the workspace, so propagate the `Method::Everything`.
Method::Everything => Method::Everything,
let summary_resolve_opts = if specs.is_empty() {
// When resolving the entire workspace, resolve each member
// with all features enabled.
opts.clone()
} else {
// If we're not resolving everything though then we're constructing the
// exact crate graph we're going to build. Here we don't necessarily
// want to keep around all workspace crates as they may not all be
// built/tested.
//
// Additionally, the `method` specified represents command line
// Additionally, the `opts` specified represents command line
// flags, which really only matters for the current package
// (determined by the cwd). If other packages are specified (via
// `-p`) then the command line flags like features don't apply to
// them.
//
// As a result, if this `member` is the current member of the
// workspace, then we use `method` specified. Otherwise we use a
// base method with no features specified but using default features
// workspace, then we use `opts` specified. Otherwise we use a
// base `opts` with no features specified but using default features
// for any other packages specified with `-p`.
Method::Required {
dev_deps,
all_features,
..
} => {
let base = Method::Required {
dev_deps,
features: Rc::default(),
all_features,
uses_default_features: true,
};
let member_id = member.package_id();
match ws.current_opt() {
Some(current) if member_id == current.package_id() => method.clone(),
_ => {
if specs.iter().any(|spec| spec.matches(member_id)) {
base
} else {
continue;
let member_id = member.package_id();
match ws.current_opt() {
Some(current) if member_id == current.package_id() => opts.clone(),
_ => {
if specs.iter().any(|spec| spec.matches(member_id)) {
// -p for a workspace member that is not the
// "current" one, don't use the local `--features`.
ResolveOpts {
dev_deps: opts.dev_deps,
features: Rc::default(),
all_features: opts.all_features,
uses_default_features: true,
}
} else {
// `-p` for non-member, skip.
continue;
}
}
}
};
let summary = registry.lock(member.summary().clone());
summaries.push((summary, method_to_resolve));
summaries.push((summary, summary_resolve_opts));
}
};

View File

@ -219,9 +219,17 @@ impl<'cfg> PathSource<'cfg> {
// the untracked files are often part of a build and may become relevant
// as part of a future commit.
let index_files = index.iter().map(|entry| {
use libgit2_sys::GIT_FILEMODE_COMMIT;
let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
(join(root, &entry.path), Some(is_dir))
use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK};
// `is_dir` is an optimization to avoid calling
// `fs::metadata` on every file.
let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 {
// Let the code below figure out if this symbolic link points
// to a directory or not.
None
} else {
Some(entry.mode == GIT_FILEMODE_COMMIT as u32)
};
(join(root, &entry.path), is_dir)
});
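// Note: in the git index, mode 0160000 (GIT_FILEMODE_COMMIT) marks a
// submodule entry, which is a directory on disk, while 0120000
// (GIT_FILEMODE_LINK) marks a symlink whose target kind cannot be known
// from the index alone; hence the `None` above defers to the code below.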
let mut opts = git2::StatusOptions::new();
opts.include_untracked(true);

View File

@ -1,7 +1,3 @@
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::PathBuf;
use crate::core::compiler::{BuildConfig, MessageFormat};
use crate::core::Workspace;
use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl};
@ -14,6 +10,10 @@ use crate::util::{
};
use crate::CargoResult;
use clap::{self, SubCommand};
use failure::bail;
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::PathBuf;
pub use crate::core::compiler::{CompileMode, ProfileKind};
pub use crate::{CliError, CliResult, Config};
@ -138,13 +138,7 @@ pub trait AppExt: Sized {
}
fn arg_message_format(self) -> Self {
self._arg(
opt("message-format", "Error format")
.value_name("FMT")
.case_insensitive(true)
.possible_values(&["human", "json", "short"])
.default_value("human"),
)
self._arg(multi_opt("message-format", "FMT", "Error format"))
}
fn arg_build_plan(self) -> Self {
@ -350,23 +344,70 @@ pub trait ArgMatchesExt {
self._values_of("package"),
)?;
let message_format = match self._value_of("message-format") {
None => MessageFormat::Human,
Some(f) => {
if f.eq_ignore_ascii_case("json") {
MessageFormat::Json
} else if f.eq_ignore_ascii_case("human") {
MessageFormat::Human
} else if f.eq_ignore_ascii_case("short") {
MessageFormat::Short
} else {
panic!("Impossible message format: {:?}", f)
let mut message_format = None;
let default_json = MessageFormat::Json {
short: false,
ansi: false,
render_diagnostics: false,
};
for fmt in self._values_of("message-format") {
for fmt in fmt.split(',') {
let fmt = fmt.to_ascii_lowercase();
match fmt.as_str() {
"json" => {
if message_format.is_some() {
bail!("cannot specify two kinds of `message-format` arguments");
}
message_format = Some(default_json);
}
"human" => {
if message_format.is_some() {
bail!("cannot specify two kinds of `message-format` arguments");
}
message_format = Some(MessageFormat::Human);
}
"short" => {
if message_format.is_some() {
bail!("cannot specify two kinds of `message-format` arguments");
}
message_format = Some(MessageFormat::Short);
}
"json-render-diagnostics" => {
if message_format.is_none() {
message_format = Some(default_json);
}
match &mut message_format {
Some(MessageFormat::Json {
render_diagnostics, ..
}) => *render_diagnostics = true,
_ => bail!("cannot specify two kinds of `message-format` arguments"),
}
}
"json-diagnostic-short" => {
if message_format.is_none() {
message_format = Some(default_json);
}
match &mut message_format {
Some(MessageFormat::Json { short, .. }) => *short = true,
_ => bail!("cannot specify two kinds of `message-format` arguments"),
}
}
"json-diagnostic-rendered-ansi" => {
if message_format.is_none() {
message_format = Some(default_json);
}
match &mut message_format {
Some(MessageFormat::Json { ansi, .. }) => *ansi = true,
_ => bail!("cannot specify two kinds of `message-format` arguments"),
}
}
s => bail!("invalid message format specifier: `{}`", s),
}
}
};
}
let mut build_config = BuildConfig::new(config, self.jobs()?, &self.target(), mode)?;
build_config.message_format = message_format;
build_config.message_format = message_format.unwrap_or(MessageFormat::Human);
build_config.profile_kind =
self.get_profile_kind(config, ProfileKind::Dev, profile_checking)?;
build_config.build_plan = self._is_present("build-plan");
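// For illustration, with the merge logic above:
//   --message-format json-diagnostic-short,json-diagnostic-rendered-ansi
//     parses to MessageFormat::Json { short: true, ansi: true, render_diagnostics: false },
// while mixing base kinds, e.g. `--message-format short --message-format json`,
// is rejected with "cannot specify two kinds of `message-format` arguments".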

View File

@ -321,27 +321,8 @@ fn acquire(
let msg = format!("waiting for file lock on {}", msg);
config.shell().status_with_color("Blocking", &msg, Cyan)?;
// We're about to block the current process and not really do anything
// productive for what could possibly be a very long time. We could be
// waiting, for example, on another Cargo to finish a download, finish an
// entire build, etc. Since we're not doing anything productive we're not
// making good use of our jobserver token, if we have one.
//
// This can typically come about if `cargo` is invoked from `make` (or some
// other jobserver-providing system). In this situation it's actually best
// if we release the token back to the original jobserver to let some other
// cpu-hungry work continue to make progress. After we're done blocking
// we'll block waiting to reacquire a token as we'll probably be doing cpu
// hungry work ourselves.
let jobserver = config.jobserver_from_env();
if let Some(server) = jobserver {
server.release_raw()?;
}
let result = block().chain_err(|| format!("failed to lock file: {}", path.display()));
if let Some(server) = jobserver {
server.acquire_raw()?;
}
return Ok(result?);
block().chain_err(|| format!("failed to lock file: {}", path.display()))?;
return Ok(());
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
fn is_on_nfs_mount(path: &Path) -> bool {

View File

@ -73,9 +73,12 @@ fn maybe_spurious(err: &Error) -> bool {
///
/// # Examples
///
/// ```ignore
/// use util::network;
/// cargo_result = network::with_retry(&config, || something.download());
/// ```
/// # use crate::cargo::util::{CargoResult, Config};
/// # let download_something = || return Ok(());
/// # let config = Config::default().unwrap();
/// use cargo::util::network;
/// let cargo_result = network::with_retry(&config, || download_something());
/// ```
pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
where

View File

@ -301,17 +301,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -235,17 +235,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -237,17 +237,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -205,17 +205,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -308,17 +308,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -35,7 +35,7 @@ for a Rust API for reading the metadata.</p>
</div>
<div class="listingblock">
<div class="content">
<pre class="highlightjs highlight"><code class="language-javascript hljs" data-lang="javascript">{
<pre class="highlightjs highlight"><code data-lang="javascript" class="language-javascript hljs">{
/* Array of all packages in the workspace.
It also includes all feature-enabled dependencies unless --no-deps is used.
*/

View File

@ -168,17 +168,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -229,17 +229,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -242,17 +242,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics
in JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -326,17 +326,33 @@ terminal.</p>
</dd>
<dt class="hdlist1"><strong>--message-format</strong> <em>FMT</em></dt>
<dd>
<p>The output format for diagnostic messages. Valid values:</p>
<p>The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:</p>
<div class="ulist">
<ul>
<li>
<p><code>human</code> (default): Display in a human-readable text format.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
</li>
<li>
<p><code>json</code>: Emit JSON messages to stdout.</p>
</li>
<li>
<p><code>short</code>: Emit shorter, human-readable text messages.</p>
<p><code>json-diagnostic-short</code>: Ensure the <code>rendered</code> field of JSON messages contains
the "short" rendering from rustc.</p>
</li>
<li>
<p><code>json-diagnostic-rendered-ansi</code>: Ensure the <code>rendered</code> field of JSON messages
contains embedded ANSI color codes for respecting rustc&#8217;s default color
scheme.</p>
</li>
<li>
<p><code>json-render-diagnostics</code>: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo&#8217;s own JSON diagnostics and others
coming from rustc are still emitted.</p>
</li>
</ul>
</div>

View File

@ -1,6 +1,16 @@
*--message-format* _FMT_::
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma-separated values. Valid values:
+
- `human` (default): Display in a human-readable text format.
- `json`: Emit JSON messages to stdout.
- `short`: Emit shorter, human-readable text messages.
- `json`: Emit JSON messages to stdout.
- `json-diagnostic-short`: Ensure the `rendered` field of JSON messages contains
the "short" rendering from rustc.
- `json-diagnostic-rendered-ansi`: Ensure the `rendered` field of JSON messages
contains embedded ANSI color codes for respecting rustc's default color
scheme.
- `json-render-diagnostics`: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others
coming from rustc are still emitted.

View File

@ -628,7 +628,7 @@ dependencies residing in the workspace directory become members. You can add
additional packages to the workspace by listing them in the `members` key. Note
that members of the workspaces listed explicitly will also have their path
dependencies included in the workspace. Sometimes a package may have a lot of
workspace members and it can be onerous to keep up to date. The path dependency
workspace members and it can be onerous to keep up to date. The `members` list
can also use [globs][globs] to match multiple paths. Finally, the `exclude`
key can be used to blacklist paths from being included in a workspace. This can
be useful if some path dependencies aren't desired to be in the workspace at all.
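For illustration, a workspace manifest combining these keys might look like the following (member paths here are hypothetical):

[workspace]
# Globs expand to every package directory that matches.
members = ["crates/*"]
# Paths listed here stay out of the workspace even if matched above.
exclude = ["crates/experimental"]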

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-bench
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-08
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-BENCH" "1" "2019-05-08" "\ \&" "\ \&"
.TH "CARGO\-BENCH" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -347,7 +347,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -368,6 +369,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -379,7 +391,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-build
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-08
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-BUILD" "1" "2019-05-08" "\ \&" "\ \&"
.TH "CARGO\-BUILD" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -256,7 +256,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -277,6 +278,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -288,7 +300,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.sp

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-check
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-CHECK" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-CHECK" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -256,7 +256,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -277,6 +278,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -288,7 +300,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-clean
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-CLEAN" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-CLEAN" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-doc
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-DOC" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-DOC" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -213,7 +213,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -234,6 +235,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -245,7 +257,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-fetch
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-12
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-FETCH" "1" "2019-05-12" "\ \&" "\ \&"
.TH "CARGO\-FETCH" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-fix
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-FIX" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-FIX" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -326,7 +326,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -347,6 +348,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -358,7 +370,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-generate-lockfile
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-GENERATE\-LOCKFILE" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-GENERATE\-LOCKFILE" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-help
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2018-12-20
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-HELP" "1" "2018-12-20" "\ \&" "\ \&"
.TH "CARGO\-HELP" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-init
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-01-23
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-INIT" "1" "2019-01-23" "\ \&" "\ \&"
.TH "CARGO\-INIT" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-install
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-06-10
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-07-15
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-INSTALL" "1" "2019-06-10" "\ \&" "\ \&"
.TH "CARGO\-INSTALL" "1" "2019-07-15" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-locate-project
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2018-12-20
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-LOCATE\-PROJECT" "1" "2018-12-20" "\ \&" "\ \&"
.TH "CARGO\-LOCATE\-PROJECT" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-login
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-01-23
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-LOGIN" "1" "2019-01-23" "\ \&" "\ \&"
.TH "CARGO\-LOGIN" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-metadata
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-20
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-METADATA" "1" "2019-05-20" "\ \&" "\ \&"
.TH "CARGO\-METADATA" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-new
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-01-23
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-NEW" "1" "2019-01-23" "\ \&" "\ \&"
.TH "CARGO\-NEW" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-owner
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-02-05
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-OWNER" "1" "2019-02-05" "\ \&" "\ \&"
.TH "CARGO\-OWNER" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-package
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-06-10
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-07-15
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-PACKAGE" "1" "2019-06-10" "\ \&" "\ \&"
.TH "CARGO\-PACKAGE" "1" "2019-07-15" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-pkgid
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-PKGID" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-PKGID" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-publish
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-08
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-PUBLISH" "1" "2019-05-08" "\ \&" "\ \&"
.TH "CARGO\-PUBLISH" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-run
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-06-21
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-07-15
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-RUN" "1" "2019-06-21" "\ \&" "\ \&"
.TH "CARGO\-RUN" "1" "2019-07-15" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -176,7 +176,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -197,6 +198,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -208,7 +220,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-rustc
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-RUSTC" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-RUSTC" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -245,7 +245,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -266,6 +267,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -277,7 +289,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-rustdoc
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-RUSTDOC" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-RUSTDOC" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -253,7 +253,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -274,6 +275,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -285,7 +297,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-search
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-01-23
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-SEARCH" "1" "2019-01-23" "\ \&" "\ \&"
.TH "CARGO\-SEARCH" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-test
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-08
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-TEST" "1" "2019-05-08" "\ \&" "\ \&"
.TH "CARGO\-TEST" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -389,7 +389,8 @@ May also be specified with the \fBterm.color\fP
.sp
\fB\-\-message\-format\fP \fIFMT\fP
.RS 4
The output format for diagnostic messages. Valid values:
The output format for diagnostic messages. Can be specified multiple times
and consists of comma\-separated values. Valid values:
.sp
.RS 4
.ie n \{\
@ -410,6 +411,17 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\fP: Emit JSON messages to stdout.
.RE
.sp
@ -421,7 +433,35 @@ The output format for diagnostic messages. Valid values:
. sp -1
. IP \(bu 2.3
.\}
\fBshort\fP: Emit shorter, human\-readable text messages.
\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains
the "short" rendering from rustc.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages
contains embedded ANSI color codes for respecting rustc\(cqs default color
scheme.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
. sp -1
. IP \(bu 2.3
.\}
\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in
JSON messages printed, but instead Cargo itself should render the
JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others
coming from rustc are still emitted.
.RE
.RE
.SS "Manifest Options"

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-uninstall
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2018-12-20
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-UNINSTALL" "1" "2018-12-20" "\ \&" "\ \&"
.TH "CARGO\-UNINSTALL" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-update
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-UPDATE" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-UPDATE" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -2,12 +2,12 @@
.\" Title: cargo-vendor
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-04-29
.\" Date: 2019-07-15
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-VENDOR" "1" "2019-04-29" "\ \&" "\ \&"
.TH "CARGO\-VENDOR" "1" "2019-07-15" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -149,6 +149,23 @@ These may be used in environments where you want to assert that the
\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network
access.
.RE
.sp
\fB\-\-offline\fP
.RS 4
Prevents Cargo from accessing the network for any reason. Without this
flag, Cargo will stop with an error if it needs to access the network and
the network is not available. With this flag, Cargo will attempt to
proceed without the network if possible.
.sp
Beware that this may result in different dependency resolution than online
mode. Cargo will restrict itself to crates that are downloaded locally, even
if there might be a newer version as indicated in the local copy of the index.
See the \fBcargo\-fetch\fP(1) command to download dependencies before going
offline.
.sp
May also be specified with the \fBnet.offline\fP \c
.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
.RE
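The `net.offline` value mentioned above lives in Cargo's configuration file; a minimal sketch of the equivalent setting:

# .cargo/config — equivalent to passing --offline on each invocation
[net]
offline = true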
.SH "ENVIRONMENT"
.sp
See \c

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-verify-project
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-04-16
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-VERIFY\-PROJECT" "1" "2019-04-16" "\ \&" "\ \&"
.TH "CARGO\-VERIFY\-PROJECT" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-version
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2018-12-20
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-VERSION" "1" "2018-12-20" "\ \&" "\ \&"
.TH "CARGO\-VERSION" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo-yank
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-01-23
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO\-YANK" "1" "2019-01-23" "\ \&" "\ \&"
.TH "CARGO\-YANK" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0

View File

@ -1,13 +1,13 @@
'\" t
.\" Title: cargo
.\" Author: [see the "AUTHOR(S)" section]
.\" Generator: Asciidoctor 1.5.8
.\" Date: 2019-05-20
.\" Generator: Asciidoctor 2.0.8
.\" Date: 2019-06-07
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "CARGO" "1" "2019-05-20" "\ \&" "\ \&"
.TH "CARGO" "1" "2019-06-07" "\ \&" "\ \&"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.ss \n[.ss] 0
@ -487,4 +487,4 @@ See \c
for issues.
.SH "SEE ALSO"
.sp
\fBrustc\fP(1), \fBrustdoc\fP(1)
\fBrustc\fP(1), \fBrustdoc\fP(1)

View File

@ -58,7 +58,7 @@ fn bench_bench_implicit() {
.file(
"src/main.rs",
r#"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
@ -364,7 +364,7 @@ fn bench_with_lib_dep() {
.file(
"src/lib.rs",
r#"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
///
@ -432,7 +432,7 @@ fn bench_with_deep_lib_dep() {
.file(
"src/lib.rs",
"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate foo;
#[cfg(test)]
@ -448,7 +448,7 @@ fn bench_with_deep_lib_dep() {
.file(
"src/lib.rs",
"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
@ -495,7 +495,7 @@ fn external_bench_explicit() {
.file(
"src/lib.rs",
r#"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
@ -541,7 +541,7 @@ fn external_bench_implicit() {
.file(
"src/lib.rs",
r#"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
@ -760,7 +760,7 @@ fn lib_bin_same_name() {
.file(
"src/lib.rs",
"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
@ -769,7 +769,7 @@ fn lib_bin_same_name() {
.file(
"src/main.rs",
"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
#[cfg(test)]
@ -804,7 +804,7 @@ fn lib_with_standard_name() {
.file(
"src/lib.rs",
"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
@ -919,7 +919,7 @@ fn bench_dylib() {
.file(
"src/lib.rs",
r#"
#![cfg_attr(test, feature(test))]
#![feature(test)]
extern crate bar as the_bar;
#[cfg(test)]
extern crate test;
@ -1061,7 +1061,7 @@ fn bench_with_examples() {
.file(
"src/lib.rs",
r#"
#![cfg_attr(test, feature(test))]
#![feature(test)]
#[cfg(test)]
extern crate test;
#[cfg(test)]
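All of these fixtures reduce to the same nightly-only shape once `#![feature(test)]` is unconditional. A minimal standalone sketch of that shape (run with `cargo bench` on a nightly toolchain; the bench name is invented):

#![feature(test)]
extern crate test;

#[bench]
fn bench_noop(b: &mut test::Bencher) {
    // `iter` runs the closure repeatedly and reports ns/iter.
    b.iter(|| ());
}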

View File

@ -4,11 +4,10 @@ use std::io::prelude::*;
use crate::support::paths::{root, CargoPathExt};
use crate::support::registry::Package;
use crate::support::ProjectBuilder;
use crate::support::{
basic_bin_manifest, basic_lib_manifest, basic_manifest, rustc_host, sleep_ms,
basic_bin_manifest, basic_lib_manifest, basic_manifest, main_file, project, rustc_host,
sleep_ms, symlink_supported, Execs, ProjectBuilder,
};
use crate::support::{main_file, project, Execs};
use cargo::util::paths::dylib_path_envvar;
#[cargo_test]
@ -1495,9 +1494,12 @@ package `test v0.0.0 ([CWD])`",
}
#[cargo_test]
/// Make sure broken symlinks don't break the build
///
/// This test requires you to be able to make symlinks.
/// For windows, this may require you to enable developer mode.
fn ignore_broken_symlinks() {
// windows and symlinks don't currently agree that well
if cfg!(windows) {
if !symlink_supported() {
return;
}
@ -3262,11 +3264,10 @@ fn wrong_message_format_option() {
.build();
p.cargo("build --message-format XML")
.with_status(1)
.with_status(101)
.with_stderr_contains(
"\
error: 'XML' isn't a valid value for '--message-format <FMT>'
<tab>[possible values: human, json, short]
error: invalid message format specifier: `xml`
",
)
.run();

View File

@ -195,7 +195,6 @@ fn custom_build_script_wrong_rustc_flags() {
.run();
}
/*
#[cargo_test]
fn custom_build_script_rustc_flags() {
let p = project()
@ -211,7 +210,8 @@ fn custom_build_script_rustc_flags() {
[dependencies.foo]
path = "foo"
"#,
).file("src/main.rs", "fn main() {}")
)
.file("src/main.rs", "fn main() {}")
.file(
"foo/Cargo.toml",
r#"
@ -222,7 +222,8 @@ fn custom_build_script_rustc_flags() {
authors = ["wycats@example.com"]
build = "build.rs"
"#,
).file("foo/src/lib.rs", "")
)
.file("foo/src/lib.rs", "")
.file(
"foo/build.rs",
r#"
@ -230,25 +231,28 @@ fn custom_build_script_rustc_flags() {
println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2");
}
"#,
).build();
)
.build();
// TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works).
p.cargo("build --verbose")
.with_status(101)
.with_stderr(
"\
[COMPILING] bar v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name test [CWD]/src/lib.rs --crate-type lib -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=-[..] \
--out-dir [CWD]/target \
--emit=[..]link \
-L [CWD]/target \
-L [CWD]/target/deps`
[COMPILING] foo [..]
[RUNNING] `rustc --crate-name build_script_build foo/build.rs [..]
[RUNNING] `[..]build-script-build`
[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\
-L dependency=[CWD]/target/debug/deps \
-L /dummy/path1 -L /dummy/path2 -l nonexistinglib`
[COMPILING] bar [..]
[RUNNING] `rustc --crate-name bar src/main.rs [..]\
-L dependency=[CWD]/target/debug/deps \
--extern foo=[..]libfoo-[..] \
-L /dummy/path1 -L /dummy/path2`
[FINISHED] dev [..]
",
).run();
)
.run();
}
*/
#[cargo_test]
fn links_no_build_cmd() {
@ -2155,6 +2159,11 @@ fn flags_go_into_tests() {
#[cargo_test]
fn diamond_passes_args_only_once() {
// FIXME: when pipelining rides to stable, enable this test on all channels.
if !crate::support::is_nightly() {
return;
}
let p = project()
.file(
"Cargo.toml",
@ -2229,7 +2238,7 @@ fn diamond_passes_args_only_once() {
[COMPILING] a v0.5.0 ([..]
[RUNNING] `rustc [..]`
[COMPILING] foo v0.5.0 ([..]
[RUNNING] `[..]rlib -L native=test`
[RUNNING] `[..]rmeta -L native=test`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)

View File

@ -54,6 +54,52 @@ fn simple() {
assert!(cargo_output2.stdout.is_empty());
}
// same as `simple`, except everything is using the short format
#[cargo_test]
fn simple_short() {
if !is_nightly() {
// --json-rendered is unstable
return;
}
let p = project()
.file(
"src/lib.rs",
"
fn a() {}
fn b() {}
",
)
.build();
let agnostic_path = Path::new("src").join("lib.rs");
let agnostic_path_s = agnostic_path.to_str().unwrap();
let rustc_output = process("rustc")
.cwd(p.root())
.args(&["--crate-type=lib", agnostic_path_s, "--error-format=short"])
.exec_with_output()
.expect("rustc to run");
assert!(rustc_output.stdout.is_empty());
assert!(rustc_output.status.success());
let cargo_output1 = p
.cargo("check -Zcache-messages -q --color=never --message-format=short")
.masquerade_as_nightly_cargo()
.exec_with_output()
.expect("cargo to run");
assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output1.stderr));
// assert!(cargo_output1.stdout.is_empty());
let cargo_output2 = p
.cargo("check -Zcache-messages -q --message-format=short")
.masquerade_as_nightly_cargo()
.exec_with_output()
.expect("cargo to run");
println!("{}", String::from_utf8_lossy(&cargo_output2.stdout));
assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output2.stderr));
assert!(cargo_output2.stdout.is_empty());
}
#[cargo_test]
fn color() {
if !is_nightly() {
@ -334,15 +380,3 @@ fn very_verbose() {
.with_stderr_contains("[..]not_used[..]")
.run();
}
#[cargo_test]
fn short_incompatible() {
let p = project().file("src/lib.rs", "").build();
p.cargo("check -Zcache-messages --message-format=short")
.masquerade_as_nightly_cargo()
.with_stderr(
"[ERROR] currently `--message-format short` is incompatible with cached output",
)
.with_status(101)
.run();
}

View File

@ -209,7 +209,6 @@ fn plugin_deps() {
use syntax::source_map::Span;
use syntax::ast::*;
use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
use syntax::ext::build::AstBuilder;
#[plugin_registrar]
pub fn foo(reg: &mut Registry) {
@ -306,7 +305,6 @@ fn plugin_to_the_max() {
use syntax::source_map::Span;
use syntax::ast::*;
use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
#[plugin_registrar]

View File

@ -511,6 +511,6 @@ fn canonical_path() {
assert_deps_contains(
&p,
"target/debug/.fingerprint/foo-*/dep-lib-foo-*",
&[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rlib")],
&[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rmeta")],
);
}

View File

@ -1425,6 +1425,9 @@ fn combining_features_and_package() {
p.cargo("run -Z package-features --package bar --features main")
.masquerade_as_nightly_cargo()
.run();
p.cargo("build -Z package-features --package dep")
.masquerade_as_nightly_cargo()
.run();
}
#[cargo_test]

View File

@ -937,7 +937,7 @@ fn both_edition_migrate_flags() {
error: The argument '--edition' cannot be used with '--prepare-for <prepare-for>'
USAGE:
cargo[..] fix --edition --message-format <FMT>
cargo[..] fix --edition
For more information try --help
";

View File

@ -57,6 +57,7 @@ mod local_registry;
mod lockfile_compat;
mod login;
mod member_errors;
mod message_format;
mod metabuild;
mod metadata;
mod net_config;

View File

@ -0,0 +1,126 @@
use crate::support::{basic_manifest, project};
#[cargo_test]
fn cannot_specify_two() {
if !crate::support::is_nightly() {
return;
}
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
.file("src/main.rs", "fn main() {}")
.build();
let formats = ["human", "json", "short"];
let two_kinds = "error: cannot specify two kinds of `message-format` arguments\n";
for a in formats.iter() {
for b in formats.iter() {
p.cargo(&format!("build --message-format {},{}", a, b))
.with_status(101)
.with_stderr(two_kinds)
.run();
}
}
}
#[cargo_test]
fn double_json_works() {
if !crate::support::is_nightly() {
return;
}
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
.file("src/main.rs", "fn main() {}")
.build();
p.cargo("build --message-format json,json-render-diagnostics")
.run();
p.cargo("build --message-format json,json-diagnostic-short")
.run();
p.cargo("build --message-format json,json-diagnostic-rendered-ansi")
.run();
p.cargo("build --message-format json --message-format json-diagnostic-rendered-ansi")
.run();
p.cargo("build --message-format json-diagnostic-rendered-ansi")
.run();
p.cargo("build --message-format json-diagnostic-short,json-diagnostic-rendered-ansi")
.run();
}
#[cargo_test]
fn cargo_renders() {
if !crate::support::is_nightly() {
return;
}
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = 'foo'
version = '0.1.0'
[dependencies]
bar = { path = 'bar' }
"#,
)
.file("src/main.rs", "")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "")
.build();
p.cargo("build --message-format json-render-diagnostics")
.with_status(101)
.with_stdout("{\"reason\":\"compiler-artifact\",[..]")
.with_stderr_contains(
"\
[COMPILING] bar [..]
[COMPILING] foo [..]
error[..]`main`[..]
",
)
.run();
}
#[cargo_test]
fn cargo_renders_short() {
if !crate::support::is_nightly() {
return;
}
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
.file("src/main.rs", "")
.build();
p.cargo("build --message-format json-render-diagnostics,json-diagnostic-short")
.with_status(101)
.with_stderr_contains(
"\
[COMPILING] foo [..]
error[..]`main`[..]
",
)
.with_stderr_does_not_contain("note:")
.run();
}
#[cargo_test]
fn cargo_renders_ansi() {
if !crate::support::is_nightly() {
return;
}
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
.file("src/main.rs", "")
.build();
p.cargo("build --message-format json-diagnostic-rendered-ansi")
.with_status(101)
.with_stdout_contains("[..]\\u001b[38;5;9merror[..]")
.run();
}

View File

@ -3,11 +3,11 @@ use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use crate::support::cargo_process;
use crate::support::paths::CargoPathExt;
use crate::support::registry::Package;
use crate::support::{
basic_manifest, git, path2url, paths, project, publish::validate_crate_contents, registry,
basic_manifest, cargo_process, git, path2url, paths, project, publish::validate_crate_contents,
registry, symlink_supported,
};
use git2;
@ -504,6 +504,56 @@ fn package_git_submodule() {
.run();
}
#[cargo_test]
/// Tests if a symlink to a git submodule is properly handled.
///
/// This test requires you to be able to make symlinks.
/// For windows, this may require you to enable developer mode.
fn package_symlink_to_submodule() {
#[cfg(unix)]
use std::os::unix::fs::symlink;
#[cfg(windows)]
use std::os::windows::fs::symlink_dir as symlink;
if !symlink_supported() {
return;
}
let project = git::new("foo", |project| {
project.file("src/lib.rs", "pub fn foo() {}")
})
.unwrap();
let library = git::new("submodule", |library| {
library.no_manifest().file("Makefile", "all:")
})
.unwrap();
let repository = git2::Repository::open(&project.root()).unwrap();
let url = path2url(library.root()).to_string();
git::add_submodule(&repository, &url, Path::new("submodule"));
t!(symlink(
&project.root().join("submodule"),
&project.root().join("submodule-link")
));
git::add(&repository);
git::commit(&repository);
let repository = git2::Repository::open(&project.root().join("submodule")).unwrap();
repository
.reset(
&repository.revparse_single("HEAD").unwrap(),
git2::ResetType::Hard,
None,
)
.unwrap();
project
.cargo("package --no-verify -v")
.with_stderr_contains("[ARCHIVING] submodule/Makefile")
.run();
}
#[cargo_test]
fn no_duplicates_from_modified_tracked_files() {
let root = paths::root().join("all");
@ -660,9 +710,19 @@ See [..]
}
#[cargo_test]
#[cfg(unix)]
/// Tests if a broken symlink is properly handled when packaging.
///
/// This test requires you to be able to make symlinks.
/// For windows, this may require you to enable developer mode.
fn broken_symlink() {
use std::os::unix::fs;
#[cfg(unix)]
use std::os::unix::fs::symlink;
#[cfg(windows)]
use std::os::windows::fs::symlink_dir as symlink;
if !symlink_supported() {
return;
}
let p = project()
.file(
@ -681,7 +741,7 @@ fn broken_symlink() {
)
.file("src/main.rs", r#"fn main() { println!("hello"); }"#)
.build();
t!(fs::symlink("nowhere", &p.root().join("src/foo.rs")));
t!(symlink("nowhere", &p.root().join("src/foo.rs")));
p.cargo("package -v")
.with_status(101)
@ -699,6 +759,26 @@ Caused by:
.run();
}
#[cargo_test]
/// Tests if a symlink to a directory is properly included.
///
/// This test requires you to be able to make symlinks.
/// For windows, this may require you to enable developer mode.
fn package_symlink_to_dir() {
if !symlink_supported() {
return;
}
project()
.file("src/main.rs", r#"fn main() { println!("hello"); }"#)
.file("bla/Makefile", "all:")
.symlink_dir("bla", "foo")
.build()
.cargo("package -v")
.with_stderr_contains("[ARCHIVING] foo/Makefile")
.run();
}
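A hypothetical sketch of what a `symlink_supported` probe can look like — try to create a symlink in a scratch directory and report whether the OS allows it (on Windows this typically requires developer mode). `symlink_supported_probe` is an invented name, not the actual test-support helper:

use std::fs;
use std::path::Path;

fn symlink_supported_probe(scratch: &Path) -> bool {
    let target = scratch.join("symlink-probe-target");
    let link = scratch.join("symlink-probe-link");
    let _ = fs::write(&target, "");
    // Unix symlinks and Windows file symlinks have different entry points.
    #[cfg(unix)]
    let created = std::os::unix::fs::symlink(&target, &link).is_ok();
    #[cfg(windows)]
    let created = std::os::windows::fs::symlink_file(&target, &link).is_ok();
    let _ = fs::remove_file(&link);
    let _ = fs::remove_file(&target);
    created
}

fn main() {
    println!("symlinks supported: {}", symlink_supported_probe(&std::env::temp_dir()));
}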
#[cargo_test]
fn do_not_package_if_repository_is_dirty() {
let p = project().build();

View File

@ -321,17 +321,17 @@ fn profile_override_hierarchy() {
p.cargo("build -v").masquerade_as_nightly_cargo().with_stderr_unordered("\
[COMPILING] m3 [..]
[COMPILING] dep [..]
[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=4 [..]
[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=3 [..]
[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..]
[RUNNING] `rustc --crate-name build_script_build m1/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=4 [..]
[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=4 [..]
[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=3 [..]
[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..]
[RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=4 [..]
[COMPILING] m2 [..]
[RUNNING] `rustc --crate-name build_script_build m2/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=2 [..]
[RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=2 [..]
[RUNNING] `[..]/m1-[..]/build-script-build`
[RUNNING] `[..]/m2-[..]/build-script-build`
[RUNNING] `rustc --crate-name m2 m2/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=2 [..]
[RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=2 [..]
[COMPILING] m1 [..]
[RUNNING] `rustc --crate-name m1 m1/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..]
[RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..]
[FINISHED] dev [unoptimized + debuginfo] [..]
",
)
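These expectations come from per-dependency profile overrides, still a nightly-only feature at this point. The fragment below is a hypothetical sketch of the manifest shape being exercised, not the test's exact manifest:

cargo-features = ["profile-overrides"]

[profile.dev]
codegen-units = 1

[profile.dev.overrides.dep]
codegen-units = 3

[profile.dev.overrides."*"]
codegen-units = 4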

View File

@ -1395,6 +1395,55 @@ fn use_semver() {
p.cargo("build").run();
}
#[cargo_test]
fn use_semver_package_incorrectly() {
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a", "b"]
"#,
)
.file(
"a/Cargo.toml",
r#"
[project]
name = "a"
version = "0.1.1-alpha.0"
authors = []
"#,
)
.file(
"b/Cargo.toml",
r#"
[project]
name = "b"
version = "0.1.0"
authors = []
[dependencies]
a = { version = "^0.1", path = "../a" }
"#,
)
.file("a/src/main.rs", "fn main() {}")
.file("b/src/main.rs", "fn main() {}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
error: no matching package named `a` found
location searched: [..]
prerelease package needs to be specified explicitly
a = { version = \"0.1.1-alpha.0\" }
required by package `b v0.1.0 ([..])`
",
)
.run();
}
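The error text follows from how semver requirements treat pre-releases: a caret requirement without a pre-release tag never matches a pre-release version. A standalone sketch with the `semver` crate (assuming a 0.9-era API) demonstrates the rule:

use semver::{Version, VersionReq};

fn main() {
    let prerelease = Version::parse("0.1.1-alpha.0").unwrap();
    // `^0.1` skips pre-release versions entirely...
    assert!(!VersionReq::parse("^0.1").unwrap().matches(&prerelease));
    // ...so the pre-release has to be requested explicitly.
    assert!(VersionReq::parse("0.1.1-alpha.0").unwrap().matches(&prerelease));
}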
#[cargo_test]
fn only_download_relevant() {
let p = project()

View File

@ -4,6 +4,11 @@ use std::env;
#[cargo_test]
fn rustc_info_cache() {
// FIXME: when pipelining rides to stable, enable this test on all channels.
if !crate::support::is_nightly() {
return;
}
let p = project()
.file("src/main.rs", r#"fn main() { println!("hello"); }"#)
.build();

View File

@ -10,6 +10,7 @@ fn rustdoc_simple() {
[DOCUMENTING] foo v0.0.1 ([CWD])
[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
-o [CWD]/target/doc \
[..] \
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
@ -27,6 +28,7 @@ fn rustdoc_args() {
[DOCUMENTING] foo v0.0.1 ([CWD])
[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
-o [CWD]/target/doc \
[..] \
--cfg=foo \
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -66,6 +68,7 @@ fn rustdoc_foo_with_bar_dependency() {
[DOCUMENTING] foo v0.0.1 ([CWD])
[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
-o [CWD]/target/doc \
[..] \
--cfg=foo \
-L dependency=[CWD]/target/debug/deps \
--extern [..]`
@ -104,6 +107,7 @@ fn rustdoc_only_bar_dependency() {
[DOCUMENTING] bar v0.0.1 ([..])
[RUNNING] `rustdoc --crate-name bar [..]bar/src/lib.rs [..]\
-o [CWD]/target/doc \
[..] \
--cfg=foo \
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -125,6 +129,7 @@ fn rustdoc_same_name_documents_lib() {
[DOCUMENTING] foo v0.0.1 ([..])
[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
-o [CWD]/target/doc \
[..] \
--cfg=foo \
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -168,6 +173,7 @@ fn rustdoc_target() {
[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
--target x86_64-unknown-linux-gnu \
-o [CWD]/target/x86_64-unknown-linux-gnu/doc \
[..] \
-L dependency=[CWD]/target/x86_64-unknown-linux-gnu/debug/deps \
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",

View File

@ -1,8 +1,10 @@
use std::fs::{self, File};
use std::io::Write;
use crate::support::rustc_host;
use crate::support::{basic_lib_manifest, basic_manifest, paths, project, project_in_home};
use crate::support::registry::Package;
use crate::support::{
basic_lib_manifest, basic_manifest, paths, project, project_in_home, rustc_host,
};
#[cargo_test]
fn env_rustflags_normal_source() {
@ -1393,3 +1395,41 @@ fn remap_path_prefix_ignored() {
.run();
check_metadata_same();
}
#[cargo_test]
fn remap_path_prefix_works() {
// Check that remap-path-prefix works.
Package::new("bar", "0.1.0")
.file("src/lib.rs", "pub fn f() -> &'static str { file!() }")
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = "0.1"
"#,
)
.file(
"src/main.rs",
r#"
fn main() {
println!("{}", bar::f());
}
"#,
)
.build();
p.cargo("run")
.env(
"RUSTFLAGS",
format!("--remap-path-prefix={}=/foo", paths::root().display()),
)
.with_stdout("/foo/home/.cargo/registry/src/[..]/bar-0.1.0/src/lib.rs")
.run();
}

View File

@ -98,9 +98,6 @@ fn use_git_gc() {
}
#[cargo_test]
// it looks like this test passes on some windows machines but not others,
// notably not on AppVeyor's machines. Sounds like another bug for another day.
#[cfg_attr(windows, ignore)]
fn avoid_using_git() {
let path = env::var_os("PATH").unwrap_or_default();
let mut paths = env::split_paths(&path).collect::<Vec<_>>();

Some files were not shown because too many files have changed in this diff