From e26ef01743eedecd7e011a7ef8b2235fb001e3ac Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sat, 27 Jul 2019 16:21:24 -0700 Subject: [PATCH 01/29] Refactor resolve `Method` --- crates/resolver-tests/src/lib.rs | 6 +- src/cargo/core/compiler/layout.rs | 2 +- src/cargo/core/resolver/context.rs | 30 ++-- src/cargo/core/resolver/dep_cache.rs | 75 ++++------ src/cargo/core/resolver/encode.rs | 9 ++ src/cargo/core/resolver/mod.rs | 24 ++-- src/cargo/core/resolver/resolve.rs | 20 ++- src/cargo/core/resolver/types.rs | 50 +++++-- src/cargo/core/workspace.rs | 4 +- src/cargo/ops/cargo_compile.rs | 14 +- src/cargo/ops/cargo_doc.rs | 11 +- src/cargo/ops/cargo_generate_lockfile.rs | 17 ++- src/cargo/ops/cargo_install.rs | 6 +- src/cargo/ops/cargo_output_metadata.rs | 12 +- src/cargo/ops/cargo_package.rs | 7 +- src/cargo/ops/mod.rs | 3 +- src/cargo/ops/resolve.rs | 170 ++++++++++++----------- 17 files changed, 248 insertions(+), 212 deletions(-) diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs index 6d349a69b..dce94689e 100644 --- a/crates/resolver-tests/src/lib.rs +++ b/crates/resolver-tests/src/lib.rs @@ -8,7 +8,7 @@ use std::rc::Rc; use std::time::Instant; use cargo::core::dependency::Kind; -use cargo::core::resolver::{self, Method}; +use cargo::core::resolver::{self, ResolveOpts}; use cargo::core::source::{GitReference, SourceId}; use cargo::core::Resolve; use cargo::core::{Dependency, PackageId, Registry, Summary}; @@ -175,10 +175,10 @@ pub fn resolve_with_config_raw( false, ) .unwrap(); - let method = Method::Everything; + let opts = ResolveOpts::everything(); let start = Instant::now(); let resolve = resolver::resolve( - &[(summary, method)], + &[(summary, opts)], &[], &mut registry, &HashSet::new(), diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index cc48a41ad..40140e8ce 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -3,7 +3,7 @@ //! The directory layout is a little tricky at times, hence a separate file to //! house this logic. The current layout looks like this: //! -//! ```ignore +//! ```text //! # This is the root directory for all output, the top-level package //! # places all of its output here. //! target/ diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 58a86342d..27b9a0585 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -13,7 +13,7 @@ use crate::util::CargoResult; use crate::util::Graph; use super::dep_cache::RegistryQueryer; -use super::types::{ConflictMap, FeaturesSet, Method}; +use super::types::{ConflictMap, FeaturesSet, ResolveOpts}; pub use super::encode::Metadata; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; @@ -103,11 +103,11 @@ impl Context { /// cased `summary` to get activated. This may not be present for the root /// crate, for example. /// - /// Returns `true` if this summary with the given method is already activated. + /// Returns `true` if this summary with the given features is already activated. pub fn flag_activated( &mut self, summary: &Summary, - method: &Method, + opts: &ResolveOpts, parent: Option<(&Summary, &Dependency)>, ) -> CargoResult { let id = summary.package_id(); @@ -158,25 +158,21 @@ impl Context { } } debug!("checking if {} is already activated", summary.package_id()); - let (features, use_default) = match method { - Method::Everything - | Method::Required { - all_features: true, .. 
- } => return Ok(false), - Method::Required { - features, - uses_default_features, - .. - } => (features, uses_default_features), - }; + if opts.all_features { + return Ok(false); + } let has_default_feature = summary.features().contains_key("default"); Ok(match self.resolve_features.get(&id) { Some(prev) => { - features.is_subset(prev) - && (!use_default || prev.contains("default") || !has_default_feature) + opts.features.is_subset(prev) + && (!opts.uses_default_features + || prev.contains("default") + || !has_default_feature) + } + None => { + opts.features.is_empty() && (!opts.uses_default_features || !has_default_feature) } - None => features.is_empty() && (!use_default || !has_default_feature), }) } diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index 58b52a617..e20a78a66 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -20,7 +20,7 @@ use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, use crate::util::errors::CargoResult; use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; -use crate::core::resolver::{ActivateResult, Method}; +use crate::core::resolver::{ActivateResult, ResolveOpts}; pub struct RegistryQueryer<'a> { pub registry: &'a mut (dyn Registry + 'a), @@ -34,7 +34,7 @@ pub struct RegistryQueryer<'a> { registry_cache: HashMap>>, /// a cache of `Dependency`s that are required for a `Summary` summary_cache: HashMap< - (Option, Summary, Method), + (Option, Summary, ResolveOpts), Rc<(HashSet, Rc>)>, >, /// all the cases we ended up using a supplied replacement @@ -192,20 +192,20 @@ impl<'a> RegistryQueryer<'a> { } /// Find out what dependencies will be added by activating `candidate`, - /// with features described in `method`. Then look up in the `registry` + /// with features described in `opts`. Then look up in the `registry` /// the candidates that will fulfil each of these dependencies, as it is the /// next obvious question. pub fn build_deps( &mut self, parent: Option, candidate: &Summary, - method: &Method, + opts: &ResolveOpts, ) -> ActivateResult, Rc>)>> { // if we have calculated a result before, then we can just return it, // as it is a "pure" query of its arguments. if let Some(out) = self .summary_cache - .get(&(parent, candidate.clone(), method.clone())) + .get(&(parent, candidate.clone(), opts.clone())) .cloned() { return Ok(out); @@ -213,7 +213,7 @@ impl<'a> RegistryQueryer<'a> { // First, figure out our set of dependencies based on the requested set // of features. This also calculates what features we're going to enable // for our own dependencies. - let (used_features, deps) = resolve_features(parent, candidate, method)?; + let (used_features, deps) = resolve_features(parent, candidate, opts)?; // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. @@ -236,7 +236,7 @@ impl<'a> RegistryQueryer<'a> { // If we succeed we add the result to the cache so we can use it again next time. // We dont cache the failure cases as they dont impl Clone. 
self.summary_cache - .insert((parent, candidate.clone(), method.clone()), out.clone()); + .insert((parent, candidate.clone(), opts.clone()), out.clone()); Ok(out) } @@ -247,18 +247,13 @@ impl<'a> RegistryQueryer<'a> { pub fn resolve_features<'b>( parent: Option, s: &'b Summary, - method: &'b Method, + opts: &'b ResolveOpts, ) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { - let dev_deps = match *method { - Method::Everything => true, - Method::Required { dev_deps, .. } => dev_deps, - }; - // First, filter by dev-dependencies. let deps = s.dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); + let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); - let reqs = build_requirements(s, method)?; + let reqs = build_requirements(s, opts)?; let mut ret = Vec::new(); let mut used_features = HashSet::new(); let default_dep = (false, BTreeSet::new()); @@ -336,52 +331,34 @@ pub fn resolve_features<'b>( Ok((reqs.into_used(), ret)) } -/// Takes requested features for a single package from the input `Method` and +/// Takes requested features for a single package from the input `ResolveOpts` and /// recurses to find all requested features, dependencies and requested /// dependency features in a `Requirements` object, returning it to the resolver. fn build_requirements<'a, 'b: 'a>( s: &'a Summary, - method: &'b Method, + opts: &'b ResolveOpts, ) -> CargoResult> { let mut reqs = Requirements::new(s); - match method { - Method::Everything - | Method::Required { - all_features: true, .. - } => { - for key in s.features().keys() { - reqs.require_feature(*key)?; - } - for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - reqs.require_dependency(dep.name_in_toml()); - } + if opts.all_features { + for key in s.features().keys() { + reqs.require_feature(*key)?; } - Method::Required { - all_features: false, - features: requested, - .. - } => { - for &f in requested.iter() { - reqs.require_value(&FeatureValue::new(f, s))?; - } + for dep in s.dependencies().iter().filter(|d| d.is_optional()) { + reqs.require_dependency(dep.name_in_toml()); + } + } else { + for &f in opts.features.iter() { + reqs.require_value(&FeatureValue::new(f, s))?; } } - match *method { - Method::Everything - | Method::Required { - uses_default_features: true, - .. - } => { - if s.features().contains_key("default") { - reqs.require_feature(InternedString::new("default"))?; - } + + if opts.uses_default_features { + if s.features().contains_key("default") { + reqs.require_feature(InternedString::new("default"))?; } - Method::Required { - uses_default_features: false, - .. - } => {} } + Ok(reqs) } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index d605b5e0c..b60b3a207 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -105,6 +105,7 @@ use crate::util::{internal, Graph}; use super::{Resolve, ResolveVersion}; +/// The `Cargo.lock` structure. #[derive(Serialize, Deserialize, Debug)] pub struct EncodableResolve { package: Option>, @@ -123,6 +124,14 @@ struct Patch { pub type Metadata = BTreeMap; impl EncodableResolve { + /// Convert a `Cargo.lock` to a Resolve. + /// + /// Note that this `Resolve` is not "complete". For example, the + /// dependencies do not know the difference between regular/dev/build + /// dependencies, so they are not filled in. It also does not include + /// `features`. Care should be taken when using this Resolve. 
One of the + /// primary uses is to be used with `resolve_with_previous` to guide the + /// resolver to create a complete Resolve. pub fn into_resolve(self, ws: &Workspace<'_>) -> CargoResult { let path_deps = build_path_deps(ws); let mut checksums = HashMap::new(); diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index b7fa8b213..4aaa7eeaf 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -69,7 +69,7 @@ pub use self::encode::Metadata; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use self::errors::{ActivateError, ActivateResult, ResolveError}; pub use self::resolve::{Resolve, ResolveVersion}; -pub use self::types::Method; +pub use self::types::ResolveOpts; mod conflict_cache; mod context; @@ -120,7 +120,7 @@ mod types; /// When we have a decision for how to implement is without breaking existing functionality /// this flag can be removed. pub fn resolve( - summaries: &[(Summary, Method)], + summaries: &[(Summary, ResolveOpts)], replacements: &[(PackageIdSpec, Dependency)], registry: &mut dyn Registry, try_to_use: &HashSet, @@ -169,7 +169,7 @@ pub fn resolve( fn activate_deps_loop( mut cx: Context, registry: &mut RegistryQueryer<'_>, - summaries: &[(Summary, Method)], + summaries: &[(Summary, ResolveOpts)], config: Option<&Config>, ) -> CargoResult { let mut backtrack_stack = Vec::new(); @@ -180,9 +180,9 @@ fn activate_deps_loop( let mut past_conflicting_activations = conflict_cache::ConflictCache::new(); // Activate all the initial summaries to kick off some work. - for &(ref summary, ref method) in summaries { + for &(ref summary, ref opts) in summaries { debug!("initial activation: {}", summary.package_id()); - let res = activate(&mut cx, registry, None, summary.clone(), method.clone()); + let res = activate(&mut cx, registry, None, summary.clone(), opts.clone()); match res { Ok(Some((frame, _))) => remaining_deps.push(frame), Ok(None) => (), @@ -366,7 +366,7 @@ fn activate_deps_loop( }; let pid = candidate.package_id(); - let method = Method::Required { + let opts = ResolveOpts { dev_deps: false, features: Rc::clone(&features), all_features: false, @@ -379,7 +379,7 @@ fn activate_deps_loop( dep.package_name(), candidate.version() ); - let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, method); + let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts); let successfully_activated = match res { // Success! We've now activated our `candidate` in our context @@ -583,7 +583,7 @@ fn activate_deps_loop( /// Attempts to activate the summary `candidate` in the context `cx`. /// /// This function will pull dependency summaries from the registry provided, and -/// the dependencies of the package will be determined by the `method` provided. +/// the dependencies of the package will be determined by the `opts` provided. /// If `candidate` was activated, this function returns the dependency frame to /// iterate through next. 
fn activate( @@ -591,7 +591,7 @@ fn activate( registry: &mut RegistryQueryer<'_>, parent: Option<(&Summary, &Dependency)>, candidate: Summary, - method: Method, + opts: ResolveOpts, ) -> ActivateResult> { let candidate_pid = candidate.package_id(); if let Some((parent, dep)) = parent { @@ -652,7 +652,7 @@ fn activate( } } - let activated = cx.flag_activated(&candidate, &method, parent)?; + let activated = cx.flag_activated(&candidate, &opts, parent)?; let candidate = match registry.replacement_summary(candidate_pid) { Some(replace) => { @@ -661,7 +661,7 @@ fn activate( // does. TBH it basically cause panics in the test suite if // `parent` is passed through here and `[replace]` is otherwise // on life support so it's not critical to fix bugs anyway per se. - if cx.flag_activated(replace, &method, None)? && activated { + if cx.flag_activated(replace, &opts, None)? && activated { return Ok(None); } trace!( @@ -682,7 +682,7 @@ fn activate( let now = Instant::now(); let (used_features, deps) = - &*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &method)?; + &*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &opts)?; // Record what list of features is active for this package. if !used_features.is_empty() { diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index c1353e629..17eccb5a9 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -23,15 +23,33 @@ pub struct Resolve { /// from `Cargo.toml`. We need a `Vec` here because the same package /// might be present in both `[dependencies]` and `[build-dependencies]`. graph: Graph>, + /// Replacements from the `[replace]` table. replacements: HashMap, + /// Inverted version of `replacements`. reverse_replacements: HashMap, + /// An empty `HashSet` to avoid creating a new `HashSet` for every package + /// that does not have any features, and to avoid using `Option` to + /// simplify the API. empty_features: HashSet, + /// Features enabled for a given package. features: HashMap>, + /// Checksum for each package. A SHA256 hash of the `.crate` file used to + /// validate the correct crate file is used. This is `None` for sources + /// that do not use `.crate` files, like path or git dependencies. checksums: HashMap>, + /// "Unknown" metadata. This is a collection of extra, unrecognized data + /// found in the `[metadata]` section of `Cargo.lock`, preserved for + /// forwards compatibility. metadata: Metadata, + /// `[patch]` entries that did not match anything, preserved in + /// `Cargo.lock` as the `[[patch.unused]]` table array. + /// TODO: *Why* is this kept in `Cargo.lock`? Removing it doesn't seem to + /// affect anything. unused_patches: Vec, - // A map from packages to a set of their public dependencies + /// A map from packages to a set of their public dependencies public_dependencies: HashMap>, + /// Version of the `Cargo.lock` format, see + /// `cargo::core::resolver::encode` for more. version: ResolveVersion, } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 0797403ee..881869ef1 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -99,19 +99,47 @@ impl ResolverProgress { /// optimized comparison operators like `is_subset` at the interfaces. pub type FeaturesSet = Rc>; -#[derive(Clone, Eq, PartialEq, Hash)] -pub enum Method { - Everything, // equivalent to Required { dev_deps: true, all_features: true, .. 
} - Required { - dev_deps: bool, - features: FeaturesSet, - all_features: bool, - uses_default_features: bool, - }, +/// Options for how the resolve should work. +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct ResolveOpts { + /// Whether or not dev-dependencies should be included. + /// + /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. + pub dev_deps: bool, + /// Set of features to enable (`--features=…`). + pub features: FeaturesSet, + /// Indicates *all* features should be enabled (`--all-features`). + pub all_features: bool, + /// Include the `default` feature (`--no-default-features` sets this false). + pub uses_default_features: bool, } -impl Method { - pub fn split_features(features: &[String]) -> BTreeSet { +impl ResolveOpts { + /// Creates a ResolveOpts that resolves everything. + pub fn everything() -> ResolveOpts { + ResolveOpts { + dev_deps: true, + features: Rc::new(BTreeSet::new()), + all_features: true, + uses_default_features: true, + } + } + + pub fn new( + dev_deps: bool, + features: &[String], + all_features: bool, + uses_default_features: bool, + ) -> ResolveOpts { + ResolveOpts { + dev_deps, + features: Rc::new(ResolveOpts::split_features(features)), + all_features, + uses_default_features, + } + } + + fn split_features(features: &[String]) -> BTreeSet { features .iter() .flat_map(|s| s.split_whitespace()) diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index cfbc84c55..c36f10d33 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -283,7 +283,7 @@ impl<'cfg> Workspace<'cfg> { .unwrap_or_else(|| Filesystem::new(self.root().join("target"))) } - /// Returns the root [replace] section of this workspace. + /// Returns the root `[replace]` section of this workspace. /// /// This may be from a virtual crate or an actual crate. pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] { @@ -293,7 +293,7 @@ impl<'cfg> Workspace<'cfg> { } } - /// Returns the root [patch] section of this workspace. + /// Returns the root `[patch]` section of this workspace. /// /// This may be from a virtual crate or an actual crate. 
pub fn root_patch(&self) -> &HashMap> { diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 826c3472e..0601f1cc1 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -23,14 +23,13 @@ use std::collections::{BTreeSet, HashMap, HashSet}; use std::iter::FromIterator; use std::path::PathBuf; -use std::rc::Rc; use std::sync::Arc; use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; use crate::core::compiler::{CompileMode, Kind, Unit}; use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::{Method, Resolve}; +use crate::core::resolver::{Resolve, ResolveOpts}; use crate::core::{Package, Target}; use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace}; use crate::ops; @@ -297,14 +296,9 @@ pub fn compile_ws<'a>( }; let specs = spec.to_package_id_specs(ws)?; - let features = Method::split_features(features); - let method = Method::Required { - dev_deps: ws.require_optional_deps() || filter.need_dev_deps(build_config.mode), - features: Rc::new(features), - all_features, - uses_default_features: !no_default_features, - }; - let resolve = ops::resolve_ws_with_method(ws, method, &specs)?; + let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode); + let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features); + let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?; let (packages, resolve_with_overrides) = resolve; let to_build_ids = specs diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index bb77acf9a..fb9aa7ec4 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -5,6 +5,7 @@ use std::path::Path; use failure::Fail; use opener; +use crate::core::resolver::ResolveOpts; use crate::core::Workspace; use crate::ops; use crate::util::CargoResult; @@ -21,13 +22,13 @@ pub struct DocOptions<'a> { /// Main method for `cargo doc`. 
pub fn doc(ws: &Workspace<'_>, options: &DocOptions<'_>) -> CargoResult<()> { let specs = options.compile_opts.spec.to_package_id_specs(ws)?; - let resolve = ops::resolve_ws_precisely( - ws, + let opts = ResolveOpts::new( + /*dev_deps*/ true, &options.compile_opts.features, options.compile_opts.all_features, - options.compile_opts.no_default_features, - &specs, - )?; + !options.compile_opts.no_default_features, + ); + let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?; let (packages, resolve_with_overrides) = resolve; let ids = specs diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 1055e52f7..45c0aa688 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -4,7 +4,7 @@ use log::debug; use termcolor::Color::{self, Cyan, Green, Red}; use crate::core::registry::PackageRegistry; -use crate::core::resolver::Method; +use crate::core::resolver::ResolveOpts; use crate::core::PackageId; use crate::core::{Resolve, SourceId, Workspace}; use crate::ops; @@ -21,8 +21,15 @@ pub struct UpdateOptions<'a> { pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { let mut registry = PackageRegistry::new(ws.config())?; - let resolve = - ops::resolve_with_previous(&mut registry, ws, Method::Everything, None, None, &[], true)?; + let resolve = ops::resolve_with_previous( + &mut registry, + ws, + ResolveOpts::everything(), + None, + None, + &[], + true, + )?; ops::write_pkg_lockfile(ws, &resolve)?; Ok(()) } @@ -57,7 +64,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes ops::resolve_with_previous( &mut registry, ws, - Method::Everything, + ResolveOpts::everything(), None, None, &[], @@ -103,7 +110,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes let resolve = ops::resolve_with_previous( &mut registry, ws, - Method::Everything, + ResolveOpts::everything(), Some(&previous_resolve), Some(&to_avoid), &[], diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index 973df18b3..04f9befe3 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -8,7 +8,7 @@ use tempfile::Builder as TempFileBuilder; use crate::core::compiler::Freshness; use crate::core::compiler::{DefaultExecutor, Executor}; -use crate::core::resolver::Method; +use crate::core::resolver::ResolveOpts; use crate::core::{Edition, PackageId, PackageIdSpec, Source, SourceId, Workspace}; use crate::ops; use crate::ops::common_for_install_and_uninstall::*; @@ -486,10 +486,10 @@ fn check_yanked_install(ws: &Workspace<'_>) -> CargoResult<()> { // It would be best if `source` could be passed in here to avoid a // duplicate "Updating", but since `source` is taken by value, then it // wouldn't be available for `compile_ws`. - let (pkg_set, resolve) = ops::resolve_ws_with_method(ws, Method::Everything, &specs)?; + let (pkg_set, resolve) = ops::resolve_ws_with_opts(ws, ResolveOpts::everything(), &specs)?; let mut sources = pkg_set.sources_mut(); - // Checking the yanked status invovles taking a look at the registry and + // Checking the yanked status involves taking a look at the registry and // maybe updating files, so be sure to lock it here. 
let _lock = ws.config().acquire_package_cache_lock()?; diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index 2cd93e46e..f782414ca 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; use serde::ser; use serde::Serialize; -use crate::core::resolver::Resolve; +use crate::core::resolver::{Resolve, ResolveOpts}; use crate::core::{Package, PackageId, Workspace}; use crate::ops::{self, Packages}; use crate::util::CargoResult; @@ -50,13 +50,13 @@ fn metadata_no_deps(ws: &Workspace<'_>, _opt: &OutputMetadataOptions) -> CargoRe fn metadata_full(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult { let specs = Packages::All.to_package_id_specs(ws)?; - let (package_set, resolve) = ops::resolve_ws_precisely( - ws, + let opts = ResolveOpts::new( + /*dev_deps*/ true, &opt.features, opt.all_features, - opt.no_default_features, - &specs, - )?; + !opt.no_default_features, + ); + let (package_set, resolve) = ops::resolve_ws_with_opts(ws, opts, &specs)?; let mut packages = HashMap::new(); for pkg in package_set.get_many(package_set.package_ids())? { packages.insert(pkg.package_id(), pkg.clone()); diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 2857ab3f1..f24332db8 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -14,7 +14,7 @@ use tar::{Archive, Builder, EntryType, Header}; use termcolor::Color; use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; -use crate::core::resolver::Method; +use crate::core::resolver::ResolveOpts; use crate::core::Feature; use crate::core::{ Package, PackageId, PackageIdSpec, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace, @@ -152,7 +152,8 @@ fn build_lock(ws: &Workspace<'_>) -> CargoResult { // Regenerate Cargo.lock using the old one as a guide. let specs = vec![PackageIdSpec::from_package_id(new_pkg.package_id())]; let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?; - let (pkg_set, new_resolve) = ops::resolve_ws_with_method(&tmp_ws, Method::Everything, &specs)?; + let (pkg_set, new_resolve) = + ops::resolve_ws_with_opts(&tmp_ws, ResolveOpts::everything(), &specs)?; if let Some(orig_resolve) = orig_resolve { compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?; @@ -558,7 +559,7 @@ fn compare_resolve( } fn check_yanked(config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve) -> CargoResult<()> { - // Checking the yanked status invovles taking a look at the registry and + // Checking the yanked status involves taking a look at the registry and // maybe updating files, so be sure to lock it here. 
let _lock = config.acquire_package_cache_lock()?; diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs index 9e40062b2..304e4d3c3 100644 --- a/src/cargo/ops/mod.rs +++ b/src/cargo/ops/mod.rs @@ -23,8 +23,7 @@ pub use self::registry::{http_handle, needs_custom_http_transport, registry_logi pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts}; pub use self::registry::{publish, registry_configuration, RegistryConfig}; pub use self::resolve::{ - add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_precisely, - resolve_ws_with_method, + add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_with_opts, }; pub use self::vendor::{vendor, VendorOptions}; diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index b660be8b6..98e1a7087 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -1,10 +1,22 @@ +//! High-level APIs for executing the resolver. +//! +//! This module provides functions for running the resolver given a workspace. +//! There are roughly 3 main functions: +//! +//! - `resolve_ws`: A simple, high-level function with no options. +//! - `resolve_ws_with_opts`: A medium-level function with options like +//! user-provided features. This is the most appropriate function to use in +//! most cases. +//! - `resolve_with_previous`: A low-level function for running the resolver, +//! providing the most power and flexibility. + use std::collections::HashSet; use std::rc::Rc; use log::{debug, trace}; use crate::core::registry::PackageRegistry; -use crate::core::resolver::{self, Method, Resolve}; +use crate::core::resolver::{self, Resolve, ResolveOpts}; use crate::core::Feature; use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace}; use crate::ops; @@ -21,8 +33,12 @@ version. This may also occur with an optional dependency that is not enabled."; /// Resolves all dependencies for the workspace using the previous /// lock file as a guide if present. /// -/// This function will also write the result of resolution as a new -/// lock file. +/// This function will also write the result of resolution as a new lock file +/// (unless it is an ephemeral workspace such as `cargo install` or `cargo +/// package`). +/// +/// This is a simple interface used by commands like `clean`, `fetch`, and +/// `package`, which don't specify any options or features. pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { let mut registry = PackageRegistry::new(ws.config())?; let resolve = resolve_with_registry(ws, &mut registry)?; @@ -32,30 +48,17 @@ pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolv /// Resolves dependencies for some packages of the workspace, /// taking into account `paths` overrides and activated features. -pub fn resolve_ws_precisely<'a>( +/// +/// This function will also write the result of resolution as a new lock file +/// (unless `Workspace::require_optional_deps` is false, such as `cargo +/// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo +/// install` or `cargo package`). +/// +/// `specs` may be empty, which indicates it should resolve all workspace +/// members. In this case, `opts.all_features` must be `true`. 
+pub fn resolve_ws_with_opts<'a>( ws: &Workspace<'a>, - features: &[String], - all_features: bool, - no_default_features: bool, - specs: &[PackageIdSpec], -) -> CargoResult<(PackageSet<'a>, Resolve)> { - let features = Method::split_features(features); - let method = if all_features { - Method::Everything - } else { - Method::Required { - dev_deps: true, - features: Rc::new(features), - all_features: false, - uses_default_features: !no_default_features, - } - }; - resolve_ws_with_method(ws, method, specs) -} - -pub fn resolve_ws_with_method<'a>( - ws: &Workspace<'a>, - method: Method, + opts: ResolveOpts, specs: &[PackageIdSpec], ) -> CargoResult<(PackageSet<'a>, Resolve)> { let mut registry = PackageRegistry::new(ws.config())?; @@ -67,6 +70,7 @@ pub fn resolve_ws_with_method<'a>( // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. let resolve = resolve_with_registry(ws, &mut registry)?; + // No need to add patches again, `resolve_with_registry` has done it. add_patches = false; // Second, resolve with precisely what we're doing. Filter out @@ -92,10 +96,10 @@ pub fn resolve_ws_with_method<'a>( ops::load_pkg_lockfile(ws)? }; - let resolved_with_overrides = ops::resolve_with_previous( + let resolved_with_overrides = resolve_with_previous( &mut registry, ws, - method, + opts, resolve.as_ref(), None, specs, @@ -115,7 +119,7 @@ fn resolve_with_registry<'cfg>( let resolve = resolve_with_previous( registry, ws, - Method::Everything, + ResolveOpts::everything(), prev.as_ref(), None, &[], @@ -137,15 +141,26 @@ fn resolve_with_registry<'cfg>( /// /// The previous resolve normally comes from a lock file. This function does not /// read or write lock files from the filesystem. +/// +/// `specs` may be empty, which indicates it should resolve all workspace +/// members. In this case, `opts.all_features` must be `true`. +/// +/// If `register_patches` is true, then entries from the `[patch]` table in +/// the manifest will be added to the given `PackageRegistry`. pub fn resolve_with_previous<'cfg>( registry: &mut PackageRegistry<'cfg>, ws: &Workspace<'cfg>, - method: Method, + opts: ResolveOpts, previous: Option<&Resolve>, to_avoid: Option<&HashSet>, specs: &[PackageIdSpec], register_patches: bool, ) -> CargoResult { + assert!( + !specs.is_empty() || opts.all_features, + "no specs requires all_features" + ); + // We only want one Cargo at a time resolving a crate graph since this can // involve a lot of frobbing of the global caches. let _lock = ws.config().acquire_package_cache_lock()?; @@ -228,85 +243,76 @@ pub fn resolve_with_previous<'cfg>( let mut summaries = Vec::new(); if ws.config().cli_unstable().package_features { let mut members = Vec::new(); - match &method { - Method::Everything => members.extend(ws.members()), - Method::Required { - features, - all_features, - uses_default_features, - .. - } => { - if specs.len() > 1 && !features.is_empty() { - failure::bail!("cannot specify features for more than one package"); - } - members.extend( - ws.members() - .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))), - ); - // Edge case: running `cargo build -p foo`, where `foo` is not a member - // of current workspace. Add all packages from workspace to get `foo` - // into the resolution graph. 
- if members.is_empty() { - if !(features.is_empty() && !all_features && *uses_default_features) { - failure::bail!("cannot specify features for packages outside of workspace"); - } - members.extend(ws.members()); + if specs.is_empty() { + members.extend(ws.members()); + } else { + if specs.len() > 1 && !opts.features.is_empty() { + failure::bail!("cannot specify features for more than one package"); + } + members.extend( + ws.members() + .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))), + ); + // Edge case: running `cargo build -p foo`, where `foo` is not a member + // of current workspace. Add all packages from workspace to get `foo` + // into the resolution graph. + if members.is_empty() { + if !(opts.features.is_empty() && !opts.all_features && opts.uses_default_features) { + failure::bail!("cannot specify features for packages outside of workspace"); } + members.extend(ws.members()); + panic!("tested?"); } } for member in members { let summary = registry.lock(member.summary().clone()); - summaries.push((summary, method.clone())) + summaries.push((summary, opts.clone())) } } else { for member in ws.members() { - let method_to_resolve = match method { - // When everything for a workspace we want to be sure to resolve all - // members in the workspace, so propagate the `Method::Everything`. - Method::Everything => Method::Everything, - + let summary_resolve_opts = if specs.is_empty() { + // When resolving the entire workspace, resolve each member + // with all features enabled. + opts.clone() + } else { // If we're not resolving everything though then we're constructing the // exact crate graph we're going to build. Here we don't necessarily // want to keep around all workspace crates as they may not all be // built/tested. // - // Additionally, the `method` specified represents command line + // Additionally, the `opts` specified represents command line // flags, which really only matters for the current package // (determined by the cwd). If other packages are specified (via // `-p`) then the command line flags like features don't apply to // them. // // As a result, if this `member` is the current member of the - // workspace, then we use `method` specified. Otherwise we use a - // base method with no features specified but using default features + // workspace, then we use `opts` specified. Otherwise we use a + // base `opts` with no features specified but using default features // for any other packages specified with `-p`. - Method::Required { - dev_deps, - all_features, - .. - } => { - let base = Method::Required { - dev_deps, - features: Rc::default(), - all_features, - uses_default_features: true, - }; - let member_id = member.package_id(); - match ws.current_opt() { - Some(current) if member_id == current.package_id() => method.clone(), - _ => { - if specs.iter().any(|spec| spec.matches(member_id)) { - base - } else { - continue; + let member_id = member.package_id(); + match ws.current_opt() { + Some(current) if member_id == current.package_id() => opts.clone(), + _ => { + if specs.iter().any(|spec| spec.matches(member_id)) { + // -p for a workspace member that is not the + // "current" one, don't use the local `--features`. + ResolveOpts { + dev_deps: opts.dev_deps, + features: Rc::default(), + all_features: opts.all_features, + uses_default_features: true, } + } else { + // `-p` for non-member, skip. 
+ continue; } } } }; let summary = registry.lock(member.summary().clone()); - summaries.push((summary, method_to_resolve)); + summaries.push((summary, summary_resolve_opts)); } }; From 2599c34d19088749d2742cdd90501245adabcb56 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 28 Jul 2019 15:21:43 -0700 Subject: [PATCH 02/29] Update `cargo_compile` module doc. --- src/cargo/ops/cargo_compile.rs | 38 ++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 826c3472e..85d54b94b 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -1,24 +1,26 @@ -//! Cargo `compile` currently does the following steps. +//! The Cargo "compile" operation. //! -//! All configurations are already injected as environment variables via the -//! main cargo command. +//! This module contains the entry point for starting the compilation process +//! for commands like `build`, `test`, `doc`, `rustc`, etc. //! -//! 1. Read the manifest. -//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as -//! stdin. +//! The `compile` function will do all the work to compile a workspace. A +//! rough outline is: //! -//! a. Shell out to `--do update` and `--do list` for each source. -//! b. Resolve dependencies and return a list of name/version/source. -//! -//! 3. Shell out to `--do download` for each source. -//! 4. Shell out to `--do get` for each source, and build up the list of paths -//! to pass to `rustc -L`. -//! 5. Call `cargo-rustc` with the results of the resolver zipped together with -//! the results of the `get`. -//! -//! a. Topologically sort the dependencies. -//! b. Compile each dependency in order, passing in the -L's pointing at each -//! previously compiled dependency. +//! - Resolve the dependency graph (see `ops::resolve`). +//! - Download any packages needed (see `PackageSet`). +//! - Generate a list of top-level "units" of work for the targets the user +//! requested on the command-line. Each `Unit` corresponds to a compiler +//! invocation. This is done in this module (`generate_targets`). +//! - Create a `Context` which will perform the following steps: +//! - Build the graph of `Unit` dependencies (see +//! `core::compiler::context::unit_dependencies`). +//! - Prepare the `target` directory (see `Layout`). +//! - Create a job queue (see `JobQueue`). The queue checks the +//! fingerprint of each `Unit` to determine if it should run or be +//! skipped. +//! - Execute the queue. Each leaf in the queue's dependency graph is +//! executed, and then removed from the graph when finished. This +//! repeats until the queue is empty. use std::collections::{BTreeSet, HashMap, HashSet}; use std::iter::FromIterator; From abf2bb499ee9c38d8e1c8fac086087b1608b39ea Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Mon, 29 Jul 2019 10:30:53 -0700 Subject: [PATCH 03/29] Further explain unused_patches. --- src/cargo/core/resolver/resolve.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 17eccb5a9..9ced48f4d 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -42,9 +42,10 @@ pub struct Resolve { /// forwards compatibility. metadata: Metadata, /// `[patch]` entries that did not match anything, preserved in - /// `Cargo.lock` as the `[[patch.unused]]` table array. - /// TODO: *Why* is this kept in `Cargo.lock`? 
Removing it doesn't seem to - /// affect anything. + /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused + /// patches helps prevent Cargo from being forced to re-update the + /// registry every time it runs, and keeps the resolve in a locked state + /// so it doesn't re-resolve the unused entries. unused_patches: Vec, /// A map from packages to a set of their public dependencies public_dependencies: HashMap>, From 77cfceea7da0ff9a33fd54b0a1e479f9d4dfde85 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Mon, 8 Apr 2019 07:53:00 +0200 Subject: [PATCH 04/29] Add tests for symlinks to git submodules or directories. --- tests/testsuite/package.rs | 43 ++++++++++++++++++++++++++++++++++ tests/testsuite/support/mod.rs | 21 +++++++++++++++-- 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 92be2b2ee..71afb01f9 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -504,6 +504,37 @@ fn package_git_submodule() { .run(); } +#[cargo_test] +fn package_symlink_to_submodule() { + let project = git::new("foo", |project| { + project + .file("src/lib.rs", "pub fn foo() {}") + .symlink("submodule", "submodule-link") + }).unwrap(); + + let library = git::new("submodule", |library| { + library.no_manifest().file("Makefile", "all:") + }).unwrap(); + + let repository = git2::Repository::open(&project.root()).unwrap(); + let url = path2url(library.root()).to_string(); + git::add_submodule(&repository, &url, Path::new("submodule")); + git::commit(&repository); + + let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); + repository + .reset( + &repository.revparse_single("HEAD").unwrap(), + git2::ResetType::Hard, + None + ).unwrap(); + + project + .cargo("package --no-verify -v") + .with_stderr_contains("[ARCHIVING] submodule/Makefile") + .run(); +} + #[cargo_test] fn no_duplicates_from_modified_tracked_files() { let root = paths::root().join("all"); @@ -699,6 +730,18 @@ Caused by: .run(); } +#[cargo_test] +fn package_symlink_to_dir() { + project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .file("bla/Makefile", "all:") + .symlink_dir("bla", "foo") + .build() + .cargo("package -v") + .with_stderr_contains("[ARCHIVING] foo/Makefile") + .run(); +} + #[cargo_test] fn do_not_package_if_repository_is_dirty() { let p = project().build(); diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index 01abfdba5..f64ac8dda 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -178,11 +178,16 @@ impl FileBuilder { struct SymlinkBuilder { dst: PathBuf, src: PathBuf, + src_is_dir: bool, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src } + SymlinkBuilder { dst, src, src_is_dir: false } + } + + pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { + SymlinkBuilder { dst, src, src_is_dir: true } } #[cfg(unix)] @@ -194,7 +199,11 @@ impl SymlinkBuilder { #[cfg(windows)] fn mk(&self) { self.dirname().mkdir_p(); - t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + if self.src_is_dir { + t!(os::window::fs::symlink_dir(&self.dst, &self.src)); + } else { + t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + } } fn dirname(&self) -> &Path { @@ -261,6 +270,14 @@ impl ProjectBuilder { self } + pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { + self.symlinks.push(SymlinkBuilder::new_dir( + self.root.root().join(dst), + 
self.root.root().join(src), + )); + self + } + pub fn no_manifest(mut self) -> Self { self.no_manifest = true; self From 50a24ff29b6ab7d2b2e51e053c0d91f2f64bf3f7 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Wed, 3 Apr 2019 15:07:04 +0200 Subject: [PATCH 05/29] Check if symlinks are directories Fixes #2748. Uses @ehuss's suggested fix. See https://github.com/rust-lang/cargo/pull/6817#issuecomment-480538976 --- src/cargo/sources/path.rs | 14 +++++++++++--- tests/testsuite/package.rs | 8 +++++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index af6d458c3..db8e7d998 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -219,9 +219,17 @@ impl<'cfg> PathSource<'cfg> { // the untracked files are often part of a build and may become relevant // as part of a future commit. let index_files = index.iter().map(|entry| { - use libgit2_sys::GIT_FILEMODE_COMMIT; - let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32; - (join(root, &entry.path), Some(is_dir)) + use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK}; + // ``is_dir`` is an optimization to avoid calling + // ``fs::metadata`` on every file. + let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 { + // Let the code below figure out if this symbolic link points + // to a directory or not. + None + } else { + Some(entry.mode == GIT_FILEMODE_COMMIT as u32) + }; + (join(root, &entry.path), is_dir) }); let mut opts = git2::StatusOptions::new(); opts.include_untracked(true); diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 71afb01f9..d895dca78 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -506,10 +506,14 @@ fn package_git_submodule() { #[cargo_test] fn package_symlink_to_submodule() { + #[cfg(unix)] + use std::os::unix::fs::symlink as symlink; + #[cfg(windows)] + use std::os::unix::fs::symlink_dir as symlink; + let project = git::new("foo", |project| { project .file("src/lib.rs", "pub fn foo() {}") - .symlink("submodule", "submodule-link") }).unwrap(); let library = git::new("submodule", |library| { @@ -519,6 +523,8 @@ fn package_symlink_to_submodule() { let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("submodule")); + t!(symlink(&project.root().join("submodule"), &project.root().join("submodule-link"))); + git::add(&repository); git::commit(&repository); let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); From 32130f8e58c5bb84bd5ad1570b40644225c3feb5 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 11 Apr 2019 13:28:45 +0200 Subject: [PATCH 06/29] enable the broken_symlink test on Windows --- tests/testsuite/package.rs | 10 ++++++---- tests/testsuite/support/mod.rs | 5 +++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index d895dca78..19bcbdbde 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -509,7 +509,7 @@ fn package_symlink_to_submodule() { #[cfg(unix)] use std::os::unix::fs::symlink as symlink; #[cfg(windows)] - use std::os::unix::fs::symlink_dir as symlink; + use std::os::windows::fs::symlink_dir as symlink; let project = git::new("foo", |project| { project @@ -697,9 +697,11 @@ See [..] 
} #[cargo_test] -#[cfg(unix)] fn broken_symlink() { - use std::os::unix::fs; + #[cfg(unix)] + use std::os::unix::fs::symlink as symlink; + #[cfg(windows)] + use std::os::windows::fs::symlink_dir as symlink; let p = project() .file( @@ -718,7 +720,7 @@ fn broken_symlink() { ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); - t!(fs::symlink("nowhere", &p.root().join("src/foo.rs"))); + t!(symlink("nowhere", &p.root().join("src/foo.rs"))); p.cargo("package -v") .with_status(101) diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index f64ac8dda..cf1c3a0ec 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -200,7 +200,7 @@ impl SymlinkBuilder { fn mk(&self) { self.dirname().mkdir_p(); if self.src_is_dir { - t!(os::window::fs::symlink_dir(&self.dst, &self.src)); + t!(os::windows::fs::symlink_dir(&self.dst, &self.src)); } else { t!(os::windows::fs::symlink_file(&self.dst, &self.src)); } @@ -261,7 +261,7 @@ impl ProjectBuilder { .push(FileBuilder::new(self.root.root().join(path), body)); } - /// Adds a symlink to the project. + /// Adds a symlink to a file to the project. pub fn symlink>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new( self.root.root().join(dst), @@ -270,6 +270,7 @@ impl ProjectBuilder { self } + /// Create a symlink to a directory pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( self.root.root().join(dst), From 6195924cd720392736cc0ebe3240ccbe8e46d594 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 11 Apr 2019 14:06:19 +0200 Subject: [PATCH 07/29] handle symlinks correctly in support/paths.rs --- tests/testsuite/support/paths.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/testsuite/support/paths.rs b/tests/testsuite/support/paths.rs index 7dfb65c69..bf3c763de 100644 --- a/tests/testsuite/support/paths.rs +++ b/tests/testsuite/support/paths.rs @@ -153,7 +153,7 @@ impl CargoPathExt for Path { where F: Fn(i64, u32) -> ((i64, u32)), { - let stat = t!(path.metadata()); + let stat = t!(path.symlink_metadata()); let mtime = FileTime::from_last_modification_time(&stat); From 673bb69cf554630401b9dca5053b5bc944830ca9 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 4 Jul 2019 13:21:50 +0200 Subject: [PATCH 08/29] Ignore tests that need Administrator privileges on Windows. This patch allows you to run them when wanted with ``--ignored`` on Windows. --- ci/azure-test-all.yml | 7 +++++ tests/testsuite/build.rs | 10 +++---- tests/testsuite/package.rs | 53 ++++++++++++++++++++++++++++------ tests/testsuite/support/mod.rs | 18 ++++++++---- 4 files changed, 69 insertions(+), 19 deletions(-) diff --git a/ci/azure-test-all.yml b/ci/azure-test-all.yml index 626858431..df700161c 100644 --- a/ci/azure-test-all.yml +++ b/ci/azure-test-all.yml @@ -26,3 +26,10 @@ steps: # fix the link errors. - bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' displayName: "cargo test" + +# Run any tests that have been marked ignore. +# +# `--include-ignored` is only supported on nightly so far, so we have to call +# this separately for now. 
+- bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' -- --ignored + displayName: "cargo test -- --ignored" diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs index 59ed08670..ce888880e 100644 --- a/tests/testsuite/build.rs +++ b/tests/testsuite/build.rs @@ -1495,12 +1495,12 @@ package `test v0.0.0 ([CWD])`", } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Make sure ignored symlinks don't break the build +/// +/// This test is marked ``ignore`` on Windows because it needs admin permissions. +/// Run it with ``--ignored``. fn ignore_broken_symlinks() { - // windows and symlinks don't currently agree that well - if cfg!(windows) { - return; - } - let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 19bcbdbde..fa59ccf85 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -505,25 +505,39 @@ fn package_git_submodule() { } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Tests if a symlink to a git submodule is properly handled. +/// +/// This test is ignored on Windows, because it needs Administrator +/// permissions to run. If you do want to run this test, please +/// run the tests with ``--ignored``, e.g. +/// +/// ```text +/// cargo test -- --ignored +/// ``` fn package_symlink_to_submodule() { #[cfg(unix)] - use std::os::unix::fs::symlink as symlink; + use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; let project = git::new("foo", |project| { - project - .file("src/lib.rs", "pub fn foo() {}") - }).unwrap(); + project.file("src/lib.rs", "pub fn foo() {}") + }) + .unwrap(); let library = git::new("submodule", |library| { library.no_manifest().file("Makefile", "all:") - }).unwrap(); + }) + .unwrap(); let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("submodule")); - t!(symlink(&project.root().join("submodule"), &project.root().join("submodule-link"))); + t!(symlink( + &project.root().join("submodule"), + &project.root().join("submodule-link") + )); git::add(&repository); git::commit(&repository); @@ -532,8 +546,9 @@ fn package_symlink_to_submodule() { .reset( &repository.revparse_single("HEAD").unwrap(), git2::ResetType::Hard, - None - ).unwrap(); + None, + ) + .unwrap(); project .cargo("package --no-verify -v") @@ -697,9 +712,19 @@ See [..] } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Tests if a broken symlink is properly handled when packaging. +/// +/// This test is ignored on Windows, because it needs Administrator +/// permissions to run. If you do want to run this test, please +/// run the tests with ``--ignored``, e.g. +/// +/// ```text +/// cargo test -- --ignored +/// ``` fn broken_symlink() { #[cfg(unix)] - use std::os::unix::fs::symlink as symlink; + use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; @@ -739,6 +764,16 @@ Caused by: } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Tests if a symlink to a directory is proberly included. +/// +/// This test is ignored on Windows, because it needs Administrator +/// permissions to run. If you do want to run this test, please +/// run the tests with ``--ignored``, e.g. 
+/// +/// ```text +/// cargo test -- --ignored +/// ``` fn package_symlink_to_dir() { project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index cf1c3a0ec..f1da02e25 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -183,11 +183,19 @@ struct SymlinkBuilder { impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src, src_is_dir: false } + SymlinkBuilder { + dst, + src, + src_is_dir: false, + } } pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src, src_is_dir: true } + SymlinkBuilder { + dst, + src, + src_is_dir: true, + } } #[cfg(unix)] @@ -273,9 +281,9 @@ impl ProjectBuilder { /// Create a symlink to a directory pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( - self.root.root().join(dst), - self.root.root().join(src), - )); + self.root.root().join(dst), + self.root.root().join(src), + )); self } From ca176eed6a1380c6b4b704fd1753f20d7603f895 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 18 Jul 2019 14:37:29 +0200 Subject: [PATCH 09/29] Fix doctests that used ignore to hide code that didn't compile. --- src/cargo/core/features.rs | 2 +- src/cargo/util/network.rs | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index 9fd8161ce..b910eb726 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -21,7 +21,7 @@ //! 3. To actually perform the feature gate, you'll want to have code that looks //! like: //! -//! ```rust,ignore +//! ```rust,compile_fail //! use core::{Feature, Features}; //! //! let feature = Feature::launch_into_space(); diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs index 2873dea25..c0d5b2a5a 100644 --- a/src/cargo/util/network.rs +++ b/src/cargo/util/network.rs @@ -73,9 +73,12 @@ fn maybe_spurious(err: &Error) -> bool { /// /// # Examples /// -/// ```ignore -/// use util::network; -/// cargo_result = network::with_retry(&config, || something.download()); +/// ``` +/// # use crate::cargo::util::{CargoResult, Config}; +/// # let download_something = || return Ok(()); +/// # let config = Config::default().unwrap(); +/// use cargo::util::network; +/// let cargo_result = network::with_retry(&config, || download_something()); /// ``` pub fn with_retry(config: &Config, mut callback: F) -> CargoResult where From 0923d7ca98761087f3f6e202060c8ac0def78cb3 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Mon, 29 Jul 2019 08:23:30 +0200 Subject: [PATCH 10/29] Remove appveyor-aimed #[ignore] Seems like it's no longer necessary (this test ran fine when `--ignored` was specified --- tests/testsuite/small_fd_limits.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/testsuite/small_fd_limits.rs b/tests/testsuite/small_fd_limits.rs index 27558a865..f0ca6988e 100644 --- a/tests/testsuite/small_fd_limits.rs +++ b/tests/testsuite/small_fd_limits.rs @@ -98,9 +98,6 @@ fn use_git_gc() { } #[cargo_test] -// it looks like this test passes on some windows machines but not others, -// notably not on AppVeyor's machines. Sounds like another but for another day. 
-#[cfg_attr(windows, ignore)] fn avoid_using_git() { let path = env::var_os("PATH").unwrap_or_default(); let mut paths = env::split_paths(&path).collect::>(); From 55e562336805b7213f8e184c0b725dfc6aa56307 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Mon, 29 Jul 2019 08:36:13 +0200 Subject: [PATCH 11/29] Don't run symlink tests based on `symlink_supported` --- tests/testsuite/build.rs | 16 +++++++------ tests/testsuite/package.rs | 47 +++++++++++++++++--------------------- 2 files changed, 30 insertions(+), 33 deletions(-) diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs index ce888880e..7c6323425 100644 --- a/tests/testsuite/build.rs +++ b/tests/testsuite/build.rs @@ -4,11 +4,10 @@ use std::io::prelude::*; use crate::support::paths::{root, CargoPathExt}; use crate::support::registry::Package; -use crate::support::ProjectBuilder; use crate::support::{ - basic_bin_manifest, basic_lib_manifest, basic_manifest, rustc_host, sleep_ms, + basic_bin_manifest, basic_lib_manifest, basic_manifest, main_file, project, rustc_host, + sleep_ms, symlink_supported, Execs, ProjectBuilder, }; -use crate::support::{main_file, project, Execs}; use cargo::util::paths::dylib_path_envvar; #[cargo_test] @@ -1495,12 +1494,15 @@ package `test v0.0.0 ([CWD])`", } #[cargo_test] -#[cfg_attr(windows, ignore)] -/// Make sure ignored symlinks don't break the build +/// Make sure broken symlinks don't break the build /// -/// This test is marked ``ignore`` on Windows because it needs admin permissions. -/// Run it with ``--ignored``. +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn ignore_broken_symlinks() { + if !symlink_supported() { + return; + } + let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index fa59ccf85..c099672da 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -3,11 +3,11 @@ use std::fs::File; use std::io::prelude::*; use std::path::Path; -use crate::support::cargo_process; use crate::support::paths::CargoPathExt; use crate::support::registry::Package; use crate::support::{ - basic_manifest, git, path2url, paths, project, publish::validate_crate_contents, registry, + basic_manifest, cargo_process, git, path2url, paths, project, publish::validate_crate_contents, + registry, symlink_supported, }; use git2; @@ -505,22 +505,20 @@ fn package_git_submodule() { } #[cargo_test] -#[cfg_attr(windows, ignore)] /// Tests if a symlink to a git submodule is properly handled. /// -/// This test is ignored on Windows, because it needs Administrator -/// permissions to run. If you do want to run this test, please -/// run the tests with ``--ignored``, e.g. -/// -/// ```text -/// cargo test -- --ignored -/// ``` +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn package_symlink_to_submodule() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; + if !symlink_supported() { + return; + } + let project = git::new("foo", |project| { project.file("src/lib.rs", "pub fn foo() {}") }) @@ -712,22 +710,20 @@ See [..] } #[cargo_test] -#[cfg_attr(windows, ignore)] /// Tests if a broken symlink is properly handled when packaging. /// -/// This test is ignored on Windows, because it needs Administrator -/// permissions to run. 
If you do want to run this test, please -/// run the tests with ``--ignored``, e.g. -/// -/// ```text -/// cargo test -- --ignored -/// ``` +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn broken_symlink() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; + if !symlink_supported() { + return; + } + let p = project() .file( "Cargo.toml", @@ -764,17 +760,16 @@ Caused by: } #[cargo_test] -#[cfg_attr(windows, ignore)] /// Tests if a symlink to a directory is proberly included. /// -/// This test is ignored on Windows, because it needs Administrator -/// permissions to run. If you do want to run this test, please -/// run the tests with ``--ignored``, e.g. -/// -/// ```text -/// cargo test -- --ignored -/// ``` +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn package_symlink_to_dir() { + + if !symlink_supported() { + return; + } + project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("bla/Makefile", "all:") From 5866d8eb2ee00abb395a593f2f79569db3001012 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Tue, 30 Jul 2019 09:32:00 +0200 Subject: [PATCH 12/29] Remove ``--ignored`` from CI --- ci/azure-test-all.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/ci/azure-test-all.yml b/ci/azure-test-all.yml index df700161c..626858431 100644 --- a/ci/azure-test-all.yml +++ b/ci/azure-test-all.yml @@ -26,10 +26,3 @@ steps: # fix the link errors. - bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' displayName: "cargo test" - -# Run any tests that have been marked ignore. -# -# `--include-ignored` is only supported on nightly so far, so we have to call -# this separately for now. -- bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' -- --ignored - displayName: "cargo test -- --ignored" From d0f7c0ee31c81cd405dc2c163e4ca7638895610c Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Tue, 30 Jul 2019 09:51:14 +0200 Subject: [PATCH 13/29] Cargo fmt --- tests/testsuite/package.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index c099672da..6a070ea0a 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -765,7 +765,6 @@ Caused by: /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. fn package_symlink_to_dir() { - if !symlink_supported() { return; } From 1e9bd724fd57f844bd0639260d799265e1dd159f Mon Sep 17 00:00:00 2001 From: debris Date: Tue, 30 Jul 2019 16:29:50 +0200 Subject: [PATCH 14/29] fix #7007, improve error message for unmatched prerelease dependencies --- src/cargo/core/resolver/errors.rs | 53 +++++++++++++++++++------------ tests/testsuite/registry.rs | 49 ++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 20 deletions(-) diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index e67ec9ede..359ca186c 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -251,14 +251,14 @@ pub(super) fn activation_error( // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
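The improved message in this commit leans on a semver rule: a requirement without a pre-release component never matches a pre-release version, so `^0.1` cannot select `0.1.1-alpha.0` and the user has to spell the pre-release out. A small illustration of that rule using the `semver` crate (an assumption for the example; the resolver has its own matching path):

```rust
use semver::{Version, VersionReq};

fn main() {
    let prerelease = Version::parse("0.1.1-alpha.0").unwrap();

    // A plain caret requirement skips pre-release versions entirely.
    assert!(!VersionReq::parse("^0.1").unwrap().matches(&prerelease));

    // Spelling the pre-release version out, as the new error suggests, matches.
    assert!(VersionReq::parse("0.1.1-alpha.0").unwrap().matches(&prerelease));
}
```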
let mut candidates = Vec::new(); - if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.name()), true) { + if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { return to_resolve_err(e); }; - candidates.sort_unstable(); - candidates.dedup(); + candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); + candidates.dedup_by(|a, b| a.name() == b.name()); let mut candidates: Vec<_> = candidates .iter() - .map(|n| (lev_distance(&*new_dep.package_name(), &*n), n)) + .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) .filter(|&(d, _)| d < 4) .collect(); candidates.sort_by_key(|o| o.0); @@ -269,25 +269,38 @@ pub(super) fn activation_error( dep.source_id() ); if !candidates.is_empty() { - let mut names = candidates - .iter() - .take(3) - .map(|c| c.1.as_str()) - .collect::>(); + // If dependency package name is equal to the name of the candidate here + // it may be a prerelease package which hasn't been speficied correctly + if dep.package_name() == candidates[0].1.name() && + candidates[0].1.package_id().version().is_prerelease() { + msg.push_str("prerelease package needs to be specified explicitly\n"); + msg.push_str(&format!( + "{name} = {{ version = \"{version}\" }}", + name = candidates[0].1.name(), + version = candidates[0].1.package_id().version() + )); + } else { + let mut names = candidates + .iter() + .take(3) + .map(|c| c.1.name().as_str()) + .collect::>(); - if candidates.len() > 3 { - names.push("..."); + if candidates.len() > 3 { + names.push("..."); + } + + msg.push_str("perhaps you meant: "); + msg.push_str(&names.iter().enumerate().fold( + String::default(), + |acc, (i, el)| match i { + 0 => acc + el, + i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, + _ => acc + ", " + el, + }, + )); } - msg.push_str("perhaps you meant: "); - msg.push_str(&names.iter().enumerate().fold( - String::default(), - |acc, (i, el)| match i { - 0 => acc + el, - i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, - _ => acc + ", " + el, - }, - )); msg.push_str("\n"); } msg.push_str("required by "); diff --git a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs index 9fb92d80f..2db759ac1 100644 --- a/tests/testsuite/registry.rs +++ b/tests/testsuite/registry.rs @@ -1395,6 +1395,55 @@ fn use_semver() { p.cargo("build").run(); } +#[cargo_test] +fn use_semver_package_incorrectly() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.1-alpha.0" + authors = [] + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { version = "^0.1", path = "../a" } + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file("b/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: no matching package named `a` found +location searched: [..] 
+prerelease package needs to be specified explicitly +a = { version = \"0.1.1-alpha.0\" } +required by package `b v0.1.0 ([..])` +", + ) + .run(); +} + #[cargo_test] fn only_download_relevant() { let p = project() From 643b660697832bf11eb5ddb934d19224effd66e9 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Tue, 30 Jul 2019 08:55:57 -0700 Subject: [PATCH 15/29] Fix excluding target dirs from backups on OSX This fixes an accidental regression from #6880 identified in #7189 by moving where the configuration of backup preferences happens since it was accidentally never happening due to the folder always having been created. --- src/cargo/core/compiler/layout.rs | 71 ++++++++++++++++--------------- 1 file changed, 37 insertions(+), 34 deletions(-) diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index 40140e8ce..e4ae2bc51 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -109,6 +109,14 @@ impl Layout { /// /// This function will block if the directory is already locked. pub fn at(config: &Config, root: Filesystem) -> CargoResult { + // If the root directory doesn't already exist go ahead and create it + // here. Use this opportunity to exclude it from backups as well if the + // system supports it since this is a freshly created folder. + if !root.as_path_unlocked().exists() { + root.create_dir()?; + exclude_from_backups(root.as_path_unlocked()); + } + // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. @@ -127,42 +135,8 @@ impl Layout { }) } - #[cfg(not(target_os = "macos"))] - fn exclude_from_backups(&self, _: &Path) {} - - #[cfg(target_os = "macos")] - /// Marks files or directories as excluded from Time Machine on macOS - /// - /// This is recommended to prevent derived/temporary files from bloating backups. - fn exclude_from_backups(&self, path: &Path) { - use core_foundation::base::TCFType; - use core_foundation::{number, string, url}; - use std::ptr; - - // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey - let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); - let path = url::CFURL::from_path(path, false); - if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { - unsafe { - url::CFURLSetResourcePropertyForKey( - path.as_concrete_TypeRef(), - is_excluded_key.as_concrete_TypeRef(), - number::kCFBooleanTrue as *const _, - ptr::null_mut(), - ); - } - } - // Errors are ignored, since it's an optional feature and failure - // doesn't prevent Cargo from working - } - /// Makes sure all directories stored in the Layout exist on the filesystem. pub fn prepare(&mut self) -> io::Result<()> { - if fs::metadata(&self.root).is_err() { - fs::create_dir_all(&self.root)?; - self.exclude_from_backups(&self.root); - } - mkdir(&self.deps)?; mkdir(&self.native)?; mkdir(&self.incremental)?; @@ -209,3 +183,32 @@ impl Layout { &self.build } } + +#[cfg(not(target_os = "macos"))] +fn exclude_from_backups(_: &Path) {} + +#[cfg(target_os = "macos")] +/// Marks files or directories as excluded from Time Machine on macOS +/// +/// This is recommended to prevent derived/temporary files from bloating backups. 
+fn exclude_from_backups(path: &Path) { + use core_foundation::base::TCFType; + use core_foundation::{number, string, url}; + use std::ptr; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + let path = url::CFURL::from_path(path, false); + if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { + unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + } + } + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working +} From 577968e44828457dfa62fc10ccd29f42a359b746 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 31 Jul 2019 10:49:32 +0200 Subject: [PATCH 16/29] cargo fmt --- src/cargo/core/resolver/errors.rs | 194 +++++++++++++++--------------- 1 file changed, 98 insertions(+), 96 deletions(-) diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index 359ca186c..6d56cb33c 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -203,113 +203,115 @@ pub(super) fn activation_error( }; candidates.sort_unstable_by(|a, b| b.version().cmp(a.version())); - let mut msg = if !candidates.is_empty() { - let versions = { - let mut versions = candidates - .iter() - .take(3) - .map(|cand| cand.version().to_string()) - .collect::>(); - - if candidates.len() > 3 { - versions.push("...".into()); - } - - versions.join(", ") - }; - - let mut msg = format!( - "failed to select a version for the requirement `{} = \"{}\"`\n \ - candidate versions found which didn't match: {}\n \ - location searched: {}\n", - dep.package_name(), - dep.version_req(), - versions, - registry.describe_source(dep.source_id()), - ); - msg.push_str("required by "); - msg.push_str(&describe_path( - &cx.parents.path_to_bottom(&parent.package_id()), - )); - - // If we have a path dependency with a locked version, then this may - // indicate that we updated a sub-package and forgot to run `cargo - // update`. In this case try to print a helpful error! - if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { - msg.push_str( - "\nconsider running `cargo update` to update \ - a path dependency's locked version", - ); - } - - if registry.is_replaced(dep.source_id()) { - msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); - } - - msg - } else { - // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` - // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
- let mut candidates = Vec::new(); - if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { - return to_resolve_err(e); - }; - candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); - candidates.dedup_by(|a, b| a.name() == b.name()); - let mut candidates: Vec<_> = candidates - .iter() - .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) - .filter(|&(d, _)| d < 4) - .collect(); - candidates.sort_by_key(|o| o.0); - let mut msg = format!( - "no matching package named `{}` found\n\ - location searched: {}\n", - dep.package_name(), - dep.source_id() - ); + let mut msg = if !candidates.is_empty() { - // If dependency package name is equal to the name of the candidate here - // it may be a prerelease package which hasn't been speficied correctly - if dep.package_name() == candidates[0].1.name() && - candidates[0].1.package_id().version().is_prerelease() { - msg.push_str("prerelease package needs to be specified explicitly\n"); - msg.push_str(&format!( - "{name} = {{ version = \"{version}\" }}", - name = candidates[0].1.name(), - version = candidates[0].1.package_id().version() - )); - } else { - let mut names = candidates + let versions = { + let mut versions = candidates .iter() .take(3) - .map(|c| c.1.name().as_str()) + .map(|cand| cand.version().to_string()) .collect::>(); if candidates.len() > 3 { - names.push("..."); + versions.push("...".into()); } - msg.push_str("perhaps you meant: "); - msg.push_str(&names.iter().enumerate().fold( - String::default(), - |acc, (i, el)| match i { - 0 => acc + el, - i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, - _ => acc + ", " + el, - }, - )); + versions.join(", ") + }; + + let mut msg = format!( + "failed to select a version for the requirement `{} = \"{}\"`\n \ + candidate versions found which didn't match: {}\n \ + location searched: {}\n", + dep.package_name(), + dep.version_req(), + versions, + registry.describe_source(dep.source_id()), + ); + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); + + // If we have a path dependency with a locked version, then this may + // indicate that we updated a sub-package and forgot to run `cargo + // update`. In this case try to print a helpful error! + if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { + msg.push_str( + "\nconsider running `cargo update` to update \ + a path dependency's locked version", + ); } - msg.push_str("\n"); - } - msg.push_str("required by "); - msg.push_str(&describe_path( - &cx.parents.path_to_bottom(&parent.package_id()), - )); + if registry.is_replaced(dep.source_id()) { + msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); + } - msg - }; + msg + } else { + // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` + // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
+ let mut candidates = Vec::new(); + if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { + return to_resolve_err(e); + }; + candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); + candidates.dedup_by(|a, b| a.name() == b.name()); + let mut candidates: Vec<_> = candidates + .iter() + .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) + .filter(|&(d, _)| d < 4) + .collect(); + candidates.sort_by_key(|o| o.0); + let mut msg = format!( + "no matching package named `{}` found\n\ + location searched: {}\n", + dep.package_name(), + dep.source_id() + ); + if !candidates.is_empty() { + // If dependency package name is equal to the name of the candidate here + // it may be a prerelease package which hasn't been speficied correctly + if dep.package_name() == candidates[0].1.name() + && candidates[0].1.package_id().version().is_prerelease() + { + msg.push_str("prerelease package needs to be specified explicitly\n"); + msg.push_str(&format!( + "{name} = {{ version = \"{version}\" }}", + name = candidates[0].1.name(), + version = candidates[0].1.package_id().version() + )); + } else { + let mut names = candidates + .iter() + .take(3) + .map(|c| c.1.name().as_str()) + .collect::>(); + + if candidates.len() > 3 { + names.push("..."); + } + + msg.push_str("perhaps you meant: "); + msg.push_str(&names.iter().enumerate().fold( + String::default(), + |acc, (i, el)| match i { + 0 => acc + el, + i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, + _ => acc + ", " + el, + }, + )); + } + + msg.push_str("\n"); + } + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); + + msg + }; if let Some(config) = config { if config.offline() { From 01aae39ee73d7abac02001f073250415cb6d4c74 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 31 Jul 2019 22:46:29 +0300 Subject: [PATCH 17/29] tests: Enable features to fix unstabilized `#[bench]` --- tests/testsuite/bench.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs index 42f441527..354887936 100644 --- a/tests/testsuite/bench.rs +++ b/tests/testsuite/bench.rs @@ -58,7 +58,7 @@ fn bench_bench_implicit() { .file( "src/main.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } @@ -364,7 +364,7 @@ fn bench_with_lib_dep() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; /// @@ -432,7 +432,7 @@ fn bench_with_deep_lib_dep() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate foo; #[cfg(test)] @@ -448,7 +448,7 @@ fn bench_with_deep_lib_dep() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -495,7 +495,7 @@ fn external_bench_explicit() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } @@ -541,7 +541,7 @@ fn external_bench_implicit() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -760,7 +760,7 @@ fn lib_bin_same_name() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn lib_bench(_b: &mut test::Bencher) {} @@ -769,7 
+769,7 @@ fn lib_bin_same_name() { .file( "src/main.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; #[cfg(test)] @@ -804,7 +804,7 @@ fn lib_with_standard_name() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -919,7 +919,7 @@ fn bench_dylib() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] extern crate bar as the_bar; #[cfg(test)] extern crate test; @@ -1061,7 +1061,7 @@ fn bench_with_examples() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[cfg(test)] From daa1bce28340c518824e168727d803ae45310035 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 17 Jul 2019 12:41:27 -0700 Subject: [PATCH 18/29] Enable pipelined compilation by default This commit enables pipelined compilation by default in Cargo now that the requisite support has been stablized in rust-lang/rust#62766. This involved minor updates in a number of locations here and there, but nothing of meat has changed from the original implementation (just tweaks to how rustc is called). --- .../compiler/build_context/target_info.rs | 14 ++++ src/cargo/core/compiler/context/mod.rs | 2 +- src/cargo/core/compiler/mod.rs | 81 +++++++------------ src/cargo/ops/fix.rs | 2 +- tests/testsuite/build_script.rs | 7 +- tests/testsuite/cache_messages.rs | 58 ++++++++++--- tests/testsuite/dep_info.rs | 2 +- tests/testsuite/profile_overrides.rs | 14 ++-- tests/testsuite/rustc_info_cache.rs | 5 ++ tests/testsuite/rustdoc.rs | 6 ++ 10 files changed, 114 insertions(+), 77 deletions(-) diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index ecdb4239c..81a2d622c 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -34,6 +34,7 @@ pub struct TargetInfo { pub rustflags: Vec, /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, + pub supports_pipelining: Option, } /// Kind of each file generated by a Unit, part of `FileType`. @@ -98,6 +99,18 @@ impl TargetInfo { .args(&rustflags) .env_remove("RUSTC_LOG"); + // NOTE: set this unconditionally to `true` once support for `--json` + // rides to stable. + // + // Also note that we only learn about this functionality for the host + // compiler since the host/target rustc are always the same. + let mut pipelining_test = process.clone(); + pipelining_test.args(&["--error-format=json", "--json=artifacts"]); + let supports_pipelining = match kind { + Kind::Host => Some(rustc.cached_output(&pipelining_test).is_ok()), + Kind::Target => None, + }; + let target_triple = requested_target .as_ref() .map(|s| s.as_str()) @@ -179,6 +192,7 @@ impl TargetInfo { "RUSTDOCFLAGS", )?, cfg, + supports_pipelining, }) } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 9325c8c63..7ab5f5b74 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -77,7 +77,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { .config .get_bool("build.pipelining")? 
.map(|t| t.val) - .unwrap_or(false); + .unwrap_or(bcx.host_info.supports_pipelining.unwrap()); Ok(Self { bcx, diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 4962e7745..efe332a93 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -18,7 +18,7 @@ use std::io::Write; use std::path::{Path, PathBuf}; use std::sync::Arc; -use failure::{bail, Error}; +use failure::Error; use lazycell::LazyCell; use log::debug; use same_file::is_same_file; @@ -614,7 +614,6 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("--crate-name").arg(&unit.target.crate_name()); add_path_args(bcx, unit, &mut rustdoc); add_cap_lints(bcx, unit, &mut rustdoc); - add_color(bcx, &mut rustdoc); if unit.kind != Kind::Host { if let Some(ref target) = bcx.build_config.requested_target { @@ -635,7 +634,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } - add_error_format(cx, &mut rustdoc, false, false)?; + add_error_format_and_color(cx, &mut rustdoc, false)?; if let Some(args) = bcx.extra_args_for(unit) { rustdoc.args(args); @@ -722,39 +721,20 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessB } } -fn add_color(bcx: &BuildContext<'_, '_>, cmd: &mut ProcessBuilder) { - let shell = bcx.config.shell(); - let color = if shell.supports_color() { - "always" - } else { - "never" - }; - cmd.args(&["--color", color]); -} - /// Add error-format flags to the command. /// -/// This is rather convoluted right now. The general overview is: -/// - If -Zcache-messages or `build.pipelining` is enabled, Cargo always uses -/// JSON output. This has several benefits, such as being easier to parse, -/// handles changing formats (for replaying cached messages), ensures -/// atomic output (so messages aren't interleaved), etc. -/// - `supports_termcolor` is a temporary flag. rustdoc does not yet support -/// the `--json-rendered` flag, but it is intended to fix that soon. -/// - `short` output is not yet supported for JSON output. We haven't yet -/// decided how this problem will be resolved. Probably either adding -/// "short" to the JSON output, or more ambitiously moving diagnostic -/// rendering to an external library that Cargo can share with rustc. +/// This is somewhat odd right now, but the general overview is that if +/// `-Zcache-messages` or `pipelined` is enabled then Cargo always uses JSON +/// output. This has several benefits, such as being easier to parse, handles +/// changing formats (for replaying cached messages), ensures atomic output (so +/// messages aren't interleaved), etc. /// -/// It is intended in the future that Cargo *always* uses the JSON output, and -/// this function can be simplified. The above issues need to be resolved, the -/// flags need to be stabilized, and we need more testing to ensure there -/// aren't any regressions. -fn add_error_format( +/// It is intended in the future that Cargo *always* uses the JSON output (by +/// turning on cache-messages by default), and this function can be simplified. 
+fn add_error_format_and_color( cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, pipelined: bool, - supports_termcolor: bool, ) -> CargoResult<()> { // If this unit is producing a required rmeta file then we need to know // when the rmeta file is ready so we can signal to the rest of Cargo that @@ -769,26 +749,15 @@ fn add_error_format( // internally understand that we should extract the `rendered` field and // present it if we can. if cx.bcx.build_config.cache_messages() || pipelined { - cmd.arg("--error-format=json").arg("-Zunstable-options"); - if supports_termcolor { - cmd.arg("--json-rendered=termcolor"); + cmd.arg("--error-format=json"); + let mut json = String::from("--json=diagnostic-rendered-ansi"); + if pipelined { + json.push_str(",artifacts"); } if cx.bcx.build_config.message_format == MessageFormat::Short { - // FIXME(rust-lang/rust#60419): right now we have no way of - // turning on JSON messages from the compiler and also asking - // the rendered field to be in the `short` format. - bail!( - "currently `--message-format short` is incompatible with {}", - if pipelined { - "pipelined compilation" - } else { - "cached output" - } - ); - } - if pipelined { - cmd.arg("-Zemit-artifact-notifications"); + json.push_str(",diagnostic-short"); } + cmd.arg(json); } else { match cx.bcx.build_config.message_format { MessageFormat::Human => (), @@ -799,6 +768,13 @@ fn add_error_format( cmd.arg("--error-format").arg("short"); } } + + let color = if cx.bcx.config.shell().supports_color() { + "always" + } else { + "never" + }; + cmd.args(&["--color", color]); } Ok(()) } @@ -829,8 +805,7 @@ fn build_base_args<'a, 'cfg>( cmd.arg("--crate-name").arg(&unit.target.crate_name()); add_path_args(bcx, unit, cmd); - add_color(bcx, cmd); - add_error_format(cx, cmd, cx.rmeta_required(unit), true)?; + add_error_format_and_color(cx, cmd, cx.rmeta_required(unit))?; if !test { for crate_type in crate_types.iter() { @@ -1234,11 +1209,11 @@ fn on_stderr_line( } else { // Remove color information from the rendered string. rustc has not // included color in the past, so to avoid breaking anything, strip it - // out when --json-rendered=termcolor is used. This runs + // out when --json=diagnostic-rendered-ansi is used. This runs // unconditionally under the assumption that Cargo will eventually // move to this as the default mode. Perhaps in the future, cargo // could allow the user to enable/disable color (such as with a - // `--json-rendered` or `--color` or `--message-format` flag). + // `--json` or `--color` or `--message-format` flag). #[derive(serde::Deserialize, serde::Serialize)] struct CompilerMessage { rendered: String, @@ -1304,10 +1279,8 @@ fn replay_output_cache( ) -> Work { let target = target.clone(); let extract_rendered_messages = match format { - MessageFormat::Human => true, + MessageFormat::Human | MessageFormat::Short => true, MessageFormat::Json => false, - // FIXME: short not supported. - MessageFormat::Short => false, }; let mut options = OutputOptions { extract_rendered_messages, diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs index 1ac31974b..a43c68687 100644 --- a/src/cargo/ops/fix.rs +++ b/src/cargo/ops/fix.rs @@ -621,7 +621,7 @@ impl FixArgs { ret.enabled_edition = Some(s[prefix.len()..].to_string()); continue; } - if s.starts_with("--error-format=") || s.starts_with("--json-rendered=") { + if s.starts_with("--error-format=") || s.starts_with("--json=") { // Cargo may add error-format in some cases, but `cargo // fix` wants to add its own. 
continue; diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs index 0952033ba..d26509e29 100644 --- a/tests/testsuite/build_script.rs +++ b/tests/testsuite/build_script.rs @@ -2155,6 +2155,11 @@ fn flags_go_into_tests() { #[cargo_test] fn diamond_passes_args_only_once() { + // FIXME: when pipelining rides to stable, enable this test on all channels. + if !crate::support::is_nightly() { + return; + } + let p = project() .file( "Cargo.toml", @@ -2229,7 +2234,7 @@ fn diamond_passes_args_only_once() { [COMPILING] a v0.5.0 ([..] [RUNNING] `rustc [..]` [COMPILING] foo v0.5.0 ([..] -[RUNNING] `[..]rlib -L native=test` +[RUNNING] `[..]rmeta -L native=test` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) diff --git a/tests/testsuite/cache_messages.rs b/tests/testsuite/cache_messages.rs index b283a285f..2582d1918 100644 --- a/tests/testsuite/cache_messages.rs +++ b/tests/testsuite/cache_messages.rs @@ -54,6 +54,52 @@ fn simple() { assert!(cargo_output2.stdout.is_empty()); } +// same as `simple`, except everything is using the short format +#[cargo_test] +fn simple_short() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + let p = project() + .file( + "src/lib.rs", + " + fn a() {} + fn b() {} + ", + ) + .build(); + + let agnostic_path = Path::new("src").join("lib.rs"); + let agnostic_path_s = agnostic_path.to_str().unwrap(); + + let rustc_output = process("rustc") + .cwd(p.root()) + .args(&["--crate-type=lib", agnostic_path_s, "--error-format=short"]) + .exec_with_output() + .expect("rustc to run"); + + assert!(rustc_output.stdout.is_empty()); + assert!(rustc_output.status.success()); + + let cargo_output1 = p + .cargo("check -Zcache-messages -q --color=never --message-format=short") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output1.stderr)); + // assert!(cargo_output1.stdout.is_empty()); + let cargo_output2 = p + .cargo("check -Zcache-messages -q --message-format=short") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + println!("{}", String::from_utf8_lossy(&cargo_output2.stdout)); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output2.stderr)); + assert!(cargo_output2.stdout.is_empty()); +} + #[cargo_test] fn color() { if !is_nightly() { @@ -334,15 +380,3 @@ fn very_verbose() { .with_stderr_contains("[..]not_used[..]") .run(); } - -#[cargo_test] -fn short_incompatible() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("check -Zcache-messages --message-format=short") - .masquerade_as_nightly_cargo() - .with_stderr( - "[ERROR] currently `--message-format short` is incompatible with cached output", - ) - .with_status(101) - .run(); -} diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs index dce5c4025..439621e42 100644 --- a/tests/testsuite/dep_info.rs +++ b/tests/testsuite/dep_info.rs @@ -511,6 +511,6 @@ fn canonical_path() { assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-lib-foo-*", - &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rlib")], + &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rmeta")], ); } diff --git a/tests/testsuite/profile_overrides.rs b/tests/testsuite/profile_overrides.rs index 8445460b5..17b3c25c1 100644 --- a/tests/testsuite/profile_overrides.rs +++ b/tests/testsuite/profile_overrides.rs @@ -321,17 +321,17 @@ fn profile_override_hierarchy() { p.cargo("build -v").masquerade_as_nightly_cargo().with_stderr_unordered("\ 
[COMPILING] m3 [..] [COMPILING] dep [..] -[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=4 [..] -[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=3 [..] -[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] -[RUNNING] `rustc --crate-name build_script_build m1/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=3 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=4 [..] [COMPILING] m2 [..] -[RUNNING] `rustc --crate-name build_script_build m2/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=2 [..] +[RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=2 [..] [RUNNING] `[..]/m1-[..]/build-script-build` [RUNNING] `[..]/m2-[..]/build-script-build` -[RUNNING] `rustc --crate-name m2 m2/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=2 [..] +[RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=2 [..] [COMPILING] m1 [..] -[RUNNING] `rustc --crate-name m1 m1/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..] [FINISHED] dev [unoptimized + debuginfo] [..] ", ) diff --git a/tests/testsuite/rustc_info_cache.rs b/tests/testsuite/rustc_info_cache.rs index 51dc4a428..ceed53ee3 100644 --- a/tests/testsuite/rustc_info_cache.rs +++ b/tests/testsuite/rustc_info_cache.rs @@ -4,6 +4,11 @@ use std::env; #[cargo_test] fn rustc_info_cache() { + // FIXME: when pipelining rides to stable, enable this test on all channels. + if !crate::support::is_nightly() { + return; + } + let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); diff --git a/tests/testsuite/rustdoc.rs b/tests/testsuite/rustdoc.rs index 652505444..195b47c03 100644 --- a/tests/testsuite/rustdoc.rs +++ b/tests/testsuite/rustdoc.rs @@ -10,6 +10,7 @@ fn rustdoc_simple() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", @@ -27,6 +28,7 @@ fn rustdoc_args() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -66,6 +68,7 @@ fn rustdoc_foo_with_bar_dependency() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps \ --extern [..]` @@ -104,6 +107,7 @@ fn rustdoc_only_bar_dependency() { [DOCUMENTING] bar v0.0.1 ([..]) [RUNNING] `rustdoc --crate-name bar [..]bar/src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] 
\ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -125,6 +129,7 @@ fn rustdoc_same_name_documents_lib() { [DOCUMENTING] foo v0.0.1 ([..]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -168,6 +173,7 @@ fn rustdoc_target() { [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ --target x86_64-unknown-linux-gnu \ -o [CWD]/target/x86_64-unknown-linux-gnu/doc \ + [..] \ -L dependency=[CWD]/target/x86_64-unknown-linux-gnu/debug/deps \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", From 20d9119a484e761189c6a42d370075d74c63f5aa Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Thu, 1 Aug 2019 11:12:29 -0700 Subject: [PATCH 19/29] Revert "Release a jobserver token while locking a file" This reverts commit d19b41f2c6981dcdf5270e8f4582a95e51927e2a. --- src/cargo/util/flock.rs | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs index 96458bdf3..d93223602 100644 --- a/src/cargo/util/flock.rs +++ b/src/cargo/util/flock.rs @@ -321,27 +321,8 @@ fn acquire( let msg = format!("waiting for file lock on {}", msg); config.shell().status_with_color("Blocking", &msg, Cyan)?; - // We're about to block the current process and not really do anything - // productive for what could possibly be a very long time. We could be - // waiting, for example, on another Cargo to finish a download, finish an - // entire build, etc. Since we're not doing anything productive we're not - // making good use of our jobserver token, if we have one. - // - // This can typically come about if `cargo` is invoked from `make` (or some - // other jobserver-providing system). In this situation it's actually best - // if we release the token back to the original jobserver to let some other - // cpu-hungry work continue to make progress. After we're done blocking - // we'll block waiting to reacquire a token as we'll probably be doing cpu - // hungry work ourselves. - let jobserver = config.jobserver_from_env(); - if let Some(server) = jobserver { - server.release_raw()?; - } - let result = block().chain_err(|| format!("failed to lock file: {}", path.display())); - if let Some(server) = jobserver { - server.acquire_raw()?; - } - return Ok(result?); + block().chain_err(|| format!("failed to lock file: {}", path.display()))?; + return Ok(()); #[cfg(all(target_os = "linux", not(target_env = "musl")))] fn is_on_nfs_mount(path: &Path) -> bool { From 4a37adc1045e5dfb26ed8148348aae1529431231 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 4 Aug 2019 11:09:25 -0700 Subject: [PATCH 20/29] Fix an old test. 
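The test re-enabled below exercises the `cargo:rustc-flags` build-script directive, which accepts only `-l` and `-L` flags; in the updated expectations the `-L` search paths show up on both `foo` and the dependent `bar`, while the `-l` flag is applied only to the emitting package. A minimal build script of the kind the fixture writes out (illustrative, mirroring the test below):

```rust
// build.rs sketch: only `-l` and `-L` are accepted in `cargo:rustc-flags`.
fn main() {
    println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2");
}
```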
--- tests/testsuite/build_script.rs | 36 ++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs index d26509e29..1a4277d91 100644 --- a/tests/testsuite/build_script.rs +++ b/tests/testsuite/build_script.rs @@ -195,7 +195,6 @@ fn custom_build_script_wrong_rustc_flags() { .run(); } -/* #[cargo_test] fn custom_build_script_rustc_flags() { let p = project() @@ -211,7 +210,8 @@ fn custom_build_script_rustc_flags() { [dependencies.foo] path = "foo" "#, - ).file("src/main.rs", "fn main() {}") + ) + .file("src/main.rs", "fn main() {}") .file( "foo/Cargo.toml", r#" @@ -222,7 +222,8 @@ fn custom_build_script_rustc_flags() { authors = ["wycats@example.com"] build = "build.rs" "#, - ).file("foo/src/lib.rs", "") + ) + .file("foo/src/lib.rs", "") .file( "foo/build.rs", r#" @@ -230,25 +231,28 @@ fn custom_build_script_rustc_flags() { println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); } "#, - ).build(); + ) + .build(); - // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works). p.cargo("build --verbose") - .with_status(101) .with_stderr( "\ -[COMPILING] bar v0.5.0 ([CWD]) -[RUNNING] `rustc --crate-name test [CWD]/src/lib.rs --crate-type lib -C debuginfo=2 \ - -C metadata=[..] \ - -C extra-filename=-[..] \ - --out-dir [CWD]/target \ - --emit=[..]link \ - -L [CWD]/target \ - -L [CWD]/target/deps` +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name build_script_build foo/build.rs [..] +[RUNNING] `[..]build-script-build` +[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ + -L dependency=[CWD]/target/debug/deps \ + -L /dummy/path1 -L /dummy/path2 -l nonexistinglib` +[COMPILING] bar [..] +[RUNNING] `rustc --crate-name bar src/main.rs [..]\ + -L dependency=[CWD]/target/debug/deps \ + --extern foo=[..]libfoo-[..] \ + -L /dummy/path1 -L /dummy/path2` +[FINISHED] dev [..] ", - ).run(); + ) + .run(); } -*/ #[cargo_test] fn links_no_build_cmd() { From aba2aa23ef3bfbf5fad791a10231c1476aaf9628 Mon Sep 17 00:00:00 2001 From: Mark Rousskov Date: Sun, 4 Aug 2019 18:09:52 -0400 Subject: [PATCH 21/29] Remove unused AstBuilder --- tests/testsuite/cross_compile.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/testsuite/cross_compile.rs b/tests/testsuite/cross_compile.rs index c725c1a01..e0d47ac70 100644 --- a/tests/testsuite/cross_compile.rs +++ b/tests/testsuite/cross_compile.rs @@ -209,7 +209,6 @@ fn plugin_deps() { use syntax::source_map::Span; use syntax::ast::*; use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; - use syntax::ext::build::AstBuilder; #[plugin_registrar] pub fn foo(reg: &mut Registry) { @@ -306,7 +305,6 @@ fn plugin_to_the_max() { use syntax::source_map::Span; use syntax::ast::*; use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; - use syntax::ext::build::AstBuilder; use syntax::ptr::P; #[plugin_registrar] From 88d3b4ce42dfb0625a8c7a06d4a0e475b0bfa9e6 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Mon, 5 Aug 2019 08:44:13 -0700 Subject: [PATCH 22/29] Remove debug panic in package-features. 
--- src/cargo/ops/resolve.rs | 1 - tests/testsuite/features.rs | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 98e1a7087..d29cc31bd 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -261,7 +261,6 @@ pub fn resolve_with_previous<'cfg>( failure::bail!("cannot specify features for packages outside of workspace"); } members.extend(ws.members()); - panic!("tested?"); } } for member in members { diff --git a/tests/testsuite/features.rs b/tests/testsuite/features.rs index 1fe51e19e..4fa585dd9 100644 --- a/tests/testsuite/features.rs +++ b/tests/testsuite/features.rs @@ -1425,6 +1425,9 @@ fn combining_features_and_package() { p.cargo("run -Z package-features --package bar --features main") .masquerade_as_nightly_cargo() .run(); + p.cargo("build -Z package-features --package dep") + .masquerade_as_nightly_cargo() + .run(); } #[cargo_test] From bd31c081efdbb9ed5d4add121638c064c0d519d2 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Fri, 2 Aug 2019 16:05:18 -0700 Subject: [PATCH 23/29] Clean up build script stuff and documentation. --- src/cargo/core/compiler/build_context/mod.rs | 22 ++- src/cargo/core/compiler/context/mod.rs | 54 +++--- .../compiler/context/unit_dependencies.rs | 7 + src/cargo/core/compiler/custom_build.rs | 174 +++++++++--------- src/cargo/core/compiler/fingerprint.rs | 11 +- src/cargo/core/compiler/job_queue.rs | 4 +- src/cargo/core/compiler/mod.rs | 46 +++-- src/cargo/core/compiler/output_depinfo.rs | 2 +- src/cargo/core/manifest.rs | 1 + 9 files changed, 180 insertions(+), 141 deletions(-) diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 189566321..7eab9dea4 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -16,6 +16,11 @@ mod target_info; pub use self::target_info::{FileFlavor, TargetInfo}; /// The build context, containing all information about a build task. +/// +/// It is intended that this is mostly static information. Stuff that mutates +/// during the build can be found in the parent `Context`. (I say mostly, +/// because this has internal caching, but nothing that should be observable +/// or require &mut.) pub struct BuildContext<'a, 'cfg> { /// The workspace the build is for. pub ws: &'a Workspace<'cfg>, @@ -183,6 +188,17 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec> { self.extra_compiler_args.get(unit) } + + /// If a build script is overridden, this returns the `BuildOutput` to use. + /// + /// `lib_name` is the `links` library name and `kind` is whether it is for + /// Host or Target. + pub fn script_override(&self, lib_name: &str, kind: Kind) -> Option<&BuildOutput> { + match kind { + Kind::Host => self.host_config.overrides.get(lib_name), + Kind::Target => self.target_config.overrides.get(lib_name), + } + } } /// Information required to build for a target. @@ -192,7 +208,11 @@ pub struct TargetConfig { pub ar: Option, /// The path of the linker for this target. pub linker: Option, - /// Special build options for any necessary input files (filename -> options). + /// Build script override for the given library name. + /// + /// Any package with a `links` value for the given library name will skip + /// running its build script and instead use the given output from the + /// config file. 
pub overrides: HashMap, } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 7ab5f5b74..47f03308c 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -3,7 +3,7 @@ use std::collections::{HashMap, HashSet}; use std::ffi::OsStr; use std::fmt::Write; use std::path::PathBuf; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use filetime::FileTime; use jobserver::Client; @@ -15,7 +15,7 @@ use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::{internal, profile, Config}; use super::build_plan::BuildPlan; -use super::custom_build::{self, BuildDeps, BuildScripts, BuildState}; +use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; use super::fingerprint::Fingerprint; use super::job_queue::JobQueue; use super::layout::Layout; @@ -28,21 +28,45 @@ mod compilation_files; use self::compilation_files::CompilationFiles; pub use self::compilation_files::{Metadata, OutputFile}; +/// Collection of all the stuff that is needed to perform a build. pub struct Context<'a, 'cfg> { + /// Mostly static information about the build task. pub bcx: &'a BuildContext<'a, 'cfg>, + /// A large collection of information about the result of the entire compilation. pub compilation: Compilation<'cfg>, - pub build_state: Arc, - pub build_script_overridden: HashSet<(PackageId, Kind)>, + /// Output from build scripts, updated after each build script runs. + pub build_script_outputs: Arc>, + /// Dependencies (like rerun-if-changed) declared by a build script. + /// This is *only* populated from the output from previous runs. + /// If the build script hasn't ever been run, then it must be run. pub build_explicit_deps: HashMap, BuildDeps>, + /// Fingerprints used to detect if a unit is out-of-date. pub fingerprints: HashMap, Arc>, + /// Cache of file mtimes to reduce filesystem hits. pub mtime_cache: HashMap, + /// A set used to track which units have been compiled. + /// A unit may appear in the job graph multiple times as a dependency of + /// multiple packages, but it only needs to run once. pub compiled: HashSet>, + /// Linking information for each `Unit`. + /// See `build_map` for details. pub build_scripts: HashMap, Arc>, + /// Used to check the `links` field in the manifest is not duplicated and + /// is used correctly. pub links: Links, + /// Job server client to manage concurrency with other processes. pub jobserver: Client, + /// "Primary" packages are the ones the user selected on the command-line + /// with `-p` flags. If no flags are specified, then it is the defaults + /// based on the current directory and the default workspace members. primary_packages: HashSet, + /// The dependency graph of units to compile. unit_dependencies: HashMap, Vec>>, + /// An abstraction of the files and directories that will be generated by + /// the compilation. This is `None` until after `unit_dependencies` has + /// been computed. files: Option>, + /// Cache of packages, populated when they are downloaded. 
package_cache: HashMap, /// A flag indicating whether pipelining is enabled for this compilation @@ -82,7 +106,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { Ok(Self { bcx, compilation: Compilation::new(bcx)?, - build_state: Arc::new(BuildState::new(&bcx.host_config, &bcx.target_config)), + build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())), fingerprints: HashMap::new(), mtime_cache: HashMap::new(), compiled: HashSet::new(), @@ -90,8 +114,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> { build_explicit_deps: HashMap::new(), links: Links::new(), jobserver, - build_script_overridden: HashSet::new(), - primary_packages: HashSet::new(), unit_dependencies: HashMap::new(), files: None, @@ -228,7 +250,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { super::output_depinfo(&mut self, unit)?; } - for (&(ref pkg, _), output) in self.build_state.outputs.lock().unwrap().iter() { + for (&(ref pkg, _), output) in self.build_script_outputs.lock().unwrap().iter() { self.compilation .cfgs .entry(pkg.clone()) @@ -338,22 +360,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // // TODO: this ideally should be `-> &[Unit<'a>]`. pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec> { - // If this build script's execution has been overridden then we don't - // actually depend on anything, we've reached the end of the dependency - // chain as we've got all the info we're gonna get. - // - // Note there's a subtlety about this piece of code! The - // `build_script_overridden` map here is populated in - // `custom_build::build_map` which you need to call before inspecting - // dependencies. However, that code itself calls this method and - // gets a full pre-filtered set of dependencies. This is not super - // obvious, and clear, but it does work at the moment. - if unit.target.is_custom_build() { - let key = (unit.pkg.package_id(), unit.kind); - if self.build_script_overridden.contains(&key) { - return Vec::new(); - } - } self.unit_dependencies[unit].clone() } diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs index 6e80f31a1..5cb41e638 100644 --- a/src/cargo/core/compiler/context/unit_dependencies.rs +++ b/src/cargo/core/compiler/context/unit_dependencies.rs @@ -258,6 +258,13 @@ fn compute_deps_custom_build<'a, 'cfg>( unit: &Unit<'a>, bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult, UnitFor)>> { + if let Some(links) = unit.pkg.manifest().links() { + if bcx.script_override(links, unit.kind).is_some() { + // Overridden build scripts don't have any dependencies. + return Ok(Vec::new()); + } + } + // When not overridden, then the dependencies to run a build script are: // // 1. Compiling the build script itself. diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 7978d1d48..db621aa7e 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -3,7 +3,7 @@ use std::collections::{BTreeSet, HashSet}; use std::fs; use std::path::{Path, PathBuf}; use std::str; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; use crate::core::compiler::job_queue::JobState; use crate::core::PackageId; @@ -13,7 +13,7 @@ use crate::util::Cfg; use crate::util::{self, internal, paths, profile}; use super::job::{Freshness, Job, Work}; -use super::{fingerprint, Context, Kind, TargetConfig, Unit}; +use super::{fingerprint, Context, Kind, Unit}; /// Contains the parsed output of a custom build script. 
#[derive(Clone, Debug, Hash)] @@ -39,48 +39,57 @@ pub struct BuildOutput { pub warnings: Vec, } -/// Map of packages to build info. -pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>; - -/// Build info and overrides. -pub struct BuildState { - pub outputs: Mutex, - overrides: HashMap<(String, Kind), BuildOutput>, -} +/// Map of packages to build script output. +/// +/// This initially starts out as empty. Overridden build scripts get +/// inserted during `build_map`. The rest of the entries are added +/// immediately after each build script runs. +pub type BuildScriptOutputs = HashMap<(PackageId, Kind), BuildOutput>; +/// Linking information for a `Unit`. +/// +/// See `build_map` for more details. #[derive(Default)] pub struct BuildScripts { - // Cargo will use this `to_link` vector to add `-L` flags to compiles as we - // propagate them upwards towards the final build. Note, however, that we - // need to preserve the ordering of `to_link` to be topologically sorted. - // This will ensure that build scripts which print their paths properly will - // correctly pick up the files they generated (if there are duplicates - // elsewhere). - // - // To preserve this ordering, the (id, kind) is stored in two places, once - // in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain - // this as we're building interactively below to ensure that the memory - // usage here doesn't blow up too much. - // - // For more information, see #2354. + /// Cargo will use this `to_link` vector to add `-L` flags to compiles as we + /// propagate them upwards towards the final build. Note, however, that we + /// need to preserve the ordering of `to_link` to be topologically sorted. + /// This will ensure that build scripts which print their paths properly will + /// correctly pick up the files they generated (if there are duplicates + /// elsewhere). + /// + /// To preserve this ordering, the (id, kind) is stored in two places, once + /// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain + /// this as we're building interactively below to ensure that the memory + /// usage here doesn't blow up too much. + /// + /// For more information, see #2354. pub to_link: Vec<(PackageId, Kind)>, + /// This is only used while constructing `to_link` to avoid duplicates. seen_to_link: HashSet<(PackageId, Kind)>, + /// Host-only dependencies that have build scripts. + /// + /// This is the set of transitive dependencies that are host-only + /// (proc-macro, plugin, build-dependency) that contain a build script. + /// Any `BuildOutput::library_paths` path relative to `target` will be + /// added to LD_LIBRARY_PATH so that the compiler can find any dynamic + /// libraries a build script may have generated. pub plugins: BTreeSet, } +/// Dependency information as declared by a build script. #[derive(Debug)] pub struct BuildDeps { + /// Absolute path to the file in the target directory that stores the + /// output of the build script. pub build_script_output: PathBuf, + /// Files that trigger a rebuild if they change. pub rerun_if_changed: Vec, + /// Environment variables that trigger a rebuild if they change. pub rerun_if_env_changed: Vec, } /// Prepares a `Work` that executes the target as a custom build script. -/// -/// The `req` given is the requirement which this run of the build script will -/// prepare work for. If the requirement is specified as both the target and the -/// host platforms it is assumed that the two are equal and the build script is -/// only run once (not twice). 
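The `to_link`/`seen_to_link` pairing documented above is an insertion-ordered-set pattern: the `Vec` preserves the topological order that the eventual `-L` flags rely on, while the `HashSet` keeps the duplicate check cheap. A generic sketch of the pattern (illustrative only, not code from this patch):

```rust
use std::collections::HashSet;
use std::hash::Hash;

/// Keeps first-seen order in a `Vec` and O(1) membership in a `HashSet`,
/// mirroring how `to_link` and `seen_to_link` are maintained together.
#[derive(Default)]
struct OrderedSet<T: Hash + Eq + Clone> {
    in_order: Vec<T>,
    seen: HashSet<T>,
}

impl<T: Hash + Eq + Clone> OrderedSet<T> {
    fn push(&mut self, value: T) {
        // Only the first occurrence is recorded; later duplicates are ignored.
        if self.seen.insert(value.clone()) {
            self.in_order.push(value);
        }
    }
}

fn main() {
    let mut set = OrderedSet::default();
    for id in ["b", "a", "b", "c"].iter().copied() {
        set.push(id);
    }
    assert_eq!(set.in_order, ["b", "a", "c"]);
}
```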
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult { let _p = profile::start(format!( "build script prepare: {}/{}", @@ -90,7 +99,8 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRe let key = (unit.pkg.package_id(), unit.kind); - if cx.build_script_overridden.contains(&key) { + if cx.build_script_outputs.lock().unwrap().contains_key(&key) { + // The output is already set, thus the build script is overridden. fingerprint::prepare_target(cx, unit, false) } else { build_work(cx, unit) @@ -233,7 +243,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes .collect::>() }; let pkg_name = unit.pkg.to_string(); - let build_state = Arc::clone(&cx.build_state); + let build_script_outputs = Arc::clone(&cx.build_script_outputs); let id = unit.pkg.package_id(); let output_file = script_run_dir.join("output"); let err_file = script_run_dir.join("stderr"); @@ -242,11 +252,11 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes let all = ( id, pkg_name.clone(), - Arc::clone(&build_state), + Arc::clone(&build_script_outputs), output_file.clone(), script_out_dir.clone(), ); - let build_scripts = super::load_build_deps(cx, unit); + let build_scripts = cx.build_scripts.get(unit).cloned(); let kind = unit.kind; let json_messages = bcx.build_config.emit_json(); let extra_verbose = bcx.config.extra_verbose(); @@ -279,17 +289,17 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes // dynamic library search path in case the build script depended on any // native dynamic libraries. if !build_plan { - let build_state = build_state.outputs.lock().unwrap(); + let build_script_outputs = build_script_outputs.lock().unwrap(); for (name, id) in lib_deps { let key = (id, kind); - let state = build_state.get(&key).ok_or_else(|| { + let script_output = build_script_outputs.get(&key).ok_or_else(|| { internal(format!( "failed to locate build state for env \ vars: {}/{:?}", id, kind )) })?; - let data = &state.metadata; + let data = &script_output.metadata; for &(ref key, ref value) in data.iter() { cmd.env( &format!("DEP_{}_{}", super::envify(&name), super::envify(key)), @@ -298,7 +308,12 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes } } if let Some(build_scripts) = build_scripts { - super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &host_target_root)?; + super::add_plugin_deps( + &mut cmd, + &build_script_outputs, + &build_scripts, + &host_target_root, + )?; } } @@ -346,7 +361,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes if json_messages { emit_build_output(state, &parsed_output, id); } - build_state.insert(id, kind, parsed_output); + build_script_outputs + .lock() + .unwrap() + .insert((id, kind), parsed_output); Ok(()) }); @@ -354,7 +372,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes // itself to run when we actually end up just discarding what we calculated // above. 
let fresh = Work::new(move |state| { - let (id, pkg_name, build_state, output_file, script_out_dir) = all; + let (id, pkg_name, build_script_outputs, output_file, script_out_dir) = all; let output = match prev_output { Some(output) => output, None => BuildOutput::parse_file( @@ -369,7 +387,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes emit_build_output(state, &output, id); } - build_state.insert(id, kind, output); + build_script_outputs + .lock() + .unwrap() + .insert((id, kind), output); Ok(()) }); @@ -386,25 +407,6 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes Ok(job) } -impl BuildState { - pub fn new(host_config: &TargetConfig, target_config: &TargetConfig) -> BuildState { - let mut overrides = HashMap::new(); - let i1 = host_config.overrides.iter().map(|p| (p, Kind::Host)); - let i2 = target_config.overrides.iter().map(|p| (p, Kind::Target)); - for ((name, output), kind) in i1.chain(i2) { - overrides.insert((name.clone(), kind), output.clone()); - } - BuildState { - outputs: Mutex::new(HashMap::new()), - overrides, - } - } - - fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) { - self.outputs.lock().unwrap().insert((id, kind), output); - } -} - impl BuildOutput { pub fn parse_file( path: &Path, @@ -471,6 +473,7 @@ impl BuildOutput { script_out_dir.to_str().unwrap(), ); + // Keep in sync with TargetConfig::new. match key { "rustc-flags" => { let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?; @@ -597,14 +600,21 @@ impl BuildDeps { } } -/// Computes the `build_scripts` map in the `Context` which tracks what build -/// scripts each package depends on. +/// Computes several maps in `Context`: +/// - `build_scripts`: A map that tracks which build scripts each package +/// depends on. +/// - `build_explicit_deps`: Dependency statements emitted by build scripts +/// from a previous run. +/// - `build_script_outputs`: Pre-populates this with any overridden build +/// scripts. /// -/// The global `build_scripts` map lists for all (package, kind) tuples what set -/// of packages' build script outputs must be considered. For example this lists -/// all dependencies' `-L` flags which need to be propagated transitively. +/// The important one here is `build_scripts`, which for each `(package, +/// kind)` stores a `BuildScripts` object which contains a list of +/// dependencies with build scripts that the unit should consider when +/// linking. For example this lists all dependencies' `-L` flags which need to +/// be propagated transitively. /// -/// The given set of targets to this function is the initial set of +/// The given set of units to this function is the initial set of /// targets/profiles which are being built. pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> { let mut ret = HashMap::new(); @@ -628,20 +638,15 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca return Ok(&out[unit]); } - let key = unit - .pkg - .manifest() - .links() - .map(|l| (l.to_string(), unit.kind)); - let build_state = &cx.build_state; - if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) { - let key = (unit.pkg.package_id(), unit.kind); - cx.build_script_overridden.insert(key); - build_state - .outputs - .lock() - .unwrap() - .insert(key, output.clone()); + // If there is a build script override, pre-fill the build output. 
+ if let Some(links) = unit.pkg.manifest().links() { + if let Some(output) = cx.bcx.script_override(links, unit.kind) { + let key = (unit.pkg.package_id(), unit.kind); + cx.build_script_outputs + .lock() + .unwrap() + .insert(key, output.clone()); + } } let mut ret = BuildScripts::default(); @@ -650,6 +655,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca add_to_link(&mut ret, unit.pkg.package_id(), unit.kind); } + // Load any dependency declarations from a previous run. if unit.mode.is_run_custom_build() { parse_previous_explicit_deps(cx, unit)?; } @@ -658,16 +664,16 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca // to rustc invocation caching schemes, so be sure to generate the same // set of build script dependency orderings via sorting the targets that // come out of the `Context`. - let mut targets = cx.dep_targets(unit); - targets.sort_by_key(|u| u.pkg.package_id()); + let mut dependencies = cx.dep_targets(unit); + dependencies.sort_by_key(|u| u.pkg.package_id()); - for unit in targets.iter() { - let dep_scripts = build(out, cx, unit)?; + for dep_unit in dependencies.iter() { + let dep_scripts = build(out, cx, dep_unit)?; - if unit.target.for_host() { + if dep_unit.target.for_host() { ret.plugins .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned()); - } else if unit.target.linkable() { + } else if dep_unit.target.linkable() { for &(pkg, kind) in dep_scripts.to_link.iter() { add_to_link(&mut ret, pkg, kind); } diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index 3ce3c04a0..e7cccc818 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -286,12 +286,12 @@ pub fn prepare_target<'a, 'cfg>( // build script's fingerprint after it's executed. We do this by // using the `build_script_local_fingerprints` function which returns a // thunk we can invoke on a foreign thread to calculate this. - let state = Arc::clone(&cx.build_state); + let build_script_outputs = Arc::clone(&cx.build_script_outputs); let key = (unit.pkg.package_id(), unit.kind); let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit); let output_path = cx.build_explicit_deps[unit].build_script_output.clone(); Work::new(move |_| { - let outputs = state.outputs.lock().unwrap(); + let outputs = build_script_outputs.lock().unwrap(); let outputs = &outputs[&key]; let deps = BuildDeps::new(&output_path, Some(outputs)); @@ -1264,8 +1264,11 @@ fn build_script_override_fingerprint<'a, 'cfg>( cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>, ) -> Option { - let state = cx.build_state.outputs.lock().unwrap(); - let output = state.get(&(unit.pkg.package_id(), unit.kind))?; + // Build script output is only populated at this stage when it is + // overridden. + let build_script_outputs = cx.build_script_outputs.lock().unwrap(); + // Returns None if it is not overridden. 
+ let output = build_script_outputs.get(&(unit.pkg.package_id(), unit.kind))?; let s = format!( "overridden build state with hash: {}", util::hash_u64(output) diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs index 1b999be7f..cea9e4e23 100644 --- a/src/cargo/core/compiler/job_queue.rs +++ b/src/cargo/core/compiler/job_queue.rs @@ -534,9 +534,9 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> { unit: &Unit<'a>, cx: &mut Context<'_, '_>, ) -> CargoResult<()> { - let output = cx.build_state.outputs.lock().unwrap(); + let outputs = cx.build_script_outputs.lock().unwrap(); let bcx = &mut cx.bcx; - if let Some(output) = output.get(&(unit.pkg.package_id(), unit.kind)) { + if let Some(output) = outputs.get(&(unit.pkg.package_id(), unit.kind)) { if !output.warnings.is_empty() { if let Some(msg) = msg { writeln!(bcx.config.shell().err(), "{}\n", msg)?; diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index efe332a93..cf885e775 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -29,7 +29,7 @@ pub use self::build_context::{BuildContext, FileFlavor, TargetConfig, TargetInfo use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest}; pub use self::context::Context; -pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts}; +pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; pub use self::job::Freshness; use self::job::{Job, Work}; use self::job_queue::{JobQueue, JobState}; @@ -192,9 +192,9 @@ fn rustc<'a, 'cfg>( let kind = unit.kind; // Prepare the native lib state (extra `-L` and `-l` flags). - let build_state = cx.build_state.clone(); + let build_script_outputs = Arc::clone(&cx.build_script_outputs); let current_id = unit.pkg.package_id(); - let build_deps = load_build_deps(cx, unit); + let build_scripts = cx.build_scripts.get(unit).cloned(); // If we are a binary and the package also contains a library, then we // don't pass the `-l` flags. @@ -242,20 +242,20 @@ fn rustc<'a, 'cfg>( // located somewhere in there. // Finally, if custom environment variables have been produced by // previous build scripts, we include them in the rustc invocation. - if let Some(build_deps) = build_deps { - let build_state = build_state.outputs.lock().unwrap(); + if let Some(build_scripts) = build_scripts { + let script_outputs = build_script_outputs.lock().unwrap(); if !build_plan { add_native_deps( &mut rustc, - &build_state, - &build_deps, + &script_outputs, + &build_scripts, pass_l_flag, pass_cdylib_link_args, current_id, )?; - add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?; + add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?; } - add_custom_env(&mut rustc, &build_state, current_id, kind)?; + add_custom_env(&mut rustc, &script_outputs, current_id, kind)?; } for output in outputs.iter() { @@ -341,16 +341,16 @@ fn rustc<'a, 'cfg>( // present in `state`) to the command provided. 
fn add_native_deps( rustc: &mut ProcessBuilder, - build_state: &BuildMap, + build_script_outputs: &BuildScriptOutputs, build_scripts: &BuildScripts, pass_l_flag: bool, pass_cdylib_link_args: bool, current_id: PackageId, ) -> CargoResult<()> { for key in build_scripts.to_link.iter() { - let output = build_state.get(key).ok_or_else(|| { + let output = build_script_outputs.get(key).ok_or_else(|| { internal(format!( - "couldn't find build state for {}/{:?}", + "couldn't find build script output for {}/{:?}", key.0, key.1 )) })?; @@ -381,12 +381,12 @@ fn rustc<'a, 'cfg>( // been put there by one of the `build_scripts`) to the command provided. fn add_custom_env( rustc: &mut ProcessBuilder, - build_state: &BuildMap, + build_script_outputs: &BuildScriptOutputs, current_id: PackageId, kind: Kind, ) -> CargoResult<()> { let key = (current_id, kind); - if let Some(output) = build_state.get(&key) { + if let Some(output) = build_script_outputs.get(&key) { for &(ref name, ref value) in output.env.iter() { rustc.env(name, value); } @@ -522,16 +522,12 @@ fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> { Ok(()) } -fn load_build_deps(cx: &Context<'_, '_>, unit: &Unit<'_>) -> Option> { - cx.build_scripts.get(unit).cloned() -} - -// For all plugin dependencies, add their -L paths (now calculated and -// present in `state`) to the dynamic library load path for the command to -// execute. +// For all plugin dependencies, add their -L paths (now calculated and present +// in `build_script_outputs`) to the dynamic library load path for the command +// to execute. fn add_plugin_deps( rustc: &mut ProcessBuilder, - build_state: &BuildMap, + build_script_outputs: &BuildScriptOutputs, build_scripts: &BuildScripts, root_output: &PathBuf, ) -> CargoResult<()> { @@ -539,7 +535,7 @@ fn add_plugin_deps( let search_path = rustc.get_env(var).unwrap_or_default(); let mut search_path = env::split_paths(&search_path).collect::>(); for &id in build_scripts.plugins.iter() { - let output = build_state + let output = build_script_outputs .get(&(id, Kind::Host)) .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?; search_path.append(&mut filter_dynamic_search_path( @@ -645,14 +641,14 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.args(bcx.rustdocflags_args(unit)); let name = unit.pkg.name().to_string(); - let build_state = cx.build_state.clone(); + let build_script_outputs = Arc::clone(&cx.build_script_outputs); let key = (unit.pkg.package_id(), unit.kind); let package_id = unit.pkg.package_id(); let target = unit.target.clone(); let mut output_options = OutputOptions::new(cx, unit); Ok(Work::new(move |state| { - if let Some(output) = build_state.outputs.lock().unwrap().get(&key) { + if let Some(output) = build_script_outputs.lock().unwrap().get(&key) { for cfg in output.cfgs.iter() { rustdoc.arg("--cfg").arg(cfg); } diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index 86cb7b218..a7acc8c4e 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -59,7 +59,7 @@ fn add_deps_for_unit<'a, 'b>( // Add rerun-if-changed dependencies let key = (unit.pkg.package_id(), unit.kind); - if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) { + if let Some(output) = context.build_script_outputs.lock().unwrap().get(&key) { for path in &output.rerun_if_changed { deps.insert(path.into()); } diff --git a/src/cargo/core/manifest.rs 
b/src/cargo/core/manifest.rs index 2c57c9e4a..acda0e570 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -761,6 +761,7 @@ impl Target { pub fn documented(&self) -> bool { self.doc } + // A plugin, proc-macro, or build-script. pub fn for_host(&self) -> bool { self.for_host }

From 45699e9f21fc9da4fe34b5d27c08fb3c3a6fb839 Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Mon, 5 Aug 2019 09:42:28 -0700
Subject: [PATCH 24/29] Add support for customizing JSON diagnostics from Cargo

As of #7143, Cargo enables pipelined compilation by default, which affects how
the compiler is invoked, especially with respect to JSON messages. In some
testing this has proven to cause quite a few issues with rustbuild's current
integration with Cargo. This commit adds features to Cargo to address that: the
ability to customize the stream of JSON messages coming from Cargo.

Cargo now mirrors rustc in how the JSON stream can be configured. Multiple
`--message-format` arguments are now supported, and the value specified is a
comma-separated list of directives. In addition to the existing `human`,
`short`, and `json` directives, these new directives have been added:

* `json-render-diagnostics` - instructs Cargo to render rustc diagnostics
  itself and only print JSON messages for artifacts and Cargo's own messages.
* `json-diagnostic-short` - indicates that the `rendered` field of rustc
  diagnostics should use the "short" rendering.
* `json-diagnostic-rendered-ansi` - indicates that the `rendered` field of
  rustc diagnostics should embed ANSI color codes.

The first option, `json-render-diagnostics`, will be used by rustbuild
unconditionally. Additionally, `json-diagnostic-short` will be used
conditionally based on the input to rustbuild itself. This should be enough for
external tools to customize how Cargo is invoked and how all kinds of JSON
diagnostics get printed, and it should be relatively easy to tweak and extend
as necessary.
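To make concrete what an external consumer of this new stream might look like, here is a minimal, hypothetical sketch (not part of the patch): a tool that runs downstream of `cargo build --message-format json-diagnostic-rendered-ansi`, reads the JSON messages from its stdin, and prints the pre-rendered diagnostics itself. It assumes `serde_json` is available as a dependency; the `reason` and `message.rendered` field names follow Cargo's existing JSON message layout.

    // Hypothetical consumer of Cargo's JSON stream (illustration only).
    // Assumes the output of `cargo build --message-format json-diagnostic-rendered-ansi`
    // is piped to this program's stdin, and that `serde_json` is a dependency.
    use std::io::{self, BufRead};

    fn main() -> io::Result<()> {
        let stdin = io::stdin();
        for line in stdin.lock().lines() {
            let line = line?;
            // Skip anything that is not valid JSON.
            let msg: serde_json::Value = match serde_json::from_str(&line) {
                Ok(v) => v,
                Err(_) => continue,
            };
            // rustc diagnostics are forwarded with `reason == "compiler-message"`;
            // the pre-rendered (possibly ANSI-colored) text is in `message.rendered`.
            if msg["reason"] == "compiler-message" {
                if let Some(rendered) = msg["message"]["rendered"].as_str() {
                    eprint!("{}", rendered);
                }
            }
        }
        Ok(())
    }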
--- CHANGELOG.md | 12 +- src/cargo/core/compiler/build_config.rs | 17 ++- src/cargo/core/compiler/mod.rs | 155 +++++++++++++--------- src/cargo/util/command_prelude.rs | 89 +++++++++---- src/doc/man/generated/cargo-bench.html | 20 ++- src/doc/man/generated/cargo-build.html | 20 ++- src/doc/man/generated/cargo-check.html | 20 ++- src/doc/man/generated/cargo-doc.html | 20 ++- src/doc/man/generated/cargo-fix.html | 20 ++- src/doc/man/generated/cargo-metadata.html | 2 +- src/doc/man/generated/cargo-run.html | 20 ++- src/doc/man/generated/cargo-rustc.html | 20 ++- src/doc/man/generated/cargo-rustdoc.html | 20 ++- src/doc/man/generated/cargo-test.html | 20 ++- src/doc/man/options-message-format.adoc | 14 +- src/etc/man/cargo-bench.1 | 50 ++++++- src/etc/man/cargo-build.1 | 50 ++++++- src/etc/man/cargo-check.1 | 50 ++++++- src/etc/man/cargo-clean.1 | 6 +- src/etc/man/cargo-doc.1 | 50 ++++++- src/etc/man/cargo-fetch.1 | 6 +- src/etc/man/cargo-fix.1 | 50 ++++++- src/etc/man/cargo-generate-lockfile.1 | 6 +- src/etc/man/cargo-help.1 | 6 +- src/etc/man/cargo-init.1 | 6 +- src/etc/man/cargo-install.1 | 6 +- src/etc/man/cargo-locate-project.1 | 6 +- src/etc/man/cargo-login.1 | 6 +- src/etc/man/cargo-metadata.1 | 6 +- src/etc/man/cargo-new.1 | 6 +- src/etc/man/cargo-owner.1 | 6 +- src/etc/man/cargo-package.1 | 6 +- src/etc/man/cargo-pkgid.1 | 6 +- src/etc/man/cargo-publish.1 | 6 +- src/etc/man/cargo-run.1 | 50 ++++++- src/etc/man/cargo-rustc.1 | 50 ++++++- src/etc/man/cargo-rustdoc.1 | 50 ++++++- src/etc/man/cargo-search.1 | 6 +- src/etc/man/cargo-test.1 | 50 ++++++- src/etc/man/cargo-uninstall.1 | 6 +- src/etc/man/cargo-update.1 | 6 +- src/etc/man/cargo-vendor.1 | 21 ++- src/etc/man/cargo-verify-project.1 | 6 +- src/etc/man/cargo-version.1 | 6 +- src/etc/man/cargo-yank.1 | 6 +- src/etc/man/cargo.1 | 8 +- tests/testsuite/build.rs | 5 +- tests/testsuite/fix.rs | 2 +- tests/testsuite/main.rs | 1 + tests/testsuite/message_format.rs | 126 ++++++++++++++++++ 50 files changed, 974 insertions(+), 228 deletions(-) create mode 100644 tests/testsuite/message_format.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index c3c1a7f08..ab11e1a2b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,10 +5,20 @@ ### Added +- Cargo build pipelining has been enabled by default to leverage more idle CPU + parallelism during builds. + [#7143](https://github.com/rust-lang/cargo/pull/7143) +- The `--message-format` option to Cargo can now be specified multiple times and + accepts a comma-separated list of values. In addition to the previous values + it also now accepts `json-diagnostic-short` and + `json-diagnostic-rendered-ansi` which configures the output coming from rustc + in `json` message mode. + [#7214](https://github.com/rust-lang/cargo/pull/7214) + ### Changed ### Fixed -- (Nightly only): Fixed exponential blowup when using CARGO_BUILD_PIPELINING. +- (Nightly only): Fixed exponential blowup when using `CARGO_BUILD_PIPELINING`. [#7062](https://github.com/rust-lang/cargo/pull/7062) - Fixed using the wrong directory when updating git repositories when using the `git-fetch-with-cli` config option, and the `GIT_DIR` environment diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs index 7f795c442..3c3350cdb 100644 --- a/src/cargo/core/compiler/build_config.rs +++ b/src/cargo/core/compiler/build_config.rs @@ -112,7 +112,10 @@ impl BuildConfig { /// Whether or not the *user* wants JSON output. Whether or not rustc /// actually uses JSON is decided in `add_error_format`. 
pub fn emit_json(&self) -> bool { - self.message_format == MessageFormat::Json + match self.message_format { + MessageFormat::Json { .. } => true, + _ => false, + } } pub fn test(&self) -> bool { @@ -123,7 +126,17 @@ impl BuildConfig { #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum MessageFormat { Human, - Json, + Json { + /// Whether rustc diagnostics are rendered by cargo or included into the + /// output stream. + render_diagnostics: bool, + /// Whether the `rendered` field of rustc diagnostics are using the + /// "short" rendering. + short: bool, + /// Whether the `rendered` field of rustc diagnostics embed ansi color + /// codes. + ansi: bool, + }, Short, } diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index efe332a93..5cc6f2163 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -754,27 +754,47 @@ fn add_error_format_and_color( if pipelined { json.push_str(",artifacts"); } - if cx.bcx.build_config.message_format == MessageFormat::Short { - json.push_str(",diagnostic-short"); + match cx.bcx.build_config.message_format { + MessageFormat::Short | MessageFormat::Json { short: true, .. } => { + json.push_str(",diagnostic-short"); + } + _ => {} } cmd.arg(json); } else { + let mut color = true; match cx.bcx.build_config.message_format { MessageFormat::Human => (), - MessageFormat::Json => { + MessageFormat::Json { + ansi, + short, + render_diagnostics, + } => { cmd.arg("--error-format").arg("json"); + // If ansi is explicitly requested, enable it. If we're + // rendering diagnostics ourselves then also enable it because + // we'll figure out what to do with the colors later. + if ansi || render_diagnostics { + cmd.arg("--json=diagnostic-rendered-ansi"); + } + if short { + cmd.arg("--json=diagnostic-short"); + } + color = false; } MessageFormat::Short => { cmd.arg("--error-format").arg("short"); } } - let color = if cx.bcx.config.shell().supports_color() { - "always" - } else { - "never" - }; - cmd.args(&["--color", color]); + if color { + let color = if cx.bcx.config.shell().supports_color() { + "always" + } else { + "never" + }; + cmd.args(&["--color", color]); + } } Ok(()) } @@ -1094,9 +1114,8 @@ impl Kind { } struct OutputOptions { - /// Get the `"rendered"` field from the JSON output and display it on - /// stderr instead of the JSON message. - extract_rendered_messages: bool, + /// What format we're emitting from Cargo itself. + format: MessageFormat, /// Look for JSON message that indicates .rmeta file is available for /// pipelined compilation. look_for_metadata_directive: bool, @@ -1110,7 +1129,6 @@ struct OutputOptions { impl OutputOptions { fn new<'a>(cx: &Context<'a, '_>, unit: &Unit<'a>) -> OutputOptions { - let extract_rendered_messages = cx.bcx.build_config.message_format != MessageFormat::Json; let look_for_metadata_directive = cx.rmeta_required(unit); let color = cx.bcx.config.shell().supports_color(); let cache_cell = if cx.bcx.build_config.cache_messages() { @@ -1122,7 +1140,7 @@ impl OutputOptions { None }; OutputOptions { - extract_rendered_messages, + format: cx.bcx.build_config.message_format, look_for_metadata_directive, color, cache_cell, @@ -1179,55 +1197,66 @@ fn on_stderr_line( } }; - // In some modes of compilation Cargo switches the compiler to JSON mode - // but the user didn't request that so we still want to print pretty rustc - // colorized diagnostics. 
In those cases (`extract_rendered_messages`) we - // take a look at the JSON blob we go, see if it's a relevant diagnostics, - // and if so forward just that diagnostic for us to print. - if options.extract_rendered_messages { - #[derive(serde::Deserialize)] - struct CompilerMessage { - rendered: String, - } - if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { - // state.stderr will add a newline - if error.rendered.ends_with('\n') { - error.rendered.pop(); + // Depending on what we're emitting from Cargo itself, we figure out what to + // do with this JSON message. + match options.format { + // In the "human" output formats (human/short) or if diagnostic messages + // from rustc aren't being included in the output of Cargo's JSON + // messages then we extract the diagnostic (if present) here and handle + // it ourselves. + MessageFormat::Human + | MessageFormat::Short + | MessageFormat::Json { + render_diagnostics: true, + .. + } => { + #[derive(serde::Deserialize)] + struct CompilerMessage { + rendered: String, } - let rendered = if options.color { - error.rendered - } else { - // Strip only fails if the the Writer fails, which is Cursor - // on a Vec, which should never fail. - strip_ansi_escapes::strip(&error.rendered) + if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { + // state.stderr will add a newline + if error.rendered.ends_with('\n') { + error.rendered.pop(); + } + let rendered = if options.color { + error.rendered + } else { + // Strip only fails if the the Writer fails, which is Cursor + // on a Vec, which should never fail. + strip_ansi_escapes::strip(&error.rendered) + .map(|v| String::from_utf8(v).expect("utf8")) + .expect("strip should never fail") + }; + state.stderr(rendered); + return Ok(()); + } + } + + // Remove color information from the rendered string. When pipelining is + // enabled and/or when cached messages are enabled we're always asking + // for ANSI colors from rustc, so unconditionally postprocess here and + // remove ansi color codes. + MessageFormat::Json { ansi: false, .. } => { + #[derive(serde::Deserialize, serde::Serialize)] + struct CompilerMessage { + rendered: String, + #[serde(flatten)] + other: std::collections::BTreeMap, + } + if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { + error.rendered = strip_ansi_escapes::strip(&error.rendered) .map(|v| String::from_utf8(v).expect("utf8")) - .expect("strip should never fail") - }; - state.stderr(rendered); - return Ok(()); - } - } else { - // Remove color information from the rendered string. rustc has not - // included color in the past, so to avoid breaking anything, strip it - // out when --json=diagnostic-rendered-ansi is used. This runs - // unconditionally under the assumption that Cargo will eventually - // move to this as the default mode. Perhaps in the future, cargo - // could allow the user to enable/disable color (such as with a - // `--json` or `--color` or `--message-format` flag). 
- #[derive(serde::Deserialize, serde::Serialize)] - struct CompilerMessage { - rendered: String, - #[serde(flatten)] - other: std::collections::BTreeMap, - } - if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { - error.rendered = strip_ansi_escapes::strip(&error.rendered) - .map(|v| String::from_utf8(v).expect("utf8")) - .unwrap_or(error.rendered); - let new_line = serde_json::to_string(&error)?; - let new_msg: Box = serde_json::from_str(&new_line)?; - compiler_message = new_msg; + .unwrap_or(error.rendered); + let new_line = serde_json::to_string(&error)?; + let new_msg: Box = serde_json::from_str(&new_line)?; + compiler_message = new_msg; + } } + + // If ansi colors are desired then we should be good to go! We can just + // pass through this message as-is. + MessageFormat::Json { ansi: true, .. } => {} } // In some modes of execution we will execute rustc with `-Z @@ -1278,12 +1307,8 @@ fn replay_output_cache( color: bool, ) -> Work { let target = target.clone(); - let extract_rendered_messages = match format { - MessageFormat::Human | MessageFormat::Short => true, - MessageFormat::Json => false, - }; let mut options = OutputOptions { - extract_rendered_messages, + format, look_for_metadata_directive: false, color, cache_cell: None, diff --git a/src/cargo/util/command_prelude.rs b/src/cargo/util/command_prelude.rs index bb534be72..e8963cec1 100644 --- a/src/cargo/util/command_prelude.rs +++ b/src/cargo/util/command_prelude.rs @@ -1,7 +1,3 @@ -use std::ffi::{OsStr, OsString}; -use std::fs; -use std::path::PathBuf; - use crate::core::compiler::{BuildConfig, MessageFormat}; use crate::core::Workspace; use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl}; @@ -14,6 +10,10 @@ use crate::util::{ }; use crate::CargoResult; use clap::{self, SubCommand}; +use failure::bail; +use std::ffi::{OsStr, OsString}; +use std::fs; +use std::path::PathBuf; pub use crate::core::compiler::CompileMode; pub use crate::{CliError, CliResult, Config}; @@ -134,13 +134,7 @@ pub trait AppExt: Sized { } fn arg_message_format(self) -> Self { - self._arg( - opt("message-format", "Error format") - .value_name("FMT") - .case_insensitive(true) - .possible_values(&["human", "json", "short"]) - .default_value("human"), - ) + self._arg(multi_opt("message-format", "FMT", "Error format")) } fn arg_build_plan(self) -> Self { @@ -301,23 +295,70 @@ pub trait ArgMatchesExt { self._values_of("package"), )?; - let message_format = match self._value_of("message-format") { - None => MessageFormat::Human, - Some(f) => { - if f.eq_ignore_ascii_case("json") { - MessageFormat::Json - } else if f.eq_ignore_ascii_case("human") { - MessageFormat::Human - } else if f.eq_ignore_ascii_case("short") { - MessageFormat::Short - } else { - panic!("Impossible message format: {:?}", f) + let mut message_format = None; + let default_json = MessageFormat::Json { + short: false, + ansi: false, + render_diagnostics: false, + }; + for fmt in self._values_of("message-format") { + for fmt in fmt.split(',') { + let fmt = fmt.to_ascii_lowercase(); + match fmt.as_str() { + "json" => { + if message_format.is_some() { + bail!("cannot specify two kinds of `message-format` arguments"); + } + message_format = Some(default_json); + } + "human" => { + if message_format.is_some() { + bail!("cannot specify two kinds of `message-format` arguments"); + } + message_format = Some(MessageFormat::Human); + } + "short" => { + if message_format.is_some() { + bail!("cannot specify two kinds of `message-format` arguments"); + } 
+ message_format = Some(MessageFormat::Short); + } + "json-render-diagnostics" => { + if message_format.is_none() { + message_format = Some(default_json); + } + match &mut message_format { + Some(MessageFormat::Json { + render_diagnostics, .. + }) => *render_diagnostics = true, + _ => bail!("cannot specify two kinds of `message-format` arguments"), + } + } + "json-diagnostic-short" => { + if message_format.is_none() { + message_format = Some(default_json); + } + match &mut message_format { + Some(MessageFormat::Json { short, .. }) => *short = true, + _ => bail!("cannot specify two kinds of `message-format` arguments"), + } + } + "json-diagnostic-rendered-ansi" => { + if message_format.is_none() { + message_format = Some(default_json); + } + match &mut message_format { + Some(MessageFormat::Json { ansi, .. }) => *ansi = true, + _ => bail!("cannot specify two kinds of `message-format` arguments"), + } + } + s => bail!("invalid message format specifier: `{}`", s), } } - }; + } let mut build_config = BuildConfig::new(config, self.jobs()?, &self.target(), mode)?; - build_config.message_format = message_format; + build_config.message_format = message_format.unwrap_or(MessageFormat::Human); build_config.release = self._is_present("release"); build_config.build_plan = self._is_present("build-plan"); if build_config.build_plan { diff --git a/src/doc/man/generated/cargo-bench.html b/src/doc/man/generated/cargo-bench.html index dc00b1fe1..a2505612a 100644 --- a/src/doc/man/generated/cargo-bench.html +++ b/src/doc/man/generated/cargo-bench.html @@ -301,17 +301,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-build.html b/src/doc/man/generated/cargo-build.html index 593d454ca..da9bbf7e5 100644 --- a/src/doc/man/generated/cargo-build.html +++ b/src/doc/man/generated/cargo-build.html @@ -235,17 +235,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-check.html b/src/doc/man/generated/cargo-check.html index 01e434270..13107b515 100644 --- a/src/doc/man/generated/cargo-check.html +++ b/src/doc/man/generated/cargo-check.html @@ -237,17 +237,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-doc.html b/src/doc/man/generated/cargo-doc.html index 1b07f0f94..0550e5022 100644 --- a/src/doc/man/generated/cargo-doc.html +++ b/src/doc/man/generated/cargo-doc.html @@ -205,17 +205,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-fix.html b/src/doc/man/generated/cargo-fix.html index 3f870dad4..bec13f66a 100644 --- a/src/doc/man/generated/cargo-fix.html +++ b/src/doc/man/generated/cargo-fix.html @@ -308,17 +308,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-metadata.html b/src/doc/man/generated/cargo-metadata.html index 9a0cfc7d3..2598013e7 100644 --- a/src/doc/man/generated/cargo-metadata.html +++ b/src/doc/man/generated/cargo-metadata.html @@ -35,7 +35,7 @@ for a Rust API for reading the metadata.

-{
+{
     /* Array of all packages in the workspace.
        It also includes all feature-enabled dependencies unless --no-deps is used.
     */
diff --git a/src/doc/man/generated/cargo-run.html b/src/doc/man/generated/cargo-run.html
index abadc739f..608568ce6 100644
--- a/src/doc/man/generated/cargo-run.html
+++ b/src/doc/man/generated/cargo-run.html
@@ -168,17 +168,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-rustc.html b/src/doc/man/generated/cargo-rustc.html index 37ff9606c..9133521b0 100644 --- a/src/doc/man/generated/cargo-rustc.html +++ b/src/doc/man/generated/cargo-rustc.html @@ -229,17 +229,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-rustdoc.html b/src/doc/man/generated/cargo-rustdoc.html index c0bc147e5..88c4a5612 100644 --- a/src/doc/man/generated/cargo-rustdoc.html +++ b/src/doc/man/generated/cargo-rustdoc.html @@ -242,17 +242,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/generated/cargo-test.html b/src/doc/man/generated/cargo-test.html index 0419c076a..86d410df2 100644 --- a/src/doc/man/generated/cargo-test.html +++ b/src/doc/man/generated/cargo-test.html @@ -326,17 +326,33 @@ terminal.

--message-format FMT
-The output format for diagnostic messages. Valid values:
+The output format for diagnostic messages. Can be specified multiple times
+and consists of comma-separated values. Valid values:
   • human (default): Display in a human-readable text format.
+  • short: Emit shorter, human-readable text messages.
   • json: Emit JSON messages to stdout.
-  • short: Emit shorter, human-readable text messages.
+  • json-diagnostic-short: Ensure the rendered field of JSON messages contains
+    the "short" rendering from rustc.
+  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages
+    contains embedded ANSI color codes for respecting rustc’s default color
+    scheme.
+  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics
+    in JSON messages printed, but instead Cargo itself should render the
+    JSON diagnostics coming from rustc. Cargo’s own JSON diagnostics and
+    others coming from rustc are still emitted.

diff --git a/src/doc/man/options-message-format.adoc b/src/doc/man/options-message-format.adoc index 6da9c26bd..fa5922a5d 100644 --- a/src/doc/man/options-message-format.adoc +++ b/src/doc/man/options-message-format.adoc @@ -1,6 +1,16 @@ *--message-format* _FMT_:: - The output format for diagnostic messages. Valid values: + The output format for diagnostic messages. Can be specified multiple times + and consists of comma-separated values. Valid values: + - `human` (default): Display in a human-readable text format. -- `json`: Emit JSON messages to stdout. - `short`: Emit shorter, human-readable text messages. +- `json`: Emit JSON messages to stdout. +- `json-diagnostic-short`: Ensure the `rendered` field of JSON messages contains + the "short" rendering from rustc. +- `json-diagnostic-rendered-ansi`: Ensure the `rendered` field of JSON messages + contains embedded ANSI color codes for respecting rustc's default color + scheme. +- `json-render-diagnostics`: Instruct Cargo to not include rustc diagnostics in + in JSON messages printed, but instead Cargo itself should render the + JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others + coming from rustc are still emitted. diff --git a/src/etc/man/cargo-bench.1 b/src/etc/man/cargo-bench.1 index 629273951..82b30f1cf 100644 --- a/src/etc/man/cargo-bench.1 +++ b/src/etc/man/cargo-bench.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-bench .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-08 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-BENCH" "1" "2019-05-08" "\ \&" "\ \&" +.TH "CARGO\-BENCH" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -347,7 +347,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -368,6 +369,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -379,7 +391,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. 
.RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-build.1 b/src/etc/man/cargo-build.1 index eeb3d9805..86b91a895 100644 --- a/src/etc/man/cargo-build.1 +++ b/src/etc/man/cargo-build.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-build .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-08 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-BUILD" "1" "2019-05-08" "\ \&" "\ \&" +.TH "CARGO\-BUILD" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -256,7 +256,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -277,6 +278,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -288,7 +300,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. .RE .RE .sp diff --git a/src/etc/man/cargo-check.1 b/src/etc/man/cargo-check.1 index 2ae71dea3..43bf72a35 100644 --- a/src/etc/man/cargo-check.1 +++ b/src/etc/man/cargo-check.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-check .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-CHECK" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-CHECK" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -256,7 +256,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -277,6 +278,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -288,7 +300,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. 
+\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. .RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-clean.1 b/src/etc/man/cargo-clean.1 index 1afb47c2d..74412bee6 100644 --- a/src/etc/man/cargo-clean.1 +++ b/src/etc/man/cargo-clean.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-clean .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-CLEAN" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-CLEAN" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-doc.1 b/src/etc/man/cargo-doc.1 index 26a91320f..91f176e69 100644 --- a/src/etc/man/cargo-doc.1 +++ b/src/etc/man/cargo-doc.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-doc .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-DOC" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-DOC" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -213,7 +213,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -234,6 +235,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -245,7 +257,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. 
.RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-fetch.1 b/src/etc/man/cargo-fetch.1 index 1fb8c1c17..6eec525ed 100644 --- a/src/etc/man/cargo-fetch.1 +++ b/src/etc/man/cargo-fetch.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-fetch .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-12 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-FETCH" "1" "2019-05-12" "\ \&" "\ \&" +.TH "CARGO\-FETCH" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-fix.1 b/src/etc/man/cargo-fix.1 index 25ac7e7e7..48dbe6d52 100644 --- a/src/etc/man/cargo-fix.1 +++ b/src/etc/man/cargo-fix.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-fix .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-FIX" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-FIX" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -326,7 +326,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -347,6 +348,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -358,7 +370,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. 
.RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-generate-lockfile.1 b/src/etc/man/cargo-generate-lockfile.1 index 107b8c45e..b43dd9f66 100644 --- a/src/etc/man/cargo-generate-lockfile.1 +++ b/src/etc/man/cargo-generate-lockfile.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-generate-lockfile .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-GENERATE\-LOCKFILE" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-GENERATE\-LOCKFILE" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-help.1 b/src/etc/man/cargo-help.1 index 30e036d5a..17a5a9636 100644 --- a/src/etc/man/cargo-help.1 +++ b/src/etc/man/cargo-help.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-help .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2018-12-20 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-HELP" "1" "2018-12-20" "\ \&" "\ \&" +.TH "CARGO\-HELP" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-init.1 b/src/etc/man/cargo-init.1 index 1a55f5c57..300d76818 100644 --- a/src/etc/man/cargo-init.1 +++ b/src/etc/man/cargo-init.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-init .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-01-23 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-INIT" "1" "2019-01-23" "\ \&" "\ \&" +.TH "CARGO\-INIT" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-install.1 b/src/etc/man/cargo-install.1 index 73998642f..4b76a9a88 100644 --- a/src/etc/man/cargo-install.1 +++ b/src/etc/man/cargo-install.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-install .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-06-10 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-07-15 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-INSTALL" "1" "2019-06-10" "\ \&" "\ \&" +.TH "CARGO\-INSTALL" "1" "2019-07-15" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-locate-project.1 b/src/etc/man/cargo-locate-project.1 index 9d5dca9f3..731ef6d3f 100644 --- a/src/etc/man/cargo-locate-project.1 +++ b/src/etc/man/cargo-locate-project.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-locate-project .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2018-12-20 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-LOCATE\-PROJECT" "1" "2018-12-20" "\ \&" "\ \&" +.TH "CARGO\-LOCATE\-PROJECT" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-login.1 b/src/etc/man/cargo-login.1 index 2f9ec810b..0da2cd739 100644 --- a/src/etc/man/cargo-login.1 +++ b/src/etc/man/cargo-login.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-login .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-01-23 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-LOGIN" "1" "2019-01-23" "\ \&" "\ \&" +.TH 
"CARGO\-LOGIN" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-metadata.1 b/src/etc/man/cargo-metadata.1 index 46ab46658..6792e538b 100644 --- a/src/etc/man/cargo-metadata.1 +++ b/src/etc/man/cargo-metadata.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-metadata .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-20 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-METADATA" "1" "2019-05-20" "\ \&" "\ \&" +.TH "CARGO\-METADATA" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-new.1 b/src/etc/man/cargo-new.1 index 894b5ab6e..b8266140c 100644 --- a/src/etc/man/cargo-new.1 +++ b/src/etc/man/cargo-new.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-new .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-01-23 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-NEW" "1" "2019-01-23" "\ \&" "\ \&" +.TH "CARGO\-NEW" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-owner.1 b/src/etc/man/cargo-owner.1 index 8e798a3b2..d0eda6c84 100644 --- a/src/etc/man/cargo-owner.1 +++ b/src/etc/man/cargo-owner.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-owner .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-02-05 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-OWNER" "1" "2019-02-05" "\ \&" "\ \&" +.TH "CARGO\-OWNER" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-package.1 b/src/etc/man/cargo-package.1 index dfd592e24..b27d7bc83 100644 --- a/src/etc/man/cargo-package.1 +++ b/src/etc/man/cargo-package.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-package .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-06-10 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-07-15 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-PACKAGE" "1" "2019-06-10" "\ \&" "\ \&" +.TH "CARGO\-PACKAGE" "1" "2019-07-15" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-pkgid.1 b/src/etc/man/cargo-pkgid.1 index 1b80afa3c..3a54667fa 100644 --- a/src/etc/man/cargo-pkgid.1 +++ b/src/etc/man/cargo-pkgid.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-pkgid .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-PKGID" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-PKGID" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-publish.1 b/src/etc/man/cargo-publish.1 index c457a003c..f0f686855 100644 --- a/src/etc/man/cargo-publish.1 +++ b/src/etc/man/cargo-publish.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-publish .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-08 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-PUBLISH" "1" "2019-05-08" "\ \&" "\ \&" +.TH "CARGO\-PUBLISH" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g 
.ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-run.1 b/src/etc/man/cargo-run.1 index da42337d2..c40844fa4 100644 --- a/src/etc/man/cargo-run.1 +++ b/src/etc/man/cargo-run.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-run .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-06-21 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-07-15 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-RUN" "1" "2019-06-21" "\ \&" "\ \&" +.TH "CARGO\-RUN" "1" "2019-07-15" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -176,7 +176,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -197,6 +198,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -208,7 +220,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. .RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-rustc.1 b/src/etc/man/cargo-rustc.1 index f4cdd997f..a97628142 100644 --- a/src/etc/man/cargo-rustc.1 +++ b/src/etc/man/cargo-rustc.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-rustc .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-RUSTC" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-RUSTC" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -245,7 +245,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -266,6 +267,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -277,7 +289,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. 
+\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. .RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-rustdoc.1 b/src/etc/man/cargo-rustdoc.1 index 4e1cc3693..ef322c0b6 100644 --- a/src/etc/man/cargo-rustdoc.1 +++ b/src/etc/man/cargo-rustdoc.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-rustdoc .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-RUSTDOC" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-RUSTDOC" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -253,7 +253,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -274,6 +275,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -285,7 +297,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. 
.RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-search.1 b/src/etc/man/cargo-search.1 index a789ac6c0..9672a58f3 100644 --- a/src/etc/man/cargo-search.1 +++ b/src/etc/man/cargo-search.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-search .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-01-23 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-SEARCH" "1" "2019-01-23" "\ \&" "\ \&" +.TH "CARGO\-SEARCH" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-test.1 b/src/etc/man/cargo-test.1 index c856952c4..241101ab2 100644 --- a/src/etc/man/cargo-test.1 +++ b/src/etc/man/cargo-test.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-test .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-08 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-TEST" "1" "2019-05-08" "\ \&" "\ \&" +.TH "CARGO\-TEST" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -389,7 +389,8 @@ May also be specified with the \fBterm.color\fP .sp \fB\-\-message\-format\fP \fIFMT\fP .RS 4 -The output format for diagnostic messages. Valid values: +The output format for diagnostic messages. Can be specified multiple times +and consists of comma\-separated values. Valid values: .sp .RS 4 .ie n \{\ @@ -410,6 +411,17 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} \fBjson\fP: Emit JSON messages to stdout. .RE .sp @@ -421,7 +433,35 @@ The output format for diagnostic messages. Valid values: . sp -1 . IP \(bu 2.3 .\} -\fBshort\fP: Emit shorter, human\-readable text messages. +\fBjson\-diagnostic\-short\fP: Ensure the \fBrendered\fP field of JSON messages contains +the "short" rendering from rustc. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-diagnostic\-rendered\-ansi\fP: Ensure the \fBrendered\fP field of JSON messages +contains embedded ANSI color codes for respecting rustc\(cqs default color +scheme. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\-render\-diagnostics\fP: Instruct Cargo to not include rustc diagnostics in +in JSON messages printed, but instead Cargo itself should render the +JSON diagnostics coming from rustc. Cargo\(cqs own JSON diagnostics and others +coming from rustc are still emitted. 
.RE .RE .SS "Manifest Options" diff --git a/src/etc/man/cargo-uninstall.1 b/src/etc/man/cargo-uninstall.1 index 36ea8a806..277bc824d 100644 --- a/src/etc/man/cargo-uninstall.1 +++ b/src/etc/man/cargo-uninstall.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-uninstall .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2018-12-20 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-UNINSTALL" "1" "2018-12-20" "\ \&" "\ \&" +.TH "CARGO\-UNINSTALL" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-update.1 b/src/etc/man/cargo-update.1 index 835e6c314..fc302c112 100644 --- a/src/etc/man/cargo-update.1 +++ b/src/etc/man/cargo-update.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-update .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-UPDATE" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-UPDATE" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-vendor.1 b/src/etc/man/cargo-vendor.1 index 3937cc949..495f13645 100644 --- a/src/etc/man/cargo-vendor.1 +++ b/src/etc/man/cargo-vendor.1 @@ -2,12 +2,12 @@ .\" Title: cargo-vendor .\" Author: [see the "AUTHOR(S)" section] .\" Generator: Asciidoctor 2.0.8 -.\" Date: 2019-04-29 +.\" Date: 2019-07-15 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-VENDOR" "1" "2019-04-29" "\ \&" "\ \&" +.TH "CARGO\-VENDOR" "1" "2019-07-15" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -149,6 +149,23 @@ These may be used in environments where you want to assert that the \fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network access. .RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." 
+.RE .SH "ENVIRONMENT" .sp See \c diff --git a/src/etc/man/cargo-verify-project.1 b/src/etc/man/cargo-verify-project.1 index a39501340..96e59cc8f 100644 --- a/src/etc/man/cargo-verify-project.1 +++ b/src/etc/man/cargo-verify-project.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-verify-project .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-04-16 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-VERIFY\-PROJECT" "1" "2019-04-16" "\ \&" "\ \&" +.TH "CARGO\-VERIFY\-PROJECT" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-version.1 b/src/etc/man/cargo-version.1 index 4527d1490..676392752 100644 --- a/src/etc/man/cargo-version.1 +++ b/src/etc/man/cargo-version.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-version .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2018-12-20 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-VERSION" "1" "2018-12-20" "\ \&" "\ \&" +.TH "CARGO\-VERSION" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo-yank.1 b/src/etc/man/cargo-yank.1 index c3a637c7a..591d92d22 100644 --- a/src/etc/man/cargo-yank.1 +++ b/src/etc/man/cargo-yank.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo-yank .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-01-23 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-YANK" "1" "2019-01-23" "\ \&" "\ \&" +.TH "CARGO\-YANK" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 diff --git a/src/etc/man/cargo.1 b/src/etc/man/cargo.1 index 757510051..94f590748 100644 --- a/src/etc/man/cargo.1 +++ b/src/etc/man/cargo.1 @@ -1,13 +1,13 @@ '\" t .\" Title: cargo .\" Author: [see the "AUTHOR(S)" section] -.\" Generator: Asciidoctor 1.5.8 -.\" Date: 2019-05-20 +.\" Generator: Asciidoctor 2.0.8 +.\" Date: 2019-06-07 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO" "1" "2019-05-20" "\ \&" "\ \&" +.TH "CARGO" "1" "2019-06-07" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -487,4 +487,4 @@ See \c for issues. .SH "SEE ALSO" .sp -\fBrustc\fP(1), \fBrustdoc\fP(1) +\fBrustc\fP(1), \fBrustdoc\fP(1) \ No newline at end of file diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs index 7c6323425..c80e528df 100644 --- a/tests/testsuite/build.rs +++ b/tests/testsuite/build.rs @@ -3264,11 +3264,10 @@ fn wrong_message_format_option() { .build(); p.cargo("build --message-format XML") - .with_status(1) + .with_status(101) .with_stderr_contains( "\ -error: 'XML' isn't a valid value for '--message-format ' -[possible values: human, json, short] +error: invalid message format specifier: `xml` ", ) .run(); diff --git a/tests/testsuite/fix.rs b/tests/testsuite/fix.rs index 225362ac1..3f3f1228a 100644 --- a/tests/testsuite/fix.rs +++ b/tests/testsuite/fix.rs @@ -937,7 +937,7 @@ fn both_edition_migrate_flags() { error: The argument '--edition' cannot be used with '--prepare-for ' USAGE: - cargo[..] fix --edition --message-format + cargo[..] 
fix --edition For more information try --help "; diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs index 618c92ceb..a3b212459 100644 --- a/tests/testsuite/main.rs +++ b/tests/testsuite/main.rs @@ -57,6 +57,7 @@ mod local_registry; mod lockfile_compat; mod login; mod member_errors; +mod message_format; mod metabuild; mod metadata; mod net_config; diff --git a/tests/testsuite/message_format.rs b/tests/testsuite/message_format.rs new file mode 100644 index 000000000..06969e7a6 --- /dev/null +++ b/tests/testsuite/message_format.rs @@ -0,0 +1,126 @@ +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn cannot_specify_two() { + if !crate::support::is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .build(); + + let formats = ["human", "json", "short"]; + + let two_kinds = "error: cannot specify two kinds of `message-format` arguments\n"; + for a in formats.iter() { + for b in formats.iter() { + p.cargo(&format!("build --message-format {},{}", a, b)) + .with_status(101) + .with_stderr(two_kinds) + .run(); + } + } +} + +#[cargo_test] +fn double_json_works() { + if !crate::support::is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --message-format json,json-render-diagnostics") + .run(); + p.cargo("build --message-format json,json-diagnostic-short") + .run(); + p.cargo("build --message-format json,json-diagnostic-rendered-ansi") + .run(); + p.cargo("build --message-format json --message-format json-diagnostic-rendered-ansi") + .run(); + p.cargo("build --message-format json-diagnostic-rendered-ansi") + .run(); + p.cargo("build --message-format json-diagnostic-short,json-diagnostic-rendered-ansi") + .run(); +} + +#[cargo_test] +fn cargo_renders() { + if !crate::support::is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = 'foo' + version = '0.1.0' + + [dependencies] + bar = { path = 'bar' } + "#, + ) + .file("src/main.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build --message-format json-render-diagnostics") + .with_status(101) + .with_stdout("{\"reason\":\"compiler-artifact\",[..]") + .with_stderr_contains( + "\ +[COMPILING] bar [..] +[COMPILING] foo [..] +error[..]`main`[..] +", + ) + .run(); +} + +#[cargo_test] +fn cargo_renders_short() { + if !crate::support::is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "") + .build(); + + p.cargo("build --message-format json-render-diagnostics,json-diagnostic-short") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo [..] +error[..]`main`[..] +", + ) + .with_stderr_does_not_contain("note:") + .run(); +} + +#[cargo_test] +fn cargo_renders_ansi() { + if !crate::support::is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "") + .build(); + + p.cargo("build --message-format json-diagnostic-rendered-ansi") + .with_status(101) + .with_stdout_contains("[..]\\u001b[38;5;9merror[..]") + .run(); +} From ec9222a345281853cd15baf95683c8d1776cb13f Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Tue, 6 Aug 2019 09:00:43 -0700 Subject: [PATCH 25/29] Fix remap-path-prefix from failing. 
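
The dep-info parser previously failed outright when a path listed in a `.d` file could not be canonicalized. A minimal sketch of the fallback pattern this patch switches to (the helper name `canonical_or_abs` is illustrative and not part of the change itself):

    use std::path::{Path, PathBuf};

    /// Join `file` onto the compiler's working directory and canonicalize it,
    /// falling back to the plain absolute path when canonicalization fails
    /// (for example because --remap-path-prefix rewrote the path recorded in
    /// the dep-info file and the rewritten path does not exist on disk).
    fn canonical_or_abs(rustc_cwd: &Path, file: &Path) -> PathBuf {
        let abs_file = rustc_cwd.join(file);
        abs_file.canonicalize().unwrap_or_else(|_| abs_file.clone())
    }
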
--- src/cargo/core/compiler/fingerprint.rs | 5 ++- tests/testsuite/rustflags.rs | 44 ++++++++++++++++++++++++-- 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index e7cccc818..9940db7d5 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -1581,8 +1581,11 @@ pub fn translate_dep_info( for file in deps { // The path may be absolute or relative, canonical or not. Make sure // it is canonicalized so we are comparing the same kinds of paths. - let canon_file = rustc_cwd.join(file).canonicalize()?; let abs_file = rustc_cwd.join(file); + // If canonicalization fails, just use the abs path. There is currently + // a bug where --remap-path-prefix is affecting .d files, causing them + // to point to non-existent paths. + let canon_file = abs_file.canonicalize().unwrap_or_else(|_| abs_file.clone()); let (ty, path) = if let Ok(stripped) = canon_file.strip_prefix(&target_root) { (DepInfoPathType::TargetRootRelative, stripped) diff --git a/tests/testsuite/rustflags.rs b/tests/testsuite/rustflags.rs index 008bee0f4..0589b49ac 100644 --- a/tests/testsuite/rustflags.rs +++ b/tests/testsuite/rustflags.rs @@ -1,8 +1,10 @@ use std::fs::{self, File}; use std::io::Write; -use crate::support::rustc_host; -use crate::support::{basic_lib_manifest, basic_manifest, paths, project, project_in_home}; +use crate::support::registry::Package; +use crate::support::{ + basic_lib_manifest, basic_manifest, paths, project, project_in_home, rustc_host, +}; #[cargo_test] fn env_rustflags_normal_source() { @@ -1393,3 +1395,41 @@ fn remap_path_prefix_ignored() { .run(); check_metadata_same(); } + +#[cargo_test] +fn remap_path_prefix_works() { + // Check that remap-path-prefix works. + Package::new("bar", "0.1.0") + .file("src/lib.rs", "pub fn f() -> &'static str { file!() }") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = "0.1" + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!("{}", bar::f()); + } + "#, + ) + .build(); + + p.cargo("run") + .env( + "RUSTFLAGS", + format!("--remap-path-prefix={}=/foo", paths::root().display()), + ) + .with_stdout("/foo/home/.cargo/registry/src/[..]/bar-0.1.0/src/lib.rs") + .run(); +} From f3d4c6b8f5727992ccbcd7a8f8f264b7ac73d358 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Wed, 7 Aug 2019 07:49:28 -0700 Subject: [PATCH 26/29] Bump rustfix --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 2fce01b9e..604df2968 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,7 +48,7 @@ num_cpus = "1.0" opener = "0.4" percent-encoding = "2.0" remove_dir_all = "0.5.2" -rustfix = "0.4.4" +rustfix = "0.4.6" same-file = "1" semver = { version = "0.9.0", features = ["serde"] } serde = { version = "1.0.82", features = ['derive'] } From c26e52c6d6a16a75bfcbbbae054f8f057ea02bd4 Mon Sep 17 00:00:00 2001 From: Alexander Sieg Date: Wed, 7 Aug 2019 21:19:06 +0200 Subject: [PATCH 27/29] enable progress bar for FreeBSD As FreeBSD uses a unsigned long for the IOCTL syscall this code would previously fail to compile. Adding a call to into() fixes this problem. This code should still work on other platfroms as into() is able to 'convert' an u32 into a u32. Also change the cfg attributes so that the working code is used. This may also work on other not yet supported platforms, but it was not tested. 
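
As a rough standalone sketch of the resulting terminal-width query (mirroring the code touched below; the free function and its error handling are illustrative only), `.into()` converts the `TIOCGWINSZ` constant to whatever integer type this platform's `ioctl` expects for its request argument, and is a no-op where the types already match:

    use std::mem;

    /// Ask the kernel for the current terminal size of stderr.
    /// Returns the column count, or None if the width cannot be determined.
    fn stderr_width() -> Option<usize> {
        unsafe {
            let mut winsize: libc::winsize = mem::zeroed();
            // `.into()` adapts TIOCGWINSZ to the request type of this
            // platform's ioctl (e.g. an unsigned long on FreeBSD).
            if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 {
                return None;
            }
            if winsize.ws_col > 0 {
                Some(winsize.ws_col as usize)
            } else {
                None
            }
        }
    }
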
--- src/cargo/core/shell.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index 3f1d86003..860d35d10 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -366,7 +366,7 @@ impl ColorChoice { } } -#[cfg(any(target_os = "linux", target_os = "macos"))] +#[cfg(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))] mod imp { use std::mem; @@ -377,7 +377,7 @@ mod imp { pub fn stderr_width() -> Option { unsafe { let mut winsize: libc::winsize = mem::zeroed(); - if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 { + if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 { return None; } if winsize.ws_col > 0 { @@ -396,7 +396,10 @@ mod imp { } } -#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))] +#[cfg(all( + unix, + not(any(target_os = "linux", target_os = "macos", target_os = "freebsd")) +))] mod imp { pub(super) use super::default_err_erase_line as err_erase_line; From c5c7227e42d545f08a35d73696e7b5a08a163a75 Mon Sep 17 00:00:00 2001 From: Alexander Sieg Date: Wed, 7 Aug 2019 21:40:41 +0200 Subject: [PATCH 28/29] fixed unused code warning In the commit c26e52c6d6a16a75bfcbbbae054f8f057ea02bd4 one cfg attribute was forgotten. Here we fix this. --- src/cargo/core/shell.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index 860d35d10..c0c9b1390 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -464,7 +464,13 @@ mod imp { } } -#[cfg(any(all(unix, not(any(target_os = "linux", target_os = "macos"))), windows,))] +#[cfg(any( + all( + unix, + not(any(target_os = "linux", target_os = "macos", target_os = "freebsd")) + ), + windows, +))] fn default_err_erase_line(shell: &mut Shell) { if let Some(max_width) = imp::stderr_width() { let blank = " ".repeat(max_width); From 16f8cc39c814bf46a2b69d87c3f07c34f3b4efb3 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Thu, 8 Aug 2019 12:33:51 -0700 Subject: [PATCH 29/29] Layout docs and cleanup. --- .../compiler/build_context/target_info.rs | 13 +- src/cargo/core/compiler/compilation.rs | 1 + .../compiler/context/compilation_files.rs | 11 +- src/cargo/core/compiler/layout.rs | 168 ++++++++++++------ src/cargo/core/compiler/mod.rs | 1 + src/doc/src/reference/manifest.md | 2 +- 6 files changed, 123 insertions(+), 73 deletions(-) diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index 81a2d622c..cff55cf6f 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -53,14 +53,17 @@ pub struct FileType { /// The kind of file. pub flavor: FileFlavor, /// The suffix for the file (for example, `.rlib`). + /// This is an empty string for executables on Unix-like platforms. suffix: String, /// The prefix for the file (for example, `lib`). + /// This is an empty string for things like executables. prefix: String, - // Wasm bin target will generate two files in deps such as - // "web-stuff.js" and "web_stuff.wasm". Note the different usages of - // "-" and "_". should_replace_hyphens is a flag to indicate that - // we need to convert the stem "web-stuff" to "web_stuff", so we - // won't miss "web_stuff.wasm". + /// Flag to convert hyphen to underscore. + /// + /// wasm bin targets will generate two files in deps such as + /// "web-stuff.js" and "web_stuff.wasm". 
Note the different usages of "-" + /// and "_". This flag indicates that the stem "web-stuff" should be + /// converted to "web_stuff". should_replace_hyphens: bool, } diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index 8c24c2e28..c3a92f335 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -22,6 +22,7 @@ pub struct Doctest { /// A structure returning the result of a compilation. pub struct Compilation<'cfg> { /// An array of all tests created during this compilation. + /// `(package, target, path_to_test_exe)` pub tests: Vec<(Package, Target, PathBuf)>, /// An array of all binaries created. diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index caf7c5f8b..f7e0ee7cf 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -145,7 +145,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { /// target. pub fn out_dir(&self, unit: &Unit<'a>) -> PathBuf { if unit.mode.is_doc() { - self.layout(unit.kind).root().parent().unwrap().join("doc") + self.layout(unit.kind).doc().to_path_buf() } else if unit.mode.is_doc_test() { panic!("doc tests do not have an out dir"); } else if unit.target.is_custom_build() { @@ -169,11 +169,6 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } } - /// Returns the root of the build output tree for the target - pub fn target_root(&self) -> &Path { - self.target.as_ref().unwrap_or(&self.host).dest() - } - /// Returns the root of the build output tree for the host pub fn host_root(&self) -> &Path { self.host.dest() @@ -261,8 +256,8 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { /// (eg a dependent lib). fn link_stem(&self, unit: &Unit<'a>) -> Option<(PathBuf, String)> { let out_dir = self.out_dir(unit); - let bin_stem = self.bin_stem(unit); - let file_stem = self.file_stem(unit); + let bin_stem = self.bin_stem(unit); // Stem without metadata. + let file_stem = self.file_stem(unit); // Stem with metadata. // We currently only lift files up from the `deps` directory. If // it was compiled into something like `example/` or `doc/` then diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index e4ae2bc51..e5361b679 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -8,71 +8,123 @@ //! # places all of its output here. //! target/ //! -//! # This is the root directory for all output of *dependencies* -//! deps/ +//! # Cache of `rustc -Vv` output for performance. +//! .rustc-info.json //! -//! # Root directory for all compiled examples -//! examples/ +//! # All final artifacts are linked into this directory from `deps`. +//! debug/ # or release/ +//! +//! # File used to lock the directory to prevent multiple cargo processes +//! # from using it at the same time. +//! .cargo-lock +//! +//! # Hidden directory that holds all of the fingerprint files for all +//! # packages +//! .fingerprint/ +//! # Each package is in a separate directory. +//! $pkgname-$META/ +//! # Set of source filenames for this package. +//! dep-lib-$pkgname-$META +//! # Timestamp when this package was last built. +//! invoked.timestamp +//! # The fingerprint hash. +//! lib-$pkgname-$META +//! # Detailed information used for logging the reason why +//! # something is being recompiled. +//! lib-$pkgname-$META.json +//! +//! # This is the root directory for all rustc artifacts except build +//! 
# scripts, examples, and test and bench executables. Almost every +//! # artifact should have a metadata hash added to its filename to +//! # prevent collisions. One notable exception is dynamic libraries. +//! deps/ +//! +//! # Root directory for all compiled examples. +//! examples/ +//! +//! # Directory used to store incremental data for the compiler (when +//! # incremental is enabled. +//! incremental/ //! //! # This is the location at which the output of all custom build -//! # commands are rooted +//! # commands are rooted. //! build/ //! //! # Each package gets its own directory where its build script and //! # script output are placed -//! $pkg1/ -//! $pkg2/ -//! $pkg3/ +//! $pkgname-$META/ # For the build script itself. +//! # The build script executable (name may be changed by user). +//! build-script-build-$META +//! # Hard link to build-script-build-$META. +//! build-script-build +//! # Dependency information generated by rustc. +//! build-script-build-$META.d +//! # Debug information, depending on platform and profile +//! # settings. +//! //! -//! # Each directory package has a `out` directory where output -//! # is placed. +//! # The package shows up twice with two different metadata hashes. +//! $pkgname-$META/ # For the output of the build script. +//! # Timestamp when the build script was last executed. +//! invoked.timestamp +//! # Directory where script can output files ($OUT_DIR). //! out/ +//! # Output from the build script. +//! output +//! # Path to `out`, used to help when the target directory is +//! # moved. +//! root-output +//! # Stderr output from the build script. +//! stderr //! -//! # This is the location at which the output of all old custom build -//! # commands are rooted -//! native/ +//! # Output from rustdoc +//! doc/ //! -//! # Each package gets its own directory for where its output is -//! # placed. We can't track exactly what's getting put in here, so -//! # we just assume that all relevant output is in these -//! # directories. -//! $pkg1/ -//! $pkg2/ -//! $pkg3/ +//! # Used by `cargo package` and `cargo publish` to build a `.crate` file. +//! package/ //! -//! # Directory used to store incremental data for the compiler (when -//! # incremental is enabled. -//! incremental/ -//! -//! # Hidden directory that holds all of the fingerprint files for all -//! # packages -//! .fingerprint/ +//! # Experimental feature for generated build scripts. +//! .metabuild/ //! ``` +//! +//! When cross-compiling, the layout is the same, except it appears in +//! `target/$TRIPLE`. use std::fs; use std::io; use std::path::{Path, PathBuf}; use crate::core::Workspace; -use crate::util::{CargoResult, Config, FileLock, Filesystem}; +use crate::util::{CargoResult, FileLock}; /// Contains the paths of all target output locations. /// /// See module docs for more information. pub struct Layout { + /// The root directory: `/path/to/target`. + /// If cross compiling: `/path/to/target/$TRIPLE`. root: PathBuf, + /// The final artifact destination: `$root/debug` (or `release`). + dest: PathBuf, + /// The directory with rustc artifacts: `$dest/deps` deps: PathBuf, - native: PathBuf, + /// The directory for build scripts: `$dest/build` build: PathBuf, + /// The directory for incremental files: `$dest/incremental` incremental: PathBuf, + /// The directory for fingerprints: `$dest/.fingerprint` fingerprint: PathBuf, + /// The directory for examples: `$dest/examples` examples: PathBuf, - /// The lock file for a build, will be unlocked when this struct is `drop`ped. 
+ /// The directory for rustdoc output: `$root/doc` + doc: PathBuf, + /// The lockfile for a build (`.cargo-lock`). Will be unlocked when this + /// struct is `drop`ped. _lock: FileLock, } pub fn is_bad_artifact_name(name: &str) -> bool { - ["deps", "examples", "build", "native", "incremental"] + ["deps", "examples", "build", "incremental"] .iter() .any(|&reserved| reserved == name) } @@ -82,55 +134,50 @@ impl Layout { /// /// This function will block if the directory is already locked. /// - /// Differs from `at` in that this calculates the root path from the workspace target directory, - /// adding the target triple and the profile (debug, release, ...). + /// `dest` should be the final artifact directory name. Currently either + /// "debug" or "release". pub fn new(ws: &Workspace<'_>, triple: Option<&str>, dest: &str) -> CargoResult { - let mut path = ws.target_dir(); + let mut root = ws.target_dir(); // Flexible target specifications often point at json files, so interpret // the target triple as a Path and then just use the file stem as the // component for the directory name in that case. if let Some(triple) = triple { let triple = Path::new(triple); if triple.extension().and_then(|s| s.to_str()) == Some("json") { - path.push( + root.push( triple .file_stem() .ok_or_else(|| failure::format_err!("invalid target"))?, ); } else { - path.push(triple); + root.push(triple); } } - path.push(dest); - Layout::at(ws.config(), path) - } - - /// Calculate the paths for build output, lock the build directory, and return as a Layout. - /// - /// This function will block if the directory is already locked. - pub fn at(config: &Config, root: Filesystem) -> CargoResult { + let dest = root.join(dest); // If the root directory doesn't already exist go ahead and create it // here. Use this opportunity to exclude it from backups as well if the // system supports it since this is a freshly created folder. - if !root.as_path_unlocked().exists() { - root.create_dir()?; - exclude_from_backups(root.as_path_unlocked()); + if !dest.as_path_unlocked().exists() { + dest.create_dir()?; + exclude_from_backups(dest.as_path_unlocked()); } // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. - let lock = root.open_rw(".cargo-lock", config, "build directory")?; + let lock = dest.open_rw(".cargo-lock", ws.config(), "build directory")?; let root = root.into_path_unlocked(); + let dest = dest.into_path_unlocked(); Ok(Layout { - deps: root.join("deps"), - native: root.join("native"), - build: root.join("build"), - incremental: root.join("incremental"), - fingerprint: root.join(".fingerprint"), - examples: root.join("examples"), + deps: dest.join("deps"), + build: dest.join("build"), + incremental: dest.join("incremental"), + fingerprint: dest.join(".fingerprint"), + examples: dest.join("examples"), + doc: root.join("doc"), root, + dest, _lock: lock, }) } @@ -138,7 +185,6 @@ impl Layout { /// Makes sure all directories stored in the Layout exist on the filesystem. pub fn prepare(&mut self) -> io::Result<()> { mkdir(&self.deps)?; - mkdir(&self.native)?; mkdir(&self.incremental)?; mkdir(&self.fingerprint)?; mkdir(&self.examples)?; @@ -154,9 +200,9 @@ impl Layout { } } - /// Fetch the root path. + /// Fetch the destination path for final artifacts (`/…/target/debug`). pub fn dest(&self) -> &Path { - &self.root + &self.dest } /// Fetch the deps path. 
pub fn deps(&self) -> &Path { @@ -166,7 +212,11 @@ impl Layout { pub fn examples(&self) -> &Path { &self.examples } - /// Fetch the root path. + /// Fetch the doc path. + pub fn doc(&self) -> &Path { + &self.doc + } + /// Fetch the root path (`/…/target`). pub fn root(&self) -> &Path { &self.root } @@ -178,7 +228,7 @@ impl Layout { pub fn fingerprint(&self) -> &Path { &self.fingerprint } - /// Fetch the build path. + /// Fetch the build script path. pub fn build(&self) -> &Path { &self.build } diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 58763d975..14e2f60ad 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -480,6 +480,7 @@ fn link_targets<'a, 'cfg>( })) } +/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it. fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> { debug!("linking {} to {}", src.display(), dst.display()); if is_same_file(src, dst).unwrap_or(false) { diff --git a/src/doc/src/reference/manifest.md b/src/doc/src/reference/manifest.md index ea60e8c55..4839be125 100644 --- a/src/doc/src/reference/manifest.md +++ b/src/doc/src/reference/manifest.md @@ -628,7 +628,7 @@ dependencies residing in the workspace directory become members. You can add additional packages to the workspace by listing them in the `members` key. Note that members of the workspaces listed explicitly will also have their path dependencies included in the workspace. Sometimes a package may have a lot of -workspace members and it can be onerous to keep up to date. The path dependency +workspace members and it can be onerous to keep up to date. The `members` list can also use [globs][globs] to match multiple paths. Finally, the `exclude` key can be used to blacklist paths from being included in a workspace. This can be useful if some path dependencies aren't desired to be in the workspace at