Auto merge of #6005 - alexcrichton:download-parallel, r=ehuss

Download crates in parallel with HTTP/2

This PR revives some of the work of https://github.com/rust-lang/cargo/pull/5161 by refactoring Cargo to make it much easier to add parallel downloads, and then does so using the `curl` crate's new `http2` feature, which compiles `nghttp2` as a backend.
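Multiplexing over the new HTTP/2 support is additionally gated behind a config key, read as `http.multiplexing` in the diff below and left off by default while the curl support is still new. As a sketch (assuming the usual `.cargo/config` location), a user could opt in with:

```toml
[http]
# Let Cargo drive many downloads over one multiplexed HTTP/2 connection.
multiplexing = true
```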

The primary refactoring done here is to remove the concept of "download this one package" deep within a `Source`. A `Source` still has a `download` method, but it is now largely non-blocking: if a crate needs to be downloaded, the method immediately returns information describing what to fetch. The `PackageSet` abstraction is now the central location for all parallel downloads, and all of its users have been refactored to be amenable to parallel downloads, when added.
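Concretely, the reworked contract looks like this; a minimal sketch where the `MaybePackage` enum and both method signatures are taken from the diff below, and Cargo's `Package`, `PackageId`, and `CargoResult` types are stubbed out only so the sketch stands alone:

```rust
// Stand-ins for Cargo's real types so this sketch compiles on its own.
type Package = ();
type PackageId = ();
type CargoResult<T> = Result<T, Box<dyn std::error::Error>>;

/// What `Source::download` now returns instead of a fully downloaded package.
pub enum MaybePackage {
    /// Already available locally (path, git, and directory sources).
    Ready(Package),
    /// Needs a fetch; the caller drives the actual HTTP transfer.
    Download { url: String, descriptor: String },
}

pub trait Source {
    /// Largely non-blocking: hands back a ready package or says what to fetch.
    fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage>;
    /// Invoked once the caller has fetched `contents` from the returned URL.
    fn finish_download(&mut self, package: &PackageId, contents: Vec<u8>)
        -> CargoResult<Package>;
}
```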

Many more details are in the commits...
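As a rough map of how the pieces fit together, the new download loop has this shape, condensed from `PackageSet::get_many` in the diff below (the `package_set` and `ids` bindings are assumed for illustration; lifetimes and error plumbing are trimmed):

```rust
let mut downloads = package_set.enable_download()?; // claims the shared curl `Multi`
let mut pkgs = Vec::new();
for id in ids {
    // `start` is non-blocking: `Some(pkg)` if the package is cached or local,
    // `None` if an HTTP transfer was enqueued on the `Multi` handle instead.
    pkgs.extend(downloads.start(id)?);
}
while downloads.remaining() > 0 {
    pkgs.push(downloads.wait()?); // blocks until at least one transfer completes
}
```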
bors 2018-09-18 22:21:31 +00:00
commit 57ac39287b
42 changed files with 1264 additions and 448 deletions

View File

@ -18,10 +18,11 @@ path = "src/cargo/lib.rs"
[dependencies] [dependencies]
atty = "0.2" atty = "0.2"
bytesize = "1.0"
crates-io = { path = "src/crates-io", version = "0.20" } crates-io = { path = "src/crates-io", version = "0.20" }
crossbeam-utils = "0.5" crossbeam-utils = "0.5"
crypto-hash = "0.3.1" crypto-hash = "0.3.1"
curl = "0.4.13" curl = { version = "0.4.17", features = ['http2'] }
env_logger = "0.5.11" env_logger = "0.5.11"
failure = "0.1.2" failure = "0.1.2"
filetime = "0.2" filetime = "0.2"

View File

@ -5,7 +5,7 @@ use std::str;
use core::profiles::Profiles; use core::profiles::Profiles;
use core::{Dependency, Workspace}; use core::{Dependency, Workspace};
use core::{Package, PackageId, PackageSet, Resolve}; use core::{PackageId, PackageSet, Resolve};
use util::errors::CargoResult; use util::errors::CargoResult;
use util::{profile, Cfg, CfgExpr, Config, Rustc}; use util::{profile, Cfg, CfgExpr, Config, Rustc};
@ -107,11 +107,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
platform.matches(name, info.cfg()) platform.matches(name, info.cfg())
} }
/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
self.packages.get(id)
}
/// Get the user-specified linker for a particular host or target /// Get the user-specified linker for a particular host or target
pub fn linker(&self, kind: Kind) -> Option<&Path> { pub fn linker(&self, kind: Kind) -> Option<&Path> {
self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) self.target_config(kind).linker.as_ref().map(|s| s.as_ref())
@ -198,18 +193,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> { pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
self.extra_compiler_args.get(unit) self.extra_compiler_args.get(unit)
} }
/// Return the list of filenames read by cargo to generate the BuildContext
/// (all Cargo.toml, etc).
pub fn inputs(&self) -> CargoResult<Vec<PathBuf>> {
let mut inputs = Vec::new();
for id in self.packages.package_ids() {
let pkg = self.get_package(id)?;
inputs.push(pkg.manifest_path().to_path_buf());
}
inputs.sort();
Ok(inputs)
}
} }
/// Information required to build for a target /// Information required to build for a target

View File

@ -99,6 +99,7 @@ pub struct Context<'a, 'cfg: 'a> {
primary_packages: HashSet<&'a PackageId>, primary_packages: HashSet<&'a PackageId>,
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>, unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
files: Option<CompilationFiles<'a, 'cfg>>, files: Option<CompilationFiles<'a, 'cfg>>,
package_cache: HashMap<&'a PackageId, &'a Package>,
} }
impl<'a, 'cfg> Context<'a, 'cfg> { impl<'a, 'cfg> Context<'a, 'cfg> {
@ -133,6 +134,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
primary_packages: HashSet::new(), primary_packages: HashSet::new(),
unit_dependencies: HashMap::new(), unit_dependencies: HashMap::new(),
files: None, files: None,
package_cache: HashMap::new(),
}) })
} }
@ -165,7 +167,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
queue.execute(&mut self, &mut plan)?; queue.execute(&mut self, &mut plan)?;
if build_plan { if build_plan {
plan.set_inputs(self.bcx.inputs()?); plan.set_inputs(self.inputs()?);
plan.output_plan(); plan.output_plan();
} }
@ -326,7 +328,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}; };
self.primary_packages.extend(units.iter().map(|u| u.pkg.package_id())); self.primary_packages.extend(units.iter().map(|u| u.pkg.package_id()));
build_unit_dependencies(units, self.bcx, &mut self.unit_dependencies)?;
build_unit_dependencies(
units,
self.bcx,
&mut self.unit_dependencies,
&mut self.package_cache,
)?;
self.build_used_in_plugin_map(units)?; self.build_used_in_plugin_map(units)?;
let files = CompilationFiles::new( let files = CompilationFiles::new(
units, units,
@ -495,6 +502,25 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool { pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
self.primary_packages.contains(unit.pkg.package_id()) self.primary_packages.contains(unit.pkg.package_id())
} }
/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
self.package_cache.get(id)
.cloned()
.ok_or_else(|| format_err!("failed to find {}", id))
}
/// Return the list of filenames read by cargo to generate the BuildContext
/// (all Cargo.toml, etc).
pub fn inputs(&self) -> CargoResult<Vec<PathBuf>> {
let mut inputs = Vec::new();
for id in self.bcx.packages.package_ids() {
let pkg = self.get_package(id)?;
inputs.push(pkg.manifest_path().to_path_buf());
}
inputs.sort();
Ok(inputs)
}
} }
#[derive(Default)] #[derive(Default)]

View File

@ -15,46 +15,75 @@
//! (for example, with and without tests), so we actually build a dependency //! (for example, with and without tests), so we actually build a dependency
//! graph of `Unit`s, which capture these properties. //! graph of `Unit`s, which capture these properties.
use std::cell::RefCell;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use CargoResult; use CargoResult;
use core::dependency::Kind as DepKind; use core::dependency::Kind as DepKind;
use core::profiles::ProfileFor; use core::profiles::ProfileFor;
use core::{Package, Target}; use core::{Package, Target, PackageId};
use core::package::Downloads;
use super::{BuildContext, CompileMode, Kind, Unit}; use super::{BuildContext, CompileMode, Kind, Unit};
struct State<'a: 'tmp, 'cfg: 'a, 'tmp> {
bcx: &'tmp BuildContext<'a, 'cfg>,
deps: &'tmp mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
pkgs: RefCell<&'tmp mut HashMap<&'a PackageId, &'a Package>>,
waiting_on_download: HashSet<&'a PackageId>,
downloads: Downloads<'a, 'cfg>,
}
pub fn build_unit_dependencies<'a, 'cfg>(
roots: &[Unit<'a>],
bcx: &BuildContext<'a, 'cfg>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
) -> CargoResult<()> {
assert!(deps.is_empty(), "can only build unit deps once");
for unit in roots.iter() {
// Dependencies of tests/benches should not have `panic` set.
// We check the global test mode to see if we are running in `cargo
// test` in which case we ensure all dependencies have `panic`
// cleared, and avoid building the lib thrice (once with `panic`, once
// without, once for --test). In particular, the lib included for
// doctests and examples are `Build` mode here.
let profile_for = if unit.mode.is_any_test() || bcx.build_config.test() {
ProfileFor::TestDependency
} else {
ProfileFor::Any
};
deps_of(unit, bcx, deps, profile_for)?;
}
trace!("ALL UNIT DEPENDENCIES {:#?}", deps);
connect_run_custom_build_deps(bcx, deps);
Ok(())
}
pub fn build_unit_dependencies<'a, 'cfg>(
roots: &[Unit<'a>],
bcx: &BuildContext<'a, 'cfg>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
pkgs: &mut HashMap<&'a PackageId, &'a Package>,
) -> CargoResult<()> {
assert!(deps.is_empty(), "can only build unit deps once");
let mut state = State {
bcx,
deps,
pkgs: RefCell::new(pkgs),
waiting_on_download: HashSet::new(),
downloads: bcx.packages.enable_download()?,
};
loop {
for unit in roots.iter() {
state.get(unit.pkg.package_id())?;
// Dependencies of tests/benches should not have `panic` set.
// We check the global test mode to see if we are running in `cargo
// test` in which case we ensure all dependencies have `panic`
// cleared, and avoid building the lib thrice (once with `panic`, once
// without, once for --test). In particular, the lib included for
// doctests and examples are `Build` mode here.
let profile_for = if unit.mode.is_any_test() || bcx.build_config.test() {
ProfileFor::TestDependency
} else {
ProfileFor::Any
};
deps_of(unit, &mut state, profile_for)?;
}
if state.waiting_on_download.len() > 0 {
state.finish_some_downloads()?;
state.deps.clear();
} else {
break
}
}
trace!("ALL UNIT DEPENDENCIES {:#?}", state.deps);
connect_run_custom_build_deps(&mut state);
Ok(())
}
fn deps_of<'a, 'cfg>( fn deps_of<'a, 'cfg, 'tmp>(
unit: &Unit<'a>, unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>, state: &mut State<'a, 'cfg, 'tmp>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
profile_for: ProfileFor, profile_for: ProfileFor,
) -> CargoResult<()> { ) -> CargoResult<()> {
// Currently the `deps` map does not include `profile_for`. This should // Currently the `deps` map does not include `profile_for`. This should
@ -63,12 +92,12 @@ fn deps_of<'a, 'cfg>(
// `TestDependency`. `CustomBuild` should also be fine since if the // `TestDependency`. `CustomBuild` should also be fine since if the
// requested unit's settings are the same as `Any`, `CustomBuild` can't // requested unit's settings are the same as `Any`, `CustomBuild` can't
// affect anything else in the hierarchy. // affect anything else in the hierarchy.
if !deps.contains_key(unit) { if !state.deps.contains_key(unit) {
let unit_deps = compute_deps(unit, bcx, profile_for)?; let unit_deps = compute_deps(unit, state, profile_for)?;
let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect(); let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect();
deps.insert(*unit, to_insert); state.deps.insert(*unit, to_insert);
for (unit, profile_for) in unit_deps { for (unit, profile_for) in unit_deps {
deps_of(&unit, bcx, deps, profile_for)?; deps_of(&unit, state, profile_for)?;
} }
} }
Ok(()) Ok(())
@ -78,63 +107,82 @@ fn deps_of<'a, 'cfg>(
/// for that package. /// for that package.
/// This returns a vec of `(Unit, ProfileFor)` pairs. The `ProfileFor` /// This returns a vec of `(Unit, ProfileFor)` pairs. The `ProfileFor`
/// is the profile type that should be used for dependencies of the unit. /// is the profile type that should be used for dependencies of the unit.
fn compute_deps<'a, 'cfg>( fn compute_deps<'a, 'cfg, 'tmp>(
unit: &Unit<'a>, unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>, state: &mut State<'a, 'cfg, 'tmp>,
profile_for: ProfileFor, profile_for: ProfileFor,
) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> { ) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
if unit.mode.is_run_custom_build() { if unit.mode.is_run_custom_build() {
return compute_deps_custom_build(unit, bcx); return compute_deps_custom_build(unit, state.bcx);
} else if unit.mode.is_doc() && !unit.mode.is_any_test() { } else if unit.mode.is_doc() && !unit.mode.is_any_test() {
// Note: This does not include Doctest. // Note: This does not include Doctest.
return compute_deps_doc(unit, bcx); return compute_deps_doc(unit, state);
} }
let bcx = state.bcx;
let id = unit.pkg.package_id(); let id = unit.pkg.package_id();
let deps = bcx.resolve.deps(id);
let mut ret = deps.filter(|&(_id, deps)| {
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != dep.is_build() {
return false;
}
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
if !dep.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
&& !unit.mode.is_any_test()
{
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !bcx.dep_platform_activated(dep, unit.kind) {
return false;
}
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
if dep.is_optional() && !bcx.resolve.features(id).contains(&*dep.name_in_toml()) {
return false;
}
// If we've gotten past all that, then this dependency is
// actually used!
true
})
}).filter_map(|(id, _)| match bcx.get_package(id) {
Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, pkg, t, profile_for, unit.kind.for_target(t), mode);
Ok((unit, profile_for))
}),
Err(e) => Some(Err(e)),
})
.collect::<CargoResult<Vec<_>>>()?;
let deps = bcx.resolve.deps(id)
.filter(|&(_id, deps)| {
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != dep.is_build() {
return false;
}
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
if !dep.is_transitive() &&
!unit.target.is_test() &&
!unit.target.is_example() &&
!unit.mode.is_any_test()
{
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !bcx.dep_platform_activated(dep, unit.kind) {
return false;
}
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
if dep.is_optional() &&
!bcx.resolve.features(id).contains(&*dep.name_in_toml())
{
return false;
}
// If we've gotten past all that, then this dependency is
// actually used!
true
})
});
let mut ret = Vec::new();
for (id, _) in deps {
let pkg = match state.get(id)? {
Some(pkg) => pkg,
None => continue,
};
let lib = match pkg.targets().iter().find(|t| t.is_lib()) {
Some(t) => t,
None => continue,
};
let mode = check_or_build_mode(unit.mode, lib);
let unit = new_unit(
bcx,
pkg,
lib,
profile_for,
unit.kind.for_target(lib),
mode,
);
ret.push((unit, profile_for));
}
// If this target is a build script, then what we've collected so far is // If this target is a build script, then what we've collected so far is
// all we need. If this isn't a build script, then it depends on the // all we need. If this isn't a build script, then it depends on the
@ -221,10 +269,11 @@ fn compute_deps_custom_build<'a, 'cfg>(
} }
/// Returns the dependencies necessary to document a package /// Returns the dependencies necessary to document a package
fn compute_deps_doc<'a, 'cfg>( fn compute_deps_doc<'a, 'cfg, 'tmp>(
unit: &Unit<'a>, unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>, state: &mut State<'a, 'cfg, 'tmp>,
) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> { ) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
let bcx = state.bcx;
let deps = bcx.resolve let deps = bcx.resolve
.deps(unit.pkg.package_id()) .deps(unit.pkg.package_id())
.filter(|&(_id, deps)| { .filter(|&(_id, deps)| {
@ -232,15 +281,17 @@ fn compute_deps_doc<'a, 'cfg>(
DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind), DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind),
_ => false, _ => false,
}) })
})
.map(|(id, _deps)| bcx.get_package(id));
});
// To document a library, we depend on dependencies actually being // To document a library, we depend on dependencies actually being
// built. If we're documenting *all* libraries, then we also depend on // built. If we're documenting *all* libraries, then we also depend on
// the documentation of the library being built. // the documentation of the library being built.
let mut ret = Vec::new(); let mut ret = Vec::new();
for dep in deps {
let dep = dep?;
for (id, _deps) in deps {
let dep = match state.get(id)? {
Some(dep) => dep,
None => continue,
};
let lib = match dep.targets().iter().find(|t| t.is_lib()) { let lib = match dep.targets().iter().find(|t| t.is_lib()) {
Some(lib) => lib, Some(lib) => lib,
None => continue, None => continue,
@ -288,7 +339,14 @@ fn maybe_lib<'a>(
) -> Option<(Unit<'a>, ProfileFor)> { ) -> Option<(Unit<'a>, ProfileFor)> {
unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| { unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
let mode = check_or_build_mode(unit.mode, t); let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, unit.pkg, t, profile_for, unit.kind.for_target(t), mode);
let unit = new_unit(
bcx,
unit.pkg,
t,
profile_for,
unit.kind.for_target(t),
mode,
);
(unit, profile_for) (unit, profile_for)
}) })
} }
@ -373,10 +431,7 @@ fn new_unit<'a>(
/// ///
/// Here we take the entire `deps` map and add more dependencies from execution /// Here we take the entire `deps` map and add more dependencies from execution
/// of one build script to execution of another build script. /// of one build script to execution of another build script.
fn connect_run_custom_build_deps<'a>(
bcx: &BuildContext,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
) {
fn connect_run_custom_build_deps(state: &mut State) {
let mut new_deps = Vec::new(); let mut new_deps = Vec::new();
{ {
@ -386,7 +441,7 @@ fn connect_run_custom_build_deps<'a>(
// have the build script as the key and the library would be in the // have the build script as the key and the library would be in the
// value's set. // value's set.
let mut reverse_deps = HashMap::new(); let mut reverse_deps = HashMap::new();
for (unit, deps) in deps.iter() { for (unit, deps) in state.deps.iter() {
for dep in deps { for dep in deps {
if dep.mode == CompileMode::RunCustomBuild { if dep.mode == CompileMode::RunCustomBuild {
reverse_deps.entry(dep) reverse_deps.entry(dep)
@ -405,7 +460,7 @@ fn connect_run_custom_build_deps<'a>(
// `links`, then we depend on that package's build script! Here we use // `links`, then we depend on that package's build script! Here we use
// `dep_build_script` to manufacture an appropriate build script unit to // `dep_build_script` to manufacture an appropriate build script unit to
// depend on. // depend on.
for unit in deps.keys().filter(|k| k.mode == CompileMode::RunCustomBuild) { for unit in state.deps.keys().filter(|k| k.mode == CompileMode::RunCustomBuild) {
let reverse_deps = match reverse_deps.get(unit) { let reverse_deps = match reverse_deps.get(unit) {
Some(set) => set, Some(set) => set,
None => continue, None => continue,
@ -413,13 +468,13 @@ fn connect_run_custom_build_deps<'a>(
let to_add = reverse_deps let to_add = reverse_deps
.iter() .iter()
.flat_map(|reverse_dep| deps[reverse_dep].iter()) .flat_map(|reverse_dep| state.deps[reverse_dep].iter())
.filter(|other| { .filter(|other| {
other.pkg != unit.pkg && other.pkg != unit.pkg &&
other.target.linkable() && other.target.linkable() &&
other.pkg.manifest().links().is_some() other.pkg.manifest().links().is_some()
}) })
.filter_map(|other| dep_build_script(other, bcx).map(|p| p.0)) .filter_map(|other| dep_build_script(other, state.bcx).map(|p| p.0))
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
if !to_add.is_empty() { if !to_add.is_empty() {
@ -430,6 +485,50 @@ fn connect_run_custom_build_deps<'a>(
// And finally, add in all the missing dependencies! // And finally, add in all the missing dependencies!
for (unit, new_deps) in new_deps { for (unit, new_deps) in new_deps {
deps.get_mut(&unit).unwrap().extend(new_deps); state.deps.get_mut(&unit).unwrap().extend(new_deps);
}
}
impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
fn get(&mut self, id: &'a PackageId) -> CargoResult<Option<&'a Package>> {
let mut pkgs = self.pkgs.borrow_mut();
if let Some(pkg) = pkgs.get(id) {
return Ok(Some(pkg))
}
if !self.waiting_on_download.insert(id) {
return Ok(None)
}
if let Some(pkg) = self.downloads.start(id)? {
pkgs.insert(id, pkg);
self.waiting_on_download.remove(id);
return Ok(Some(pkg))
}
Ok(None)
}
/// Completes at least one download, possibly waiting for more to complete.
///
/// This function will block the current thread waiting for at least one
/// crate to finish downloading. The function may continue to download more
/// crates if it looks like there's a long enough queue of crates to keep
/// downloading. When only a handful of packages remain this function
/// returns, and it's hoped that by returning we'll be able to push more
/// packages to download into the queue.
fn finish_some_downloads(&mut self) -> CargoResult<()> {
assert!(self.downloads.remaining() > 0);
loop {
let pkg = self.downloads.wait()?;
self.waiting_on_download.remove(pkg.package_id());
self.pkgs.borrow_mut().insert(pkg.package_id(), pkg);
// Arbitrarily choose that 5 or more packages downloading concurrently
// is a good enough number to "fill the network pipe". If we have
// fewer than that, let's recompute the whole unit dependency graph
// again and try to find some more packages to download.
if self.downloads.remaining() < 5 {
break
}
}
Ok(())
} }
} }

View File

@ -14,6 +14,7 @@ use jobserver::{Acquired, HelperThread};
use core::profiles::Profile; use core::profiles::Profile;
use core::{PackageId, Target, TargetKind}; use core::{PackageId, Target, TargetKind};
use handle_error; use handle_error;
use util;
use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder}; use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
use util::{Config, DependencyQueue, Dirty, Fresh, Freshness}; use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
use util::{Progress, ProgressStyle}; use util::{Progress, ProgressStyle};
@ -368,16 +369,7 @@ impl<'a> JobQueue<'a> {
opt_type += " + debuginfo"; opt_type += " + debuginfo";
} }
let time_elapsed = {
let duration = cx.bcx.config.creation_time().elapsed();
let secs = duration.as_secs();
if secs >= 60 {
format!("{}m {:02}s", secs / 60, secs % 60)
} else {
format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000)
}
};
let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed());
if self.queue.is_empty() { if self.queue.is_empty() {
let message = format!( let message = format!(
@ -535,7 +527,7 @@ impl<'a> Key<'a> {
fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> { fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
let unit = Unit { let unit = Unit {
pkg: cx.bcx.get_package(self.pkg)?, pkg: cx.get_package(self.pkg)?,
target: self.target, target: self.target,
profile: self.profile, profile: self.profile,
kind: self.kind, kind: self.kind,

View File

@ -1,19 +1,27 @@
use std::cell::{Ref, RefCell}; use std::cell::{Ref, RefCell, Cell};
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
use std::fmt; use std::fmt;
use std::hash; use std::hash;
use std::mem;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::time::{Instant, Duration};
use bytesize::ByteSize;
use curl::easy::{Easy, HttpVersion};
use curl::multi::{Multi, EasyHandle};
use lazycell::LazyCell;
use semver::Version; use semver::Version;
use serde::ser; use serde::ser;
use toml; use toml;
use lazycell::LazyCell;
use core::{Dependency, Manifest, PackageId, SourceId, Target}; use core::{Dependency, Manifest, PackageId, SourceId, Target};
use core::{FeatureMap, SourceMap, Summary}; use core::{FeatureMap, SourceMap, Summary};
use core::source::MaybePackage;
use core::interning::InternedString; use core::interning::InternedString;
use util::{internal, lev_distance, Config}; use ops;
use util::errors::{CargoResult, CargoResultExt}; use util::{self, internal, lev_distance, Config, Progress, ProgressStyle};
use util::errors::{CargoResult, CargoResultExt, HttpNot200};
use util::network::Retry;
/// Information about a package that is available somewhere in the file system. /// Information about a package that is available somewhere in the file system.
/// ///
@ -236,46 +244,491 @@ impl hash::Hash for Package {
} }
} }
#[derive(Debug)]
pub struct PackageSet<'cfg> { pub struct PackageSet<'cfg> {
packages: HashMap<PackageId, LazyCell<Package>>, packages: HashMap<PackageId, LazyCell<Package>>,
sources: RefCell<SourceMap<'cfg>>, sources: RefCell<SourceMap<'cfg>>,
config: &'cfg Config,
multi: Multi,
downloading: Cell<bool>,
multiplexing: bool,
}
pub struct Downloads<'a, 'cfg: 'a> {
set: &'a PackageSet<'cfg>,
pending: HashMap<usize, (Download, EasyHandle)>,
pending_ids: HashSet<PackageId>,
results: Vec<(usize, CargoResult<()>)>,
next: usize,
retry: Retry<'cfg>,
progress: RefCell<Option<Progress<'cfg>>>,
downloads_finished: usize,
downloaded_bytes: u64,
largest: (u64, String),
start: Instant,
success: bool,
}
struct Download {
token: usize,
id: PackageId,
data: RefCell<Vec<u8>>,
url: String,
descriptor: String,
total: Cell<u64>,
current: Cell<u64>,
start: Instant,
} }
impl<'cfg> PackageSet<'cfg> { impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId], sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids
.iter()
.map(|id| (id.clone(), LazyCell::new()))
.collect(),
sources: RefCell::new(sources),
}
}
pub fn new(
package_ids: &[PackageId],
sources: SourceMap<'cfg>,
config: &'cfg Config,
) -> CargoResult<PackageSet<'cfg>> {
// We've enabled the `http2` feature of `curl` in Cargo, so treat
// failures here as fatal as it would indicate a build-time problem.
//
// Note that the multiplexing support is pretty new so we're leaving it
// off by default temporarily.
//
// Also note that pipelining is disabled as curl authors have indicated
// that it's buggy, and we've empirically seen that it's buggy with HTTP
// proxies.
let mut multi = Multi::new();
let multiplexing = config.get::<Option<bool>>("http.multiplexing")?
.unwrap_or(false);
multi.pipelining(false, multiplexing)
.chain_err(|| "failed to enable multiplexing/pipelining in curl")?;
// let's not flood crates.io with connections
multi.set_max_host_connections(2)?;
Ok(PackageSet {
packages: package_ids
.iter()
.map(|id| (id.clone(), LazyCell::new()))
.collect(),
sources: RefCell::new(sources),
config,
multi,
downloading: Cell::new(false),
multiplexing,
})
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> { pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
Box::new(self.packages.keys()) Box::new(self.packages.keys())
} }
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = self.packages
.get(id)
.ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() {
return Ok(pkg);
}
let mut sources = self.sources.borrow_mut();
let source = sources
.get_mut(id.source_id())
.ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
let pkg = source
.download(id)
.chain_err(|| format_err!("unable to get packages from source"))?;
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
pub fn enable_download<'a>(&'a self) -> CargoResult<Downloads<'a, 'cfg>> {
assert!(!self.downloading.replace(true));
Ok(Downloads {
start: Instant::now(),
set: self,
next: 0,
pending: HashMap::new(),
pending_ids: HashSet::new(),
results: Vec::new(),
retry: Retry::new(self.config)?,
progress: RefCell::new(Some(Progress::with_style(
"Downloading",
ProgressStyle::Ratio,
self.config,
))),
downloads_finished: 0,
downloaded_bytes: 0,
largest: (0, String::new()),
success: false,
})
}
pub fn get_one(&self, id: &PackageId) -> CargoResult<&Package> {
Ok(self.get_many(Some(id))?.remove(0))
}
pub fn get_many<'a>(&self, ids: impl IntoIterator<Item = &'a PackageId>)
-> CargoResult<Vec<&Package>>
{
let mut pkgs = Vec::new();
let mut downloads = self.enable_download()?;
for id in ids {
pkgs.extend(downloads.start(id)?);
}
while downloads.remaining() > 0 {
pkgs.push(downloads.wait()?);
}
downloads.success = true;
Ok(pkgs)
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> { pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow() self.sources.borrow()
} }
} }
impl<'a, 'cfg> Downloads<'a, 'cfg> {
/// Starts to download the package for the `id` specified.
///
/// Returns `None` if the package is queued up for download and will
/// eventually be returned from `wait`. Returns `Some(pkg)` if
/// the package is ready and doesn't need to be downloaded.
pub fn start(&mut self, id: &PackageId) -> CargoResult<Option<&'a Package>> {
// First up see if we've already cached this package, in which case
// there's nothing to do.
let slot = self.set.packages
.get(id)
.ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() {
return Ok(Some(pkg));
}
// Ask the original source of this `PackageId` for the corresponding
// package. That may immediately come back and tell us that the package
// is ready, or it could tell us that it needs to be downloaded.
let mut sources = self.set.sources.borrow_mut();
let source = sources
.get_mut(id.source_id())
.ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
let pkg = source
.download(id)
.chain_err(|| format_err!("unable to get packages from source"))?;
let (url, descriptor) = match pkg {
MaybePackage::Ready(pkg) => {
debug!("{} doesn't need a download", id);
assert!(slot.fill(pkg).is_ok());
return Ok(Some(slot.borrow().unwrap()))
}
MaybePackage::Download { url, descriptor } => (url, descriptor),
};
// Ok we're going to download this crate, so let's set up all our
// internal state and hand off an `Easy` handle to our libcurl `Multi`
// handle. This won't actually start the transfer, but later it'll
// happen during `wait`.
let token = self.next;
self.next += 1;
debug!("downloading {} as {}", id, token);
assert!(self.pending_ids.insert(id.clone()));
let mut handle = ops::http_handle(self.set.config)?;
handle.get(true)?;
handle.url(&url)?;
handle.follow_location(true)?; // follow redirects
// Enable HTTP/2 to be used as it'll allow true multiplexing which makes
// downloads much faster. Currently Cargo requests the `http2` feature
// of the `curl` crate which means it should always be built in, so
// treat it as a fatal error if HTTP/2 support isn't found.
if self.set.multiplexing {
handle.http_version(HttpVersion::V2)
.chain_err(|| "failed to enable HTTP2, is curl not built right?")?;
}
// This is an option to `libcurl` which indicates that if there's a
// bunch of parallel requests to the same host they all wait until the
// pipelining status of the host is known. This means that we won't
// initiate dozens of connections to crates.io, but rather only one.
// Once the main one is opened we realize that pipelining is possible
// and multiplexing is possible with static.crates.io. All in all this
// reduces the number of connections done to a more manageable state.
handle.pipewait(true)?;
handle.write_function(move |buf| {
debug!("{} - {} bytes of data", token, buf.len());
tls::with(|downloads| {
if let Some(downloads) = downloads {
downloads.pending[&token].0.data
.borrow_mut()
.extend_from_slice(buf);
}
});
Ok(buf.len())
})?;
handle.progress(true)?;
handle.progress_function(move |dl_total, dl_cur, _, _| {
tls::with(|downloads| {
let downloads = match downloads {
Some(d) => d,
None => return false,
};
let dl = &downloads.pending[&token].0;
dl.total.set(dl_total as u64);
dl.current.set(dl_cur as u64);
downloads.tick(WhyTick::DownloadUpdate).is_ok()
})
})?;
// If the progress bar isn't enabled then it may be a while before the
// first crate finishes downloading so we inform immediately that we're
// downloading crates here.
if self.downloads_finished == 0 &&
self.pending.len() == 0 &&
!self.progress.borrow().as_ref().unwrap().is_enabled()
{
self.set.config.shell().status("Downloading", "crates ...")?;
}
let dl = Download {
token,
data: RefCell::new(Vec::new()),
id: id.clone(),
url,
descriptor,
total: Cell::new(0),
current: Cell::new(0),
start: Instant::now(),
};
self.enqueue(dl, handle)?;
self.tick(WhyTick::DownloadStarted)?;
Ok(None)
}
/// Returns the number of crates that are still downloading
pub fn remaining(&self) -> usize {
self.pending.len()
}
/// Blocks the current thread waiting for a package to finish downloading.
///
/// This method will wait for a previously enqueued package to finish
/// downloading and return a reference to it after it's done downloading.
///
/// # Panics
///
/// This function will panic if there are no remaining downloads.
pub fn wait(&mut self) -> CargoResult<&'a Package> {
let (dl, data) = loop {
assert_eq!(self.pending.len(), self.pending_ids.len());
let (token, result) = self.wait_for_curl()?;
debug!("{} finished with {:?}", token, result);
let (mut dl, handle) = self.pending.remove(&token)
.expect("got a token for a non-in-progress transfer");
let data = mem::replace(&mut *dl.data.borrow_mut(), Vec::new());
let mut handle = self.set.multi.remove(handle)?;
self.pending_ids.remove(&dl.id);
// Check if this was a spurious error. If it was a spurious error
// then we want to re-enqueue our request for another attempt and
// then we wait for another request to finish.
let ret = {
self.retry.try(|| {
result?;
let code = handle.response_code()?;
if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&dl.url);
return Err(HttpNot200 {
code,
url: url.to_string(),
}.into())
}
Ok(())
}).chain_err(|| {
format!("failed to download from `{}`", dl.url)
})?
};
match ret {
Some(()) => break (dl, data),
None => {
self.pending_ids.insert(dl.id.clone());
self.enqueue(dl, handle)?
}
}
};
// If the progress bar isn't enabled then we still want to provide some
// semblance of progress of how we're downloading crates.
if !self.progress.borrow().as_ref().unwrap().is_enabled() {
self.set.config.shell().status("Downloaded", &dl.descriptor)?;
}
self.downloads_finished += 1;
self.downloaded_bytes += dl.total.get();
if dl.total.get() > self.largest.0 {
self.largest = (dl.total.get(), dl.id.name().to_string());
}
// We're about to synchronously extract the crate below. While we're
// doing that our download progress won't actually be updated, nor do we
// have a great view into the progress of the extraction. Let's prepare
// the user for this CPU-heavy step if it looks like it'll take some
// time to do so.
if dl.total.get() < ByteSize::kb(400).0 {
self.tick(WhyTick::DownloadFinished)?;
} else {
self.tick(WhyTick::Extracting(&dl.id.name()))?;
}
// Inform the original source that the download is finished which
// should allow us to actually get the package and fill it in now.
let mut sources = self.set.sources.borrow_mut();
let source = sources
.get_mut(dl.id.source_id())
.ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?;
let pkg = source.finish_download(&dl.id, data)?;
let slot = &self.set.packages[&dl.id];
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
fn enqueue(&mut self, dl: Download, handle: Easy) -> CargoResult<()> {
let mut handle = self.set.multi.add(handle)?;
handle.set_token(dl.token)?;
self.pending.insert(dl.token, (dl, handle));
Ok(())
}
fn wait_for_curl(&mut self) -> CargoResult<(usize, CargoResult<()>)> {
// This is the main workhorse loop. We use libcurl's portable `wait`
// method to actually perform blocking. This isn't necessarily too
// efficient in terms of fd management, but we should only be juggling
// a few anyway.
//
// Here we start off by asking the `multi` handle to do some work via
// the `perform` method. This will actually do I/O work (nonblocking)
// and attempt to make progress. Afterwards we ask about the `messages`
// contained in the handle which will inform us if anything has finished
// transferring.
//
// If we've got a finished transfer after all that work we break out
// and process the finished transfer at the end. Otherwise we need to
// actually block waiting for I/O to happen, which we achieve with the
// `wait` method on `multi`.
loop {
let n = tls::set(self, || {
self.set.multi.perform()
.chain_err(|| "failed to perform http requests")
})?;
debug!("handles remaining: {}", n);
let results = &mut self.results;
let pending = &self.pending;
self.set.multi.messages(|msg| {
let token = msg.token().expect("failed to read token");
let handle = &pending[&token].1;
if let Some(result) = msg.result_for(&handle) {
results.push((token, result.map_err(|e| e.into())));
} else {
debug!("message without a result (?)");
}
});
if let Some(pair) = results.pop() {
break Ok(pair)
}
assert!(self.pending.len() > 0);
self.set.multi.wait(&mut [], Duration::new(60, 0))
.chain_err(|| "failed to wait on curl `Multi`")?;
}
}
fn tick(&self, why: WhyTick) -> CargoResult<()> {
let mut progress = self.progress.borrow_mut();
let progress = progress.as_mut().unwrap();
if let WhyTick::DownloadUpdate = why {
if !progress.update_allowed() {
return Ok(())
}
}
let mut msg = format!("{} crates", self.pending.len());
match why {
WhyTick::Extracting(krate) => {
msg.push_str(&format!(", extracting {} ...", krate));
}
_ => {
let mut dur = Duration::new(0, 0);
let mut remaining = 0;
for (dl, _) in self.pending.values() {
dur += dl.start.elapsed();
// If the total/current look weird just throw out the data
// point, sounds like curl has more to learn before we have
// the true information.
if dl.total.get() >= dl.current.get() {
remaining += dl.total.get() - dl.current.get();
}
}
if remaining > 0 && dur > Duration::from_millis(500) {
msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining)));
}
}
}
progress.print_now(&msg)
}
}
enum WhyTick<'a> {
DownloadStarted,
DownloadUpdate,
DownloadFinished,
Extracting(&'a str),
}
impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> {
fn drop(&mut self) {
self.set.downloading.set(false);
let progress = self.progress.get_mut().take().unwrap();
// Don't print a download summary if we're not using a progress bar,
// we've already printed lots of `Downloading...` items.
if !progress.is_enabled() {
return
}
// If we didn't download anything, no need for a summary
if self.downloads_finished == 0 {
return
}
// If an error happened, let's not clutter up the output
if !self.success {
return
}
let mut status = format!("{} crates ({}) in {}",
self.downloads_finished,
ByteSize(self.downloaded_bytes),
util::elapsed(self.start.elapsed()));
if self.largest.0 > ByteSize::mb(1).0 {
status.push_str(&format!(
" (largest was `{}` at {})",
self.largest.1,
ByteSize(self.largest.0),
));
}
drop(self.set.config.shell().status("Downloaded", status));
}
}
mod tls {
use std::cell::Cell;
use super::Downloads;
thread_local!(static PTR: Cell<usize> = Cell::new(0));
pub(crate) fn with<R>(f: impl FnOnce(Option<&Downloads>) -> R) -> R {
let ptr = PTR.with(|p| p.get());
if ptr == 0 {
f(None)
} else {
unsafe {
f(Some(&*(ptr as *const Downloads)))
}
}
}
pub(crate) fn set<R>(dl: &Downloads, f: impl FnOnce() -> R) -> R {
struct Reset<'a, T: Copy + 'a>(&'a Cell<T>, T);
impl<'a, T: Copy> Drop for Reset<'a, T> {
fn drop(&mut self) {
self.0.set(self.1);
}
}
PTR.with(|p| {
let _reset = Reset(p, p.get());
p.set(dl as *const Downloads as usize);
f()
})
}
}

View File

@ -37,6 +37,7 @@ pub trait Registry {
/// a `Source`. Each `Source` in the map has been updated (using network /// a `Source`. Each `Source` in the map has been updated (using network
/// operations if necessary) and is ready to be queried for packages. /// operations if necessary) and is ready to be queried for packages.
pub struct PackageRegistry<'cfg> { pub struct PackageRegistry<'cfg> {
config: &'cfg Config,
sources: SourceMap<'cfg>, sources: SourceMap<'cfg>,
// A list of sources which are considered "overrides" which take precedent // A list of sources which are considered "overrides" which take precedent
@ -81,6 +82,7 @@ impl<'cfg> PackageRegistry<'cfg> {
pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> { pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> {
let source_config = SourceConfigMap::new(config)?; let source_config = SourceConfigMap::new(config)?;
Ok(PackageRegistry { Ok(PackageRegistry {
config,
sources: SourceMap::new(), sources: SourceMap::new(),
source_ids: HashMap::new(), source_ids: HashMap::new(),
overrides: Vec::new(), overrides: Vec::new(),
@ -92,9 +94,9 @@ impl<'cfg> PackageRegistry<'cfg> {
}) })
} }
pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> { pub fn get(self, package_ids: &[PackageId]) -> CargoResult<PackageSet<'cfg>> {
trace!("getting packages; sources={}", self.sources.len()); trace!("getting packages; sources={}", self.sources.len());
PackageSet::new(package_ids, self.sources) PackageSet::new(package_ids, self.sources, self.config)
} }
fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> { fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> {

View File

@ -49,7 +49,10 @@ pub trait Source {
/// The download method fetches the full package for each name and /// The download method fetches the full package for each name and
/// version specified. /// version specified.
fn download(&mut self, package: &PackageId) -> CargoResult<Package>; fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage>;
fn finish_download(&mut self, package: &PackageId, contents: Vec<u8>)
-> CargoResult<Package>;
/// Generates a unique string which represents the fingerprint of the /// Generates a unique string which represents the fingerprint of the
/// current state of the source. /// current state of the source.
@ -74,6 +77,14 @@ pub trait Source {
} }
} }
pub enum MaybePackage {
Ready(Package),
Download {
url: String,
descriptor: String,
}
}
impl<'a, T: Source + ?Sized + 'a> Source for Box<T> { impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
/// Forwards to `Source::supports_checksums` /// Forwards to `Source::supports_checksums`
fn supports_checksums(&self) -> bool { fn supports_checksums(&self) -> bool {
@ -111,10 +122,14 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
} }
/// Forwards to `Source::download` /// Forwards to `Source::download`
fn download(&mut self, id: &PackageId) -> CargoResult<Package> { fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
(**self).download(id) (**self).download(id)
} }
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
(**self).finish_download(id, data)
}
/// Forwards to `Source::fingerprint` /// Forwards to `Source::fingerprint`
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> { fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
(**self).fingerprint(pkg) (**self).fingerprint(pkg)
@ -126,6 +141,52 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
} }
} }
impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
fn supports_checksums(&self) -> bool {
(**self).supports_checksums()
}
fn requires_precise(&self) -> bool {
(**self).requires_precise()
}
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).query(dep, f)
}
fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).fuzzy_query(dep, f)
}
fn source_id(&self) -> &SourceId {
(**self).source_id()
}
fn replaced_source_id(&self) -> &SourceId {
(**self).replaced_source_id()
}
fn update(&mut self) -> CargoResult<()> {
(**self).update()
}
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
(**self).download(id)
}
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
(**self).finish_download(id, data)
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
(**self).fingerprint(pkg)
}
fn verify(&self, pkg: &PackageId) -> CargoResult<()> {
(**self).verify(pkg)
}
}
/// A `HashMap` of `SourceId` -> `Box<Source>` /// A `HashMap` of `SourceId` -> `Box<Source>`
#[derive(Default)] #[derive(Default)]
pub struct SourceMap<'src> { pub struct SourceMap<'src> {

View File

@ -15,6 +15,7 @@
#![cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))] // perhaps Rc should be special cased in Clippy? #![cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))] // perhaps Rc should be special cased in Clippy?
extern crate atty; extern crate atty;
extern crate bytesize;
extern crate clap; extern crate clap;
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
extern crate core_foundation; extern crate core_foundation;

View File

@ -52,7 +52,7 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
for spec in opts.spec.iter() { for spec in opts.spec.iter() {
// Translate the spec to a Package // Translate the spec to a Package
let pkgid = resolve.query(spec)?; let pkgid = resolve.query(spec)?;
let pkg = packages.get(pkgid)?; let pkg = packages.get_one(pkgid)?;
// Generate all relevant `Unit` targets for this package // Generate all relevant `Unit` targets for this package
for target in pkg.targets() { for target in pkg.targets() {

View File

@ -243,15 +243,19 @@ pub fn compile_ws<'a>(
let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?; let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
let (packages, resolve_with_overrides) = resolve; let (packages, resolve_with_overrides) = resolve;
let to_builds = specs
.iter()
.map(|p| {
let pkgid = p.query(resolve_with_overrides.iter())?;
let p = packages.get(pkgid)?;
p.manifest().print_teapot(ws.config());
Ok(p)
})
.collect::<CargoResult<Vec<_>>>()?;
let to_build_ids = specs.iter()
.map(|s| s.query(resolve_with_overrides.iter()))
.collect::<CargoResult<Vec<_>>>()?;
let mut to_builds = packages.get_many(to_build_ids)?;
// The ordering here affects some error messages coming out of cargo, so
// let's be test and CLI friendly by always printing in the same order if
// there's an error.
to_builds.sort_by_key(|p| p.package_id());
for pkg in to_builds.iter() {
pkg.manifest().print_teapot(ws.config());
}
let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) { let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
(&Some(ref args), _) => (Some(args.clone()), "rustc"), (&Some(ref args), _) => (Some(args.clone()), "rustc"),

View File

@ -31,13 +31,10 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
)?; )?;
let (packages, resolve_with_overrides) = resolve; let (packages, resolve_with_overrides) = resolve;
let pkgs = specs
.iter()
.map(|p| {
let pkgid = p.query(resolve_with_overrides.iter())?;
packages.get(pkgid)
})
.collect::<CargoResult<Vec<_>>>()?;
let ids = specs.iter()
.map(|s| s.query(resolve_with_overrides.iter()))
.collect::<CargoResult<Vec<_>>>()?;
let pkgs = packages.get_many(ids)?;
let mut lib_names = HashMap::new(); let mut lib_names = HashMap::new();
let mut bin_names = HashMap::new(); let mut bin_names = HashMap::new();

View File

@ -27,13 +27,14 @@ pub fn fetch<'a>(
{ {
let mut fetched_packages = HashSet::new(); let mut fetched_packages = HashSet::new();
let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::<Vec<_>>(); let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::<Vec<_>>();
let mut to_download = Vec::new();
while let Some(id) = deps_to_fetch.pop() { while let Some(id) = deps_to_fetch.pop() {
if !fetched_packages.insert(id) { if !fetched_packages.insert(id) {
continue; continue;
} }
packages.get(id)?; to_download.push(id.clone());
let deps = resolve.deps(id) let deps = resolve.deps(id)
.filter(|&(_id, deps)| { .filter(|&(_id, deps)| {
deps.iter() deps.iter()
@ -57,6 +58,7 @@ pub fn fetch<'a>(
.map(|(id, _deps)| id); .map(|(id, _deps)| id);
deps_to_fetch.extend(deps); deps_to_fetch.extend(deps);
} }
packages.get_many(&to_download)?;
} }
Ok((resolve, packages)) Ok((resolve, packages))

View File

@ -12,6 +12,8 @@ use toml;
use core::{Dependency, Edition, Package, PackageIdSpec, Source, SourceId}; use core::{Dependency, Edition, Package, PackageIdSpec, Source, SourceId};
use core::{PackageId, Workspace}; use core::{PackageId, Workspace};
use core::source::SourceMap;
use core::package::PackageSet;
use core::compiler::{DefaultExecutor, Executor}; use core::compiler::{DefaultExecutor, Executor};
use ops::{self, CompileFilter}; use ops::{self, CompileFilter};
use sources::{GitSource, PathSource, SourceConfigMap}; use sources::{GitSource, PathSource, SourceConfigMap};
@ -499,22 +501,28 @@ where
source.source_id(), source.source_id(),
)?; )?;
let deps = source.query_vec(&dep)?; let deps = source.query_vec(&dep)?;
match deps.iter().map(|p| p.package_id()).max() {
Some(pkgid) => {
let pkg = source.download(pkgid)?;
Ok((pkg, Box::new(source)))
}
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
.unwrap_or_default();
Err(format_err!(
"could not find `{}` in {}{}",
name,
source.source_id(),
vers_info
))
}
}
let pkgid = match deps.iter().map(|p| p.package_id()).max() {
Some(pkgid) => pkgid,
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
.unwrap_or_default();
bail!(
"could not find `{}` in {}{}",
name,
source.source_id(),
vers_info
)
}
};
let pkg = {
let mut map = SourceMap::new();
map.insert(Box::new(&mut source));
PackageSet::new(&[pkgid.clone()], map, config)?
.get_one(&pkgid)?
.clone()
};
Ok((pkg, Box::new(source)))
} }
None => { None => {
let candidates = list_all(&mut source)?; let candidates = list_all(&mut source)?;

View File

@ -1,7 +1,9 @@
use std::collections::HashMap;
use serde::ser; use serde::ser;
use core::resolver::Resolve; use core::resolver::Resolve;
use core::{Package, PackageId, Workspace, PackageSet}; use core::{Package, PackageId, Workspace};
use ops::{self, Packages}; use ops::{self, Packages};
use util::CargoResult; use util::CargoResult;
@ -18,7 +20,7 @@ pub struct OutputMetadataOptions {
/// Loads the manifest, resolves the dependencies of the project to the concrete /// Loads the manifest, resolves the dependencies of the project to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON /// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout. /// format to stdout.
pub fn output_metadata<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo<'a>> { pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version != VERSION { if opt.version != VERSION {
bail!( bail!(
"metadata version {} not supported, only {} is currently supported", "metadata version {} not supported, only {} is currently supported",
@ -33,7 +35,7 @@ pub fn output_metadata<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> Ca
} }
} }
fn metadata_no_deps<'a>(ws: &'a Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo<'a>> { fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
Ok(ExportInfo { Ok(ExportInfo {
packages: ws.members().cloned().collect(), packages: ws.members().cloned().collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(), workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
@ -44,9 +46,9 @@ fn metadata_no_deps<'a>(ws: &'a Workspace, _opt: &OutputMetadataOptions) -> Carg
}) })
} }
fn metadata_full<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo<'a>> { fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
let specs = Packages::All.to_package_id_specs(ws)?; let specs = Packages::All.to_package_id_specs(ws)?;
let deps = ops::resolve_ws_precisely( let (package_set, resolve) = ops::resolve_ws_precisely(
ws, ws,
None, None,
&opt.features, &opt.features,
@ -54,18 +56,16 @@ fn metadata_full<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoRes
opt.no_default_features, opt.no_default_features,
&specs, &specs,
)?; )?;
let (package_set, resolve) = deps;
let packages = package_set
.package_ids()
.map(|i| package_set.get(i).map(|p| p.clone()))
.collect::<CargoResult<Vec<_>>>()?;
let mut packages = HashMap::new();
for pkg in package_set.get_many(package_set.package_ids())? {
packages.insert(pkg.package_id().clone(), pkg.clone());
}
Ok(ExportInfo { Ok(ExportInfo {
packages, packages: packages.values().map(|p| (*p).clone()).collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(), workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
resolve: Some(MetadataResolve { resolve: Some(MetadataResolve {
resolve: (package_set, resolve), resolve: (packages, resolve),
root: ws.current_opt().map(|pkg| pkg.package_id().clone()), root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
}), }),
target_directory: ws.target_dir().display().to_string(), target_directory: ws.target_dir().display().to_string(),
@ -75,10 +75,10 @@ fn metadata_full<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoRes
} }
#[derive(Serialize)] #[derive(Serialize)]
pub struct ExportInfo<'a> { pub struct ExportInfo {
packages: Vec<Package>, packages: Vec<Package>,
workspace_members: Vec<PackageId>, workspace_members: Vec<PackageId>,
resolve: Option<MetadataResolve<'a>>, resolve: Option<MetadataResolve>,
target_directory: String, target_directory: String,
version: u32, version: u32,
workspace_root: String, workspace_root: String,
@ -88,13 +88,13 @@ pub struct ExportInfo<'a> {
/// The one from lockfile does not fit because it uses a non-standard /// The one from lockfile does not fit because it uses a non-standard
/// format for `PackageId`s /// format for `PackageId`s
#[derive(Serialize)] #[derive(Serialize)]
struct MetadataResolve<'a> { struct MetadataResolve {
#[serde(rename = "nodes", serialize_with = "serialize_resolve")] #[serde(rename = "nodes", serialize_with = "serialize_resolve")]
resolve: (PackageSet<'a>, Resolve), resolve: (HashMap<PackageId, Package>, Resolve),
root: Option<PackageId>, root: Option<PackageId>,
} }
fn serialize_resolve<S>((package_set, resolve): &(PackageSet, Resolve), s: S) -> Result<S::Ok, S::Error> fn serialize_resolve<S>((packages, resolve): &(HashMap<PackageId, Package>, Resolve), s: S) -> Result<S::Ok, S::Error>
where where
S: ser::Serializer, S: ser::Serializer,
{ {
@ -119,7 +119,7 @@ where
dependencies: resolve.deps(id).map(|(pkg, _deps)| pkg).collect(), dependencies: resolve.deps(id).map(|(pkg, _deps)| pkg).collect(),
deps: resolve.deps(id) deps: resolve.deps(id)
.map(|(pkg, _deps)| { .map(|(pkg, _deps)| {
let name = package_set.get(pkg).ok() let name = packages.get(pkg)
.and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib())) .and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
.and_then(|lib_target| { .and_then(|lib_target| {
resolve.extern_crate_name(id, pkg, lib_target).ok() resolve.extern_crate_name(id, pkg, lib_target).ok()

View File

@ -16,7 +16,7 @@ use util::profile;
pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> {
let mut registry = PackageRegistry::new(ws.config())?; let mut registry = PackageRegistry::new(ws.config())?;
let resolve = resolve_with_registry(ws, &mut registry, true)?; let resolve = resolve_with_registry(ws, &mut registry, true)?;
let packages = get_resolved_packages(&resolve, registry); let packages = get_resolved_packages(&resolve, registry)?;
Ok((packages, resolve)) Ok((packages, resolve))
} }
@ -96,7 +96,7 @@ pub fn resolve_ws_with_method<'a>(
true, true,
)?; )?;
let packages = get_resolved_packages(&resolved_with_overrides, registry); let packages = get_resolved_packages(&resolved_with_overrides, registry)?;
Ok((packages, resolved_with_overrides)) Ok((packages, resolved_with_overrides))
} }
@ -374,7 +374,7 @@ pub fn add_overrides<'a>(
pub fn get_resolved_packages<'a>( pub fn get_resolved_packages<'a>(
resolve: &Resolve, resolve: &Resolve,
registry: PackageRegistry<'a>, registry: PackageRegistry<'a>,
) -> PackageSet<'a> { ) -> CargoResult<PackageSet<'a>> {
let ids: Vec<PackageId> = resolve.iter().cloned().collect(); let ids: Vec<PackageId> = resolve.iter().cloned().collect();
registry.get(&ids) registry.get(&ids)
} }

View File

@ -9,6 +9,7 @@ use hex;
use serde_json; use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use sources::PathSource; use sources::PathSource;
use util::{Config, Sha256}; use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
@ -150,14 +151,19 @@ impl<'cfg> Source for DirectorySource<'cfg> {
Ok(()) Ok(())
} }
fn download(&mut self, id: &PackageId) -> CargoResult<Package> { fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
self.packages self.packages
.get(id) .get(id)
.map(|p| &p.0) .map(|p| &p.0)
.cloned() .cloned()
.map(MaybePackage::Ready)
.ok_or_else(|| format_err!("failed to find package with id: {}", id)) .ok_or_else(|| format_err!("failed to find package with id: {}", id))
} }
fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no downloads to do")
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> { fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string()) Ok(pkg.package_id().version().to_string())
} }

View File

@ -2,7 +2,7 @@ use std::fmt::{self, Debug, Formatter};
use url::Url; use url::Url;
use core::source::{Source, SourceId}; use core::source::{Source, SourceId, MaybePackage};
use core::GitReference; use core::GitReference;
use core::{Dependency, Package, PackageId, Summary}; use core::{Dependency, Package, PackageId, Summary};
use util::Config; use util::Config;
@ -210,7 +210,7 @@ impl<'cfg> Source for GitSource<'cfg> {
self.path_source.as_mut().unwrap().update() self.path_source.as_mut().unwrap().update()
} }
fn download(&mut self, id: &PackageId) -> CargoResult<Package> { fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
trace!( trace!(
"getting packages for package id `{}` from `{:?}`", "getting packages for package id `{}` from `{:?}`",
id, id,
@ -222,6 +222,10 @@ impl<'cfg> Source for GitSource<'cfg> {
.download(id) .download(id)
} }
fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no download should have started")
}
fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> { fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
Ok(self.rev.as_ref().unwrap().to_string()) Ok(self.rev.as_ref().unwrap().to_string())
} }

View File

@ -9,6 +9,7 @@ use ignore::Match;
use ignore::gitignore::GitignoreBuilder; use ignore::gitignore::GitignoreBuilder;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use ops; use ops;
use util::{self, internal, CargoResult}; use util::{self, internal, CargoResult};
use util::paths; use util::paths;
@ -540,14 +541,19 @@ impl<'cfg> Source for PathSource<'cfg> {
Ok(()) Ok(())
} }
fn download(&mut self, id: &PackageId) -> CargoResult<Package> { fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
trace!("getting packages; id={}", id); trace!("getting packages; id={}", id);
let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id); let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
pkg.cloned() pkg.cloned()
.map(MaybePackage::Ready)
.ok_or_else(|| internal(format!("failed to find {} in path source", id))) .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
} }
fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no download should have started")
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> { fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
let (max, max_path) = self.last_modified_file(pkg)?; let (max, max_path) = self.last_modified_file(pkg)?;
Ok(format!("{} ({})", max, max_path.display())) Ok(format!("{} ({})", max, max_path.display()))

View File

@ -4,10 +4,9 @@ use std::path::Path;
use core::PackageId; use core::PackageId;
use hex; use hex;
use sources::registry::{RegistryConfig, RegistryData}; use sources::registry::{RegistryConfig, RegistryData, MaybeLock};
use util::FileLock;
use util::paths; use util::paths;
use util::{Config, Filesystem, Sha256}; use util::{Config, Filesystem, Sha256, FileLock};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
pub struct LocalRegistry<'cfg> { pub struct LocalRegistry<'cfg> {
@ -70,7 +69,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
Ok(()) Ok(())
} }
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> { fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock> {
let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?; let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
@ -78,7 +77,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
// checksum below as it is in theory already verified. // checksum below as it is in theory already verified.
let dst = format!("{}-{}", pkg.name(), pkg.version()); let dst = format!("{}-{}", pkg.name(), pkg.version());
if self.src_path.join(dst).into_path_unlocked().exists() { if self.src_path.join(dst).into_path_unlocked().exists() {
return Ok(crate_file); return Ok(MaybeLock::Ready(crate_file));
} }
self.config.shell().status("Unpacking", pkg)?; self.config.shell().status("Unpacking", pkg)?;
@ -102,6 +101,12 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
crate_file.seek(SeekFrom::Start(0))?; crate_file.seek(SeekFrom::Start(0))?;
Ok(crate_file) Ok(MaybeLock::Ready(crate_file))
}
fn finish_download(&mut self, _pkg: &PackageId, _checksum: &str, _data: &[u8])
-> CargoResult<FileLock>
{
panic!("this source doesn't download")
} }
} }

View File

@ -170,6 +170,7 @@ use serde_json;
use tar::Archive; use tar::Archive;
use core::dependency::{Dependency, Kind}; use core::dependency::{Dependency, Kind};
use core::source::MaybePackage;
use core::{Package, PackageId, Source, SourceId, Summary}; use core::{Package, PackageId, Source, SourceId, Summary};
use sources::PathSource; use sources::PathSource;
use util::errors::CargoResultExt; use util::errors::CargoResultExt;
@ -347,13 +348,20 @@ pub trait RegistryData {
) -> CargoResult<()>; ) -> CargoResult<()>;
fn config(&mut self) -> CargoResult<Option<RegistryConfig>>; fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
fn update_index(&mut self) -> CargoResult<()>; fn update_index(&mut self) -> CargoResult<()>;
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock>; fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock>;
fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
-> CargoResult<FileLock>;
fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool { fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
true true
} }
} }
pub enum MaybeLock {
Ready(FileLock),
Download { url: String, descriptor: String }
}
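`MaybeLock` mirrors `MaybePackage` one layer down: a registry backend either hands back an already-locked `.crate` file or the URL it should be fetched from. A hedged sketch of how a backend chooses (the two helpers are stand-ins, not real methods):

    fn download(&mut self, pkg: &PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
        // Cache hit: the crate file is already on disk.
        if let Some(file) = self.cached_crate_file(pkg)? { // assumed helper
            return Ok(MaybeLock::Ready(file));
        }
        // Cache miss: tell the caller what to fetch.
        Ok(MaybeLock::Download {
            url: self.download_url(pkg)?, // assumed helper
            descriptor: pkg.to_string(),
        })
    }

The local registry (above) only ever returns `Ready`; the remote registry (below) returns `Download` on a cache miss.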
mod index; mod index;
mod local; mod local;
mod remote; mod remote;
@ -462,6 +470,34 @@ impl<'cfg> RegistrySource<'cfg> {
index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked); index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
Ok(()) Ok(())
} }
fn get_pkg(&mut self, package: &PackageId, path: FileLock) -> CargoResult<Package> {
let path = self
.unpack_package(package, &path)
.chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = match src.download(package)? {
MaybePackage::Ready(pkg) => pkg,
MaybePackage::Download { .. } => unreachable!(),
};
// Unfortunately the index and the actual Cargo.toml in the index can
// differ due to historical Cargo bugs. To paper over these we replace the
// *summary* loaded from the Cargo.toml we just downloaded with the one
// we loaded from the index.
let summaries = self
.index
.summaries(package.name().as_str(), &mut *self.ops)?;
let summary = summaries
.iter()
.map(|s| &s.0)
.find(|s| s.package_id() == package)
.expect("summary not found");
let mut manifest = pkg.manifest().clone();
manifest.set_summary(summary.clone());
Ok(Package::new(manifest, pkg.manifest_path()))
}
} }
impl<'cfg> Source for RegistrySource<'cfg> { impl<'cfg> Source for RegistrySource<'cfg> {
@ -526,31 +562,24 @@ impl<'cfg> Source for RegistrySource<'cfg> {
Ok(()) Ok(())
} }
fn download(&mut self, package: &PackageId) -> CargoResult<Package> { fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage> {
let hash = self.index.hash(package, &mut *self.ops)?; let hash = self.index.hash(package, &mut *self.ops)?;
let path = self.ops.download(package, &hash)?; match self.ops.download(package, &hash)? {
let path = self MaybeLock::Ready(file) => {
.unpack_package(package, &path) self.get_pkg(package, file).map(MaybePackage::Ready)
.chain_err(|| internal(format!("failed to unpack package `{}`", package)))?; }
let mut src = PathSource::new(&path, &self.source_id, self.config); MaybeLock::Download { url, descriptor } => {
src.update()?; Ok(MaybePackage::Download { url, descriptor })
let pkg = src.download(package)?; }
}
}
// Unfortunately the index and the actual Cargo.toml in the index can fn finish_download(&mut self, package: &PackageId, data: Vec<u8>)
// differ due to historical Cargo bugs. To paper over these we trash the -> CargoResult<Package>
// *summary* loaded from the Cargo.toml we just downloaded with the one {
// we loaded from the index. let hash = self.index.hash(package, &mut *self.ops)?;
let summaries = self let file = self.ops.finish_download(package, &hash, &data)?;
.index self.get_pkg(package, file)
.summaries(package.name().as_str(), &mut *self.ops)?;
let summary = summaries
.iter()
.map(|s| &s.0)
.find(|s| s.package_id() == package)
.expect("summary not found");
let mut manifest = pkg.manifest().clone();
manifest.set_summary(summary.clone());
Ok(Package::new(manifest, pkg.manifest_path()))
} }
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> { fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {

View File

@ -14,10 +14,10 @@ use lazycell::LazyCell;
use core::{PackageId, SourceId}; use core::{PackageId, SourceId};
use sources::git; use sources::git;
use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE}; use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
use util::network; use sources::registry::MaybeLock;
use util::{FileLock, Filesystem}; use util::{FileLock, Filesystem};
use util::{Config, Progress, Sha256, ToUrl}; use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt, HttpNot200}; use util::errors::{CargoResult, CargoResultExt};
pub struct RemoteRegistry<'cfg> { pub struct RemoteRegistry<'cfg> {
index_path: Filesystem, index_path: Filesystem,
@ -122,6 +122,10 @@ impl<'cfg> RemoteRegistry<'cfg> {
*self.tree.borrow_mut() = Some(tree); *self.tree.borrow_mut() = Some(tree);
Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap())) Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
} }
fn filename(&self, pkg: &PackageId) -> String {
format!("{}-{}.crate", pkg.name(), pkg.version())
}
} }
impl<'cfg> RegistryData for RemoteRegistry<'cfg> { impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
@ -206,9 +210,8 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
Ok(()) Ok(())
} }
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> { fn download(&mut self, pkg: &PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let filename = self.filename(pkg);
let path = Path::new(&filename);
// Attempt to open a read-only copy first to avoid an exclusive write // Attempt to open a read-only copy first to avoid an exclusive write
// lock and also work with read-only filesystems. Note that we check the // lock and also work with read-only filesystems. Note that we check the
@ -216,18 +219,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
// //
// If this fails then we fall through to the exclusive path where we may // If this fails then we fall through to the exclusive path where we may
// have to redownload the file. // have to redownload the file.
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { if let Ok(dst) = self.cache_path.open_ro(&filename, self.config, &filename) {
let meta = dst.file().metadata()?; let meta = dst.file().metadata()?;
if meta.len() > 0 { if meta.len() > 0 {
return Ok(dst); return Ok(MaybeLock::Ready(dst));
} }
} }
let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst);
}
self.config.shell().status("Downloading", pkg)?;
let config = self.config()?.unwrap(); let config = self.config()?.unwrap();
let mut url = config.dl.clone(); let mut url = config.dl.clone();
@ -235,56 +232,29 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
} }
let url = url.replace(CRATE_TEMPLATE, &*pkg.name()) let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(VERSION_TEMPLATE, &pkg.version().to_string()) .replace(VERSION_TEMPLATE, &pkg.version().to_string());
.to_url()?;
// TODO: don't download into memory, but ensure that if we ctrl-c a Ok(MaybeLock::Download { url, descriptor: pkg.to_string() })
// download we should resume either from the start or the middle }
// on the next time
let url = url.to_string();
let mut handle = self.config.http()?.borrow_mut();
handle.get(true)?;
handle.url(&url)?;
handle.follow_location(true)?;
let mut state = Sha256::new();
let mut body = Vec::new();
network::with_retry(self.config, || {
state = Sha256::new();
body = Vec::new();
let mut pb = Progress::new("Fetch", self.config);
{
handle.progress(true)?;
let mut handle = handle.transfer();
handle.progress_function(|dl_total, dl_cur, _, _| {
pb.tick(dl_cur as usize, dl_total as usize).is_ok()
})?;
handle.write_function(|buf| {
state.update(buf);
body.extend_from_slice(buf);
Ok(buf.len())
})?;
handle.perform().chain_err(|| {
format!("failed to download from `{}`", url)
})?;
}
let code = handle.response_code()?;
if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&url);
Err(HttpNot200 {
code,
url: url.to_string(),
}.into())
} else {
Ok(())
}
})?;
fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
-> CargoResult<FileLock>
{
// Verify what we just downloaded // Verify what we just downloaded
let mut state = Sha256::new();
state.update(data);
if hex::encode(state.finish()) != checksum { if hex::encode(state.finish()) != checksum {
bail!("failed to verify the checksum of `{}`", pkg) bail!("failed to verify the checksum of `{}`", pkg)
} }
dst.write_all(&body)?; let filename = self.filename(pkg);
let mut dst = self.cache_path.open_rw(&filename, self.config, &filename)?;
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst);
}
dst.write_all(data)?;
dst.seek(SeekFrom::Start(0))?; dst.seek(SeekFrom::Start(0))?;
Ok(dst) Ok(dst)
} }

View File

@ -1,4 +1,5 @@
use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> { pub struct ReplacedSource<'cfg> {
@ -71,11 +72,26 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
Ok(()) Ok(())
} }
fn download(&mut self, id: &PackageId) -> CargoResult<Package> { fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
let id = id.with_source_id(&self.replace_with); let id = id.with_source_id(&self.replace_with);
let pkg = self.inner let pkg = self.inner
.download(&id) .download(&id)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(match pkg {
MaybePackage::Ready(pkg) => {
MaybePackage::Ready(pkg.map_source(&self.replace_with, &self.to_replace))
}
other @ MaybePackage::Download { .. } => other,
})
}
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>)
-> CargoResult<Package>
{
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner
.finish_download(&id, data)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace)) Ok(pkg.map_source(&self.replace_with, &self.to_replace))
} }

View File

@ -1,3 +1,5 @@
use std::time::Duration;
pub use self::cfg::{Cfg, CfgExpr}; pub use self::cfg::{Cfg, CfgExpr};
pub use self::config::{homedir, Config, ConfigValue}; pub use self::config::{homedir, Config, ConfigValue};
pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness}; pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
@ -46,3 +48,13 @@ mod read2;
mod progress; mod progress;
mod lockserver; mod lockserver;
pub mod diagnostic_server; pub mod diagnostic_server;
pub fn elapsed(duration: Duration) -> String {
let secs = duration.as_secs();
if secs >= 60 {
format!("{}m {:02}s", secs / 60, secs % 60)
} else {
format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000)
}
}
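A quick sanity check of the new helper, exercising both branches above:

    use std::time::Duration;

    assert_eq!(elapsed(Duration::from_secs(95)), "1m 35s");       // >= 60s branch
    assert_eq!(elapsed(Duration::new(2, 340_000_000)), "2.34s");  // sub-minute branch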

View File

@ -6,6 +6,38 @@ use failure::Error;
use util::Config; use util::Config;
use util::errors::{CargoResult, HttpNot200}; use util::errors::{CargoResult, HttpNot200};
pub struct Retry<'a> {
config: &'a Config,
remaining: u32,
}
impl<'a> Retry<'a> {
pub fn new(config: &'a Config) -> CargoResult<Retry<'a>> {
Ok(Retry {
config,
remaining: config.get::<Option<u32>>("net.retry")?.unwrap_or(2),
})
}
pub fn try<T>(&mut self, f: impl FnOnce() -> CargoResult<T>)
-> CargoResult<Option<T>>
{
match f() {
Err(ref e) if maybe_spurious(e) && self.remaining > 0 => {
let msg = format!(
"spurious network error ({} tries \
remaining): {}",
self.remaining, e
);
self.config.shell().warn(msg)?;
self.remaining -= 1;
Ok(None)
}
other => other.map(Some),
}
}
}
fn maybe_spurious(err: &Error) -> bool { fn maybe_spurious(err: &Error) -> bool {
for e in err.iter_chain() { for e in err.iter_chain() {
if let Some(git_err) = e.downcast_ref::<git2::Error>() { if let Some(git_err) = e.downcast_ref::<git2::Error>() {
@ -48,21 +80,10 @@ pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
where where
F: FnMut() -> CargoResult<T>, F: FnMut() -> CargoResult<T>,
{ {
let mut remaining = config.get::<Option<u32>>("net.retry")?.unwrap_or(2); let mut retry = Retry::new(config)?;
loop { loop {
match callback() { if let Some(ret) = retry.try(&mut callback)? {
Ok(ret) => return Ok(ret), return Ok(ret)
Err(ref e) if maybe_spurious(e) && remaining > 0 => {
let msg = format!(
"spurious network error ({} tries \
remaining): {}",
remaining, e
);
config.shell().warn(msg)?;
remaining -= 1;
}
//todo impl from
Err(e) => return Err(e),
} }
} }
} }
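Extracting `Retry` from `with_retry` lets the parallel download code retry individual transfers without funneling everything through a closure: the download state machine can call `try` once per completed request and re-enqueue on a spurious failure. A sketch of driving it by hand, equivalent to the rewritten loop above (`fetch` is a placeholder):

    let mut retry = Retry::new(config)?;
    let data = loop {
        if let Some(data) = retry.try(|| fetch())? {
            break data;
        }
        // `Ok(None)` means the error was spurious and a retry remains;
        // a hard error has already propagated via `?`.
    };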

View File

@ -16,13 +16,17 @@ pub enum ProgressStyle {
Ratio, Ratio,
} }
struct Throttle {
first: bool,
last_update: Instant,
}
struct State<'cfg> { struct State<'cfg> {
config: &'cfg Config, config: &'cfg Config,
format: Format, format: Format,
first: bool,
last_update: Instant,
name: String, name: String,
done: bool, done: bool,
throttle: Throttle,
} }
struct Format { struct Format {
@ -50,10 +54,9 @@ impl<'cfg> Progress<'cfg> {
max_width: n, max_width: n,
max_print: 80, max_print: 80,
}, },
first: true,
last_update: Instant::now(),
name: name.to_string(), name: name.to_string(),
done: false, done: false,
throttle: Throttle::new(),
}), }),
} }
} }
@ -62,36 +65,19 @@ impl<'cfg> Progress<'cfg> {
self.state = None; self.state = None;
} }
pub fn is_enabled(&self) -> bool {
self.state.is_some()
}
pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> { pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> {
Self::with_style(name, ProgressStyle::Percentage, cfg) Self::with_style(name, ProgressStyle::Percentage, cfg)
} }
pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> { pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> {
match self.state { let s = match &mut self.state {
Some(ref mut s) => s.tick(cur, max, "", true), Some(s) => s,
None => Ok(()), None => return Ok(()),
} };
}
pub fn clear(&mut self) {
if let Some(ref mut s) = self.state {
s.clear();
}
}
pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max, msg, false),
None => Ok(()),
}
}
}
impl<'cfg> State<'cfg> {
fn tick(&mut self, cur: usize, max: usize, msg: &str, throttle: bool) -> CargoResult<()> {
if self.done {
return Ok(());
}
// Don't update too often as it can cause excessive performance loss // Don't update too often as it can cause excessive performance loss
// just putting stuff onto the terminal. We also want to avoid // just putting stuff onto the terminal. We also want to avoid
@ -105,36 +91,110 @@ impl<'cfg> State<'cfg> {
// 2. If we've drawn something, then we rate limit ourselves to only // 2. If we've drawn something, then we rate limit ourselves to only
// draw to the console every so often. Currently there's a 100ms // draw to the console every so often. Currently there's a 100ms
// delay between updates. // delay between updates.
if throttle { if !s.throttle.allowed() {
if self.first { return Ok(())
let delay = Duration::from_millis(500);
if self.last_update.elapsed() < delay {
return Ok(());
}
self.first = false;
} else {
let interval = Duration::from_millis(100);
if self.last_update.elapsed() < interval {
return Ok(());
}
}
self.last_update = Instant::now();
} }
if cur == max { s.tick(cur, max, "")
}
pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max, msg),
None => Ok(()),
}
}
pub fn update_allowed(&mut self) -> bool {
match &mut self.state {
Some(s) => s.throttle.allowed(),
None => false,
}
}
pub fn print_now(&mut self, msg: &str) -> CargoResult<()> {
match &mut self.state {
Some(s) => s.print("", msg),
None => Ok(()),
}
}
pub fn clear(&mut self) {
if let Some(ref mut s) = self.state {
s.clear();
}
}
}
impl Throttle {
fn new() -> Throttle {
Throttle {
first: true,
last_update: Instant::now(),
}
}
fn allowed(&mut self) -> bool {
if self.first {
let delay = Duration::from_millis(500);
if self.last_update.elapsed() < delay {
return false
}
} else {
let interval = Duration::from_millis(100);
if self.last_update.elapsed() < interval {
return false
}
}
self.update();
true
}
fn update(&mut self) {
self.first = false;
self.last_update = Instant::now();
}
}
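`Throttle` captures the old inline rate limiting as a reusable policy: the first draw is suppressed for 500ms, so fast operations never flash a progress bar at all, and subsequent redraws are limited to one per 100ms. A minimal sketch of the intended use:

    let mut throttle = Throttle::new();
    // in a hot loop:
    if throttle.allowed() {
        redraw()?; // stand-in for the actual terminal write
    }

Callers that print through another path call `update()` directly so the next `allowed()` check starts a fresh interval, as `State::print` does below.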
impl<'cfg> State<'cfg> {
fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
if self.done {
return Ok(());
}
if max > 0 && cur == max {
self.done = true; self.done = true;
} }
// Write out a pretty header, then the progress bar itself, and then // Write out a pretty header, then the progress bar itself, and then
// return back to the beginning of the line for the next print. // return back to the beginning of the line for the next print.
self.try_update_max_width(); self.try_update_max_width();
if let Some(string) = self.format.progress_status(cur, max, msg) { if let Some(pbar) = self.format.progress(cur, max) {
self.config.shell().status_header(&self.name)?; self.print(&pbar, msg)?;
write!(self.config.shell().err(), "{}\r", string)?;
} }
Ok(()) Ok(())
} }
fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> {
self.throttle.update();
self.try_update_max_width();
// make sure we have enough room for the header
if self.format.max_width < 15 {
return Ok(())
}
self.config.shell().status_header(&self.name)?;
let mut line = prefix.to_string();
self.format.render(&mut line, msg);
while line.len() < self.format.max_width - 15 {
line.push(' ');
}
write!(self.config.shell().err(), "{}\r", line)?;
Ok(())
}
fn clear(&mut self) { fn clear(&mut self) {
self.try_update_max_width(); self.try_update_max_width();
let blank = " ".repeat(self.format.max_width); let blank = " ".repeat(self.format.max_width);
@ -149,7 +209,7 @@ impl<'cfg> State<'cfg> {
} }
impl Format { impl Format {
fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> { fn progress(&self, cur: usize, max: usize) -> Option<String> {
// Render the percentage at the far right and then figure how long the // Render the percentage at the far right and then figure how long the
// progress bar is // progress bar is
let pct = (cur as f64) / (max as f64); let pct = (cur as f64) / (max as f64);
@ -188,26 +248,36 @@ impl Format {
string.push_str("]"); string.push_str("]");
string.push_str(&stats); string.push_str(&stats);
let mut avail_msg_len = self.max_width - self.width(); Some(string)
}
fn render(&self, string: &mut String, msg: &str) {
let mut avail_msg_len = self.max_width - string.len() - 15;
let mut ellipsis_pos = 0; let mut ellipsis_pos = 0;
if avail_msg_len > 3 { if avail_msg_len <= 3 {
for c in msg.chars() { return
let display_width = c.width().unwrap_or(0); }
if avail_msg_len >= display_width { for c in msg.chars() {
avail_msg_len -= display_width; let display_width = c.width().unwrap_or(0);
string.push(c); if avail_msg_len >= display_width {
if avail_msg_len >= 3 { avail_msg_len -= display_width;
ellipsis_pos = string.len(); string.push(c);
} if avail_msg_len >= 3 {
} else { ellipsis_pos = string.len();
string.truncate(ellipsis_pos);
string.push_str("...");
break;
} }
} else {
string.truncate(ellipsis_pos);
string.push_str("...");
break;
} }
} }
}
Some(string) #[cfg(test)]
fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> {
let mut ret = self.progress(cur, max)?;
self.render(&mut ret, msg);
Some(ret)
} }
fn width(&self) -> usize { fn width(&self) -> usize {

View File

@ -101,6 +101,7 @@ timeout = 30 # Timeout for each HTTP request, in seconds
cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional) cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional)
check-revoke = true # Indicates whether SSL certs are checked for revocation check-revoke = true # Indicates whether SSL certs are checked for revocation
low-speed-limit = 5 # Lower threshold for bytes/sec (10 = default, 0 = disabled) low-speed-limit = 5 # Lower threshold for bytes/sec (10 = default, 0 = disabled)
multiplexing = false # Whether to use HTTP/2 multiplexing where possible
[build] [build]
jobs = 1 # number of parallel jobs, defaults to # of CPUs jobs = 1 # number of parallel jobs, defaults to # of CPUs

View File

@ -57,7 +57,8 @@ fn depend_on_alt_registry() {
.with_stderr(&format!( .with_stderr(&format!(
"\ "\
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] bar v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) [COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -110,8 +111,9 @@ fn depend_on_alt_registry_depends_on_same_registry_no_index() {
.with_stderr(&format!( .with_stderr(&format!(
"\ "\
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 (registry `[ROOT][..]`) [COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) [COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -152,8 +154,9 @@ fn depend_on_alt_registry_depends_on_same_registry() {
.with_stderr(&format!( .with_stderr(&format!(
"\ "\
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 (registry `[ROOT][..]`) [COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) [COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -195,8 +198,9 @@ fn depend_on_alt_registry_depends_on_crates_io() {
"\ "\
[UPDATING] `{alt_reg}` index [UPDATING] `{alt_reg}` index
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 (registry `[ROOT][..]`) [COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) [COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -363,8 +367,8 @@ fn alt_registry_and_crates_io_deps() {
)).with_stderr_contains(&format!( )).with_stderr_contains(&format!(
"[UPDATING] `{}` index", "[UPDATING] `{}` index",
registry::registry_path().to_str().unwrap())) registry::registry_path().to_str().unwrap()))
.with_stderr_contains("[DOWNLOADING] crates_io_dep v0.0.1 (registry `[ROOT][..]`)") .with_stderr_contains("[DOWNLOADED] crates_io_dep v0.0.1 (registry `[ROOT][..]`)")
.with_stderr_contains("[DOWNLOADING] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)") .with_stderr_contains("[DOWNLOADED] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)")
.with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)") .with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)")
.with_stderr_contains("[COMPILING] crates_io_dep v0.0.1") .with_stderr_contains("[COMPILING] crates_io_dep v0.0.1")
.with_stderr_contains("[COMPILING] foo v0.0.1 ([CWD])") .with_stderr_contains("[COMPILING] foo v0.0.1 ([CWD])")

View File

@ -3569,11 +3569,12 @@ fn build_all_member_dependency_same_name() {
p.cargo("build --all") p.cargo("build --all")
.with_stderr( .with_stderr(
"[..] Updating `[..]` index\n\ "[UPDATING] `[..]` index\n\
[..] Downloading a v0.1.0 ([..])\n\ [DOWNLOADING] crates ...\n\
[..] Compiling a v0.1.0\n\ [DOWNLOADED] a v0.1.0 ([..])\n\
[..] Compiling a v0.1.0 ([..])\n\ [COMPILING] a v0.1.0\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", [COMPILING] a v0.1.0 ([..])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
).run(); ).run();
} }

View File

@ -2707,7 +2707,8 @@ fn warnings_hidden_for_upstream() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[RUNNING] `rustc [..]` [RUNNING] `rustc [..]`
[RUNNING] `[..]` [RUNNING] `[..]`
@ -2761,7 +2762,8 @@ fn warnings_printed_on_vv() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[RUNNING] `rustc [..]` [RUNNING] `rustc [..]`
[RUNNING] `[..]` [RUNNING] `[..]`

View File

@ -223,8 +223,9 @@ fn works_through_the_registry() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] [..] index [UPDATING] [..] index
[DOWNLOADING] [..] [DOWNLOADING] crates ...
[DOWNLOADING] [..] [DOWNLOADED] [..]
[DOWNLOADED] [..]
[COMPILING] baz v0.1.0 [COMPILING] baz v0.1.0
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..])
@ -267,7 +268,8 @@ fn ignore_version_from_other_platform() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] [..] index [UPDATING] [..] index
[DOWNLOADING] [..] [DOWNLOADING] crates ...
[DOWNLOADED] [..]
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]

View File

@ -331,7 +331,8 @@ fn crates_io_then_directory() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.1.0 ([CWD]) [COMPILING] foo v0.1.0 ([CWD])
[FINISHED] [..] [FINISHED] [..]

View File

@ -53,8 +53,8 @@ fn fetch_all_platform_dependencies_when_no_target_is_given() {
.build(); .build();
p.cargo("fetch") p.cargo("fetch")
.with_stderr_contains("[..] Downloading d1 v1.2.3 [..]") .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
.with_stderr_contains("[..] Downloading d2 v0.1.2 [..]") .with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]")
.run(); .run();
} }
@ -100,13 +100,13 @@ fn fetch_platform_specific_dependencies() {
p.cargo("fetch --target") p.cargo("fetch --target")
.arg(&host) .arg(&host)
.with_stderr_contains("[..] Downloading d1 v1.2.3 [..]") .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
.with_stderr_does_not_contain("[..] Downloading d2 v0.1.2 [..]") .with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]")
.run(); .run();
p.cargo("fetch --target") p.cargo("fetch --target")
.arg(&target) .arg(&target)
.with_stderr_contains("[..] Downloading d2 v0.1.2[..]") .with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]")
.with_stderr_does_not_contain("[..] Downloading d1 v1.2.3 [..]") .with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]")
.run(); .run();
} }

View File

@ -2359,8 +2359,8 @@ fn include_overrides_gitignore() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] filetime [..] [DOWNLOADED] filetime [..]
[DOWNLOADING] libc [..] [DOWNLOADED] libc [..]
[COMPILING] libc [..] [COMPILING] libc [..]
[RUNNING] `rustc --crate-name libc [..]` [RUNNING] `rustc --crate-name libc [..]`
[COMPILING] filetime [..] [COMPILING] filetime [..]

View File

@ -27,7 +27,8 @@ fn simple() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] foo v0.0.1 (registry [..]) [DOWNLOADING] crates ...
[DOWNLOADED] foo v0.0.1 (registry [..])
[INSTALLING] foo v0.0.1 [INSTALLING] foo v0.0.1
[COMPILING] foo v0.0.1 [COMPILING] foo v0.0.1
[FINISHED] release [optimized] target(s) in [..] [FINISHED] release [optimized] target(s) in [..]
@ -53,12 +54,14 @@ fn multiple_pkgs() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] foo v0.0.1 (registry `[CWD]/registry`) [DOWNLOADING] crates ...
[DOWNLOADED] foo v0.0.1 (registry `[CWD]/registry`)
[INSTALLING] foo v0.0.1 [INSTALLING] foo v0.0.1
[COMPILING] foo v0.0.1 [COMPILING] foo v0.0.1
[FINISHED] release [optimized] target(s) in [..] [FINISHED] release [optimized] target(s) in [..]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] [INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[DOWNLOADING] bar v0.0.2 (registry `[CWD]/registry`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.2 (registry `[CWD]/registry`)
[INSTALLING] bar v0.0.2 [INSTALLING] bar v0.0.2
[COMPILING] bar v0.0.2 [COMPILING] bar v0.0.2
[FINISHED] release [optimized] target(s) in [..] [FINISHED] release [optimized] target(s) in [..]
@ -97,7 +100,8 @@ fn pick_max_version() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] foo v0.2.1 (registry [..]) [DOWNLOADING] crates ...
[DOWNLOADED] foo v0.2.1 (registry [..])
[INSTALLING] foo v0.2.1 [INSTALLING] foo v0.2.1
[COMPILING] foo v0.2.1 [COMPILING] foo v0.2.1
[FINISHED] release [optimized] target(s) in [..] [FINISHED] release [optimized] target(s) in [..]
@ -1004,7 +1008,7 @@ fn vers_precise() {
pkg("foo", "0.1.2"); pkg("foo", "0.1.2");
cargo_process("install foo --vers 0.1.1") cargo_process("install foo --vers 0.1.1")
.with_stderr_contains("[DOWNLOADING] foo v0.1.1 (registry [..])") .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
.run(); .run();
} }
@ -1014,7 +1018,7 @@ fn version_too() {
pkg("foo", "0.1.2"); pkg("foo", "0.1.2");
cargo_process("install foo --version 0.1.1") cargo_process("install foo --version 0.1.1")
.with_stderr_contains("[DOWNLOADING] foo v0.1.1 (registry [..])") .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
.run(); .run();
} }

View File

@ -185,7 +185,8 @@ fn transitive() {
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]` [UPDATING] git repository `[..]`
[DOWNLOADING] baz v0.2.0 (registry [..]) [DOWNLOADING] crates ...
[DOWNLOADED] baz v0.2.0 (registry [..])
[COMPILING] bar v0.1.0 (file://[..]) [COMPILING] bar v0.1.0 (file://[..])
[COMPILING] baz v0.2.0 [COMPILING] baz v0.2.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -338,8 +339,9 @@ fn use_a_spec_to_select() {
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]` [UPDATING] git repository `[..]`
[DOWNLOADING] [..] [DOWNLOADING] crates ...
[DOWNLOADING] [..] [DOWNLOADED] [..]
[DOWNLOADED] [..]
[COMPILING] [..] [COMPILING] [..]
[COMPILING] [..] [COMPILING] [..]
[COMPILING] [..] [COMPILING] [..]
@ -395,7 +397,8 @@ fn override_adds_some_deps() {
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]` [UPDATING] git repository `[..]`
[DOWNLOADING] baz v0.1.1 (registry [..]) [DOWNLOADING] crates ...
[DOWNLOADED] baz v0.1.1 (registry [..])
[COMPILING] baz v0.1.1 [COMPILING] baz v0.1.1
[COMPILING] bar v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..])
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -832,7 +835,8 @@ documented online at the url below for more information.
https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies
[DOWNLOADING] [..] [DOWNLOADING] crates ...
[DOWNLOADED] [..]
[COMPILING] [..] [COMPILING] [..]
[COMPILING] [..] [COMPILING] [..]
[COMPILING] [..] [COMPILING] [..]

View File

@ -51,7 +51,8 @@ fn replace() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[DOWNLOADING] baz v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] baz v0.1.0 ([..])
[COMPILING] bar v0.1.0 ([CWD]/bar) [COMPILING] bar v0.1.0 ([CWD]/bar)
[COMPILING] baz v0.1.0 [COMPILING] baz v0.1.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -217,7 +218,8 @@ fn unused() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[DOWNLOADING] bar v0.1.0 [..] [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -275,7 +277,8 @@ fn unused_git() {
"\ "\
[UPDATING] git repository `file://[..]` [UPDATING] git repository `file://[..]`
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[DOWNLOADING] bar v0.1.0 [..] [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -309,7 +312,8 @@ fn add_patch() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[DOWNLOADING] bar v0.1.0 [..] [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -368,7 +372,8 @@ fn add_ignored_patch() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[DOWNLOADING] bar v0.1.0 [..] [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -535,7 +540,8 @@ fn new_major() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[ROOT][..]` index [UPDATING] `[ROOT][..]` index
[DOWNLOADING] bar v0.2.0 [..] [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.2.0 [..]
[COMPILING] bar v0.2.0 [COMPILING] bar v0.2.0
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]

View File

@ -40,7 +40,8 @@ fn simple() {
.with_stderr(&format!( .with_stderr(&format!(
"\ "\
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] bar v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -85,8 +86,9 @@ fn deps() {
.with_stderr(&format!( .with_stderr(&format!(
"\ "\
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 [COMPILING] baz v0.0.1
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -265,11 +267,9 @@ fn bad_cksum() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] [..] index [UPDATING] [..] index
[DOWNLOADING] bad-cksum [..] [DOWNLOADING] crates ...
[ERROR] unable to get packages from source [DOWNLOADED] bad-cksum [..]
[ERROR] failed to download replaced source registry `https://[..]`
Caused by:
failed to download replaced source registry `https://[..]`
Caused by: Caused by:
failed to verify the checksum of `bad-cksum v0.0.1 (registry `[ROOT][..]`)` failed to verify the checksum of `bad-cksum v0.0.1 (registry `[ROOT][..]`)`
@ -312,7 +312,8 @@ required by package `foo v0.0.1 ([..])`
.with_stderr(format!( .with_stderr(format!(
"\ "\
[UPDATING] `{reg}` index [UPDATING] `{reg}` index
[DOWNLOADING] notyet v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] notyet v0.0.1 (registry `[ROOT][..]`)
[COMPILING] notyet v0.0.1 [COMPILING] notyet v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -367,7 +368,8 @@ required by package `foo v0.0.1 ([..])`
[PACKAGING] foo v0.0.1 ([CWD]) [PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD]) [VERIFYING] foo v0.0.1 ([CWD])
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] notyet v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] notyet v0.0.1 (registry `[ROOT][..]`)
[COMPILING] notyet v0.0.1 [COMPILING] notyet v0.0.1
[COMPILING] foo v0.0.1 ([CWD][..]) [COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -398,7 +400,8 @@ fn lockfile_locks() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -435,8 +438,9 @@ fn lockfile_locks_transitively() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 [COMPILING] baz v0.0.1
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -480,8 +484,9 @@ fn yanks_are_not_used() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 [COMPILING] baz v0.0.1
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -587,7 +592,8 @@ fn update_with_lockfile_if_packages_missing() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
", ",
).run(); ).run();
@ -630,7 +636,8 @@ fn update_lockfile() {
p.cargo("build") p.cargo("build")
.with_stderr( .with_stderr(
"\ "\
[DOWNLOADING] [..] v0.0.2 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] [..] v0.0.2 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.2 [COMPILING] bar v0.0.2
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -650,7 +657,8 @@ fn update_lockfile() {
p.cargo("build") p.cargo("build")
.with_stderr( .with_stderr(
"\ "\
[DOWNLOADING] [..] v0.0.3 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] [..] v0.0.3 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.3 [COMPILING] bar v0.0.3
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -728,7 +736,8 @@ fn dev_dependency_not_used() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] [..] v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -812,7 +821,8 @@ fn updating_a_dep() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] a v0.0.1 ([CWD]/a) [COMPILING] a v0.0.1 ([CWD]/a)
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -838,7 +848,8 @@ fn updating_a_dep() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.1.0 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 (registry `[ROOT][..]`)
[COMPILING] bar v0.1.0 [COMPILING] bar v0.1.0
[COMPILING] a v0.0.1 ([CWD]/a) [COMPILING] a v0.0.1 ([CWD]/a)
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -892,7 +903,8 @@ fn git_and_registry_dep() {
"\ "\
[UPDATING] [..] [UPDATING] [..]
[UPDATING] [..] [UPDATING] [..]
[DOWNLOADING] a v0.0.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] a v0.0.1 (registry `[ROOT][..]`)
[COMPILING] a v0.0.1 [COMPILING] a v0.0.1
[COMPILING] b v0.0.1 ([..]) [COMPILING] b v0.0.1 ([..])
[COMPILING] foo v0.0.1 ([CWD]) [COMPILING] foo v0.0.1 ([CWD])
@ -965,7 +977,8 @@ fn update_publish_then_update() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] [..] [UPDATING] [..]
[DOWNLOADING] a v0.1.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] a v0.1.1 (registry `[ROOT][..]`)
[COMPILING] a v0.1.1 [COMPILING] a v0.1.1
[COMPILING] foo v0.5.0 ([CWD]) [COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -996,7 +1009,8 @@ fn fetch_downloads() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] a v0.1.0 (registry [..]) [DOWNLOADING] crates ...
[DOWNLOADED] a v0.1.0 (registry [..])
", ",
).run(); ).run();
} }
@ -1036,7 +1050,8 @@ fn update_transitive_dependency() {
p.cargo("build") p.cargo("build")
.with_stderr( .with_stderr(
"\ "\
[DOWNLOADING] b v0.1.1 (registry `[ROOT][..]`) [DOWNLOADING] crates ...
[DOWNLOADED] b v0.1.1 (registry `[ROOT][..]`)
[COMPILING] b v0.1.1 [COMPILING] b v0.1.1
[COMPILING] a v0.1.0 [COMPILING] a v0.1.0
[COMPILING] foo v0.5.0 ([..]) [COMPILING] foo v0.5.0 ([..])
@ -1139,9 +1154,9 @@ fn update_multiple_packages() {
).run(); ).run();
p.cargo("build") p.cargo("build")
.with_stderr_contains("[DOWNLOADING] a v0.1.1 (registry `[ROOT][..]`)") .with_stderr_contains("[DOWNLOADED] a v0.1.1 (registry `[ROOT][..]`)")
.with_stderr_contains("[DOWNLOADING] b v0.1.1 (registry `[ROOT][..]`)") .with_stderr_contains("[DOWNLOADED] b v0.1.1 (registry `[ROOT][..]`)")
.with_stderr_contains("[DOWNLOADING] c v0.1.1 (registry `[ROOT][..]`)") .with_stderr_contains("[DOWNLOADED] c v0.1.1 (registry `[ROOT][..]`)")
.with_stderr_contains("[COMPILING] a v0.1.1") .with_stderr_contains("[COMPILING] a v0.1.1")
.with_stderr_contains("[COMPILING] b v0.1.1") .with_stderr_contains("[COMPILING] b v0.1.1")
.with_stderr_contains("[COMPILING] c v0.1.1") .with_stderr_contains("[COMPILING] c v0.1.1")
@ -1266,7 +1281,8 @@ fn only_download_relevant() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] baz v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] baz v0.1.0 ([..])
[COMPILING] baz v0.1.0 [COMPILING] baz v0.1.0
[COMPILING] bar v0.5.0 ([..]) [COMPILING] bar v0.5.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@ -1509,7 +1525,8 @@ update to a fixed version or contact the upstream maintainer about
this warning. this warning.
[UPDATING] [..] [UPDATING] [..]
[DOWNLOADING] [..] [DOWNLOADING] crates ...
[DOWNLOADED] [..]
[COMPILING] [..] [COMPILING] [..]
[COMPILING] [..] [COMPILING] [..]
[FINISHED] [..] [FINISHED] [..]
@ -1554,7 +1571,8 @@ fn old_version_req_upstream() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] [..] [UPDATING] [..]
[DOWNLOADING] [..] [DOWNLOADING] crates ...
[DOWNLOADED] [..]
warning: parsed version requirement `0.2*` is no longer valid warning: parsed version requirement `0.2*` is no longer valid
Previous versions of Cargo accepted this malformed requirement, Previous versions of Cargo accepted this malformed requirement,
@ -1661,11 +1679,9 @@ fn bad_and_or_malicious_packages_rejected() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] [..] [UPDATING] [..]
[DOWNLOADING] [..] [DOWNLOADING] crates ...
error: unable to get packages from source [DOWNLOADED] [..]
error: failed to download [..]
Caused by:
failed to download [..]
Caused by: Caused by:
failed to unpack [..] failed to unpack [..]

View File

@ -248,7 +248,8 @@ fn rename_twice() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] foo v0.1.0 (registry [..]) [DOWNLOADING] crates ...
[DOWNLOADED] foo v0.1.0 (registry [..])
error: multiple dependencies listed for the same crate must all have the same \ error: multiple dependencies listed for the same crate must all have the same \
name, but the dependency on `foo v0.1.0` is listed as having different names name, but the dependency on `foo v0.1.0` is listed as having different names
", ",

View File

@ -1389,6 +1389,7 @@ fn substitute_macros(input: &str) -> String {
("[DOCTEST]", " Doc-tests"), ("[DOCTEST]", " Doc-tests"),
("[PACKAGING]", " Packaging"), ("[PACKAGING]", " Packaging"),
("[DOWNLOADING]", " Downloading"), ("[DOWNLOADING]", " Downloading"),
("[DOWNLOADED]", " Downloaded"),
("[UPLOADING]", " Uploading"), ("[UPLOADING]", " Uploading"),
("[VERIFYING]", " Verifying"), ("[VERIFYING]", " Verifying"),
("[ARCHIVING]", " Archiving"), ("[ARCHIVING]", " Archiving"),

View File

@ -59,7 +59,8 @@ fn no_warning_on_success() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] bar v0.0.1 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 ([..])
[COMPILING] bar v0.0.1 [COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 ([..]) [COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -79,7 +80,7 @@ fn no_warning_on_bin_failure() {
.with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1)) .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1))
.with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2)) .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2))
.with_stderr_contains("[UPDATING] `[..]` index") .with_stderr_contains("[UPDATING] `[..]` index")
.with_stderr_contains("[DOWNLOADING] bar v0.0.1 ([..])") .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])")
.with_stderr_contains("[COMPILING] bar v0.0.1") .with_stderr_contains("[COMPILING] bar v0.0.1")
.with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])")
.run(); .run();
@ -96,7 +97,7 @@ fn warning_on_lib_failure() {
.with_stderr_does_not_contain("hidden stderr") .with_stderr_does_not_contain("hidden stderr")
.with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])") .with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])")
.with_stderr_contains("[UPDATING] `[..]` index") .with_stderr_contains("[UPDATING] `[..]` index")
.with_stderr_contains("[DOWNLOADING] bar v0.0.1 ([..])") .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])")
.with_stderr_contains("[COMPILING] bar v0.0.1") .with_stderr_contains("[COMPILING] bar v0.0.1")
.with_stderr_contains(&format!("[WARNING] {}", WARNING1)) .with_stderr_contains(&format!("[WARNING] {}", WARNING1))
.with_stderr_contains(&format!("[WARNING] {}", WARNING2)) .with_stderr_contains(&format!("[WARNING] {}", WARNING2))

View File

@ -557,7 +557,8 @@ fn share_dependencies() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] dep1 v0.1.3 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] dep1 v0.1.3 ([..])
[COMPILING] dep1 v0.1.3 [COMPILING] dep1 v0.1.3
[COMPILING] foo v0.1.0 ([..]) [COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -600,7 +601,8 @@ fn fetch_fetches_all() {
.with_stderr( .with_stderr(
"\ "\
[UPDATING] `[..]` index [UPDATING] `[..]` index
[DOWNLOADING] dep1 v0.1.3 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] dep1 v0.1.3 ([..])
", ",
).run(); ).run();
} }
@ -650,7 +652,8 @@ fn lock_works_for_everyone() {
p.cargo("build") p.cargo("build")
.with_stderr( .with_stderr(
"\ "\
[DOWNLOADING] dep2 v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] dep2 v0.1.0 ([..])
[COMPILING] dep2 v0.1.0 [COMPILING] dep2 v0.1.0
[COMPILING] foo v0.1.0 ([..]) [COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -661,7 +664,8 @@ fn lock_works_for_everyone() {
.cwd(p.root().join("bar")) .cwd(p.root().join("bar"))
.with_stderr( .with_stderr(
"\ "\
[DOWNLOADING] dep1 v0.1.0 ([..]) [DOWNLOADING] crates ...
[DOWNLOADED] dep1 v0.1.0 ([..])
[COMPILING] dep1 v0.1.0 [COMPILING] dep1 v0.1.0
[COMPILING] bar v0.1.0 ([..]) [COMPILING] bar v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]