Multi-package publishing

Co-authored-by: Tor Hovland <55164+torhovland@users.noreply.github.com>
Co-authored-by: Ed Page <eopage@gmail.com>
This commit is contained in:
Joe Neeman 2024-09-03 16:59:17 +07:00
parent 431d84a6bf
commit a016e5f5c2
6 changed files with 485 additions and 126 deletions

View File

@ -93,30 +93,6 @@ struct GitVcsInfo {
dirty: bool,
}
/// Packages a single package in a workspace, returning the resulting tar file.
///
/// # Panics
/// Panics if `opts.list` is true. In that case you probably don't want to
/// actually build the package tarball; you should just make and print the list
/// of files. (We don't currently provide a public API for that, but see how
/// [`package`] does it.)
pub fn package_one(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<FileLock> {
    // Listing mode is unsupported here; see the panic note above.
    assert!(!opts.list);

    // Gather the files that belong in the archive, then build the tarball.
    let archive_files = prepare_archive(ws, pkg, opts)?;
    let tarball = create_package(ws, pkg, archive_files, None)?;

    // Optionally rebuild from the tarball to check that it is self-contained.
    if opts.verify {
        run_verify(ws, pkg, &tarball, None, opts)?;
    }

    Ok(tarball)
}
// Builds a tarball and places it in the output directory.
fn create_package(
ws: &Workspace<'_>,
@ -193,6 +169,34 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
// So we need filter
pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));
Ok(do_package(ws, opts, pkgs)?
.into_iter()
.map(|x| x.2)
.collect())
}
/// Packages an entire workspace.
///
/// Returns the generated package files and the dependencies between them. If
/// `opts.list` is true, skips generating package files and returns an empty
/// list.
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    // Build every requested package first, then attach each tarball (plus the
    // CLI features it was built with) to the local dependency graph.
    let built = do_package(ws, opts, pkgs)?;
    let annotated = built
        .into_iter()
        .map(|(pkg, pkg_opts, tarball)| (pkg, (pkg_opts.cli_features, tarball)));
    Ok(local_deps(annotated))
}
fn do_package<'a>(
ws: &Workspace<'_>,
opts: &PackageOpts<'a>,
pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
if ws
.lock_root()
.as_path_unlocked()
@ -264,7 +268,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
}
}
Ok(outputs.into_iter().map(|x| x.2).collect())
Ok(outputs)
}
/// Determine which registry the packages are for.
@ -308,15 +312,14 @@ fn get_registry(
}
/// Just the part of the dependency graph that's between the packages we're packaging.
/// (Is the package name a good key? Does it uniquely identify packages?)
#[derive(Clone, Debug, Default)]
struct LocalDependencies {
packages: HashMap<PackageId, (Package, CliFeatures)>,
graph: Graph<PackageId, ()>,
pub(crate) struct LocalDependencies<T> {
    // Each package being packaged, keyed by its id, together with a
    // caller-supplied payload `T` (e.g. CLI features, or a built tarball).
    pub packages: HashMap<PackageId, (Package, T)>,
    // Dependency edges between the packages above only; edges to packages
    // outside this set are not recorded here.
    pub graph: Graph<PackageId, ()>,
}
impl LocalDependencies {
fn sort(&self) -> Vec<(Package, CliFeatures)> {
impl<T: Clone> LocalDependencies<T> {
pub fn sort(&self) -> Vec<(Package, T)> {
self.graph
.sort()
.into_iter()
@ -335,9 +338,10 @@ impl LocalDependencies {
/// ignoring dev dependencies.
///
/// We assume that the packages all belong to this workspace.
fn local_deps(packages: impl Iterator<Item = (Package, CliFeatures)>) -> LocalDependencies {
let packages: HashMap<PackageId, (Package, CliFeatures)> =
packages.map(|pkg| (pkg.0.package_id(), pkg)).collect();
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
let packages: HashMap<PackageId, (Package, T)> = packages
.map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
.collect();
// Dependencies have source ids but not package ids. We draw an edge
// whenever a dependency's source id matches one of our packages. This is
@ -349,7 +353,7 @@ fn local_deps(packages: impl Iterator<Item = (Package, CliFeatures)>) -> LocalDe
.collect();
let mut graph = Graph::new();
for (pkg, _features) in packages.values() {
for (pkg, _payload) in packages.values() {
graph.add(pkg.package_id());
for dep in pkg.dependencies() {
// Ignore local dev-dependencies because they aren't needed for intra-workspace

View File

@ -10,7 +10,7 @@ pub use self::cargo_fetch::{fetch, FetchOptions};
pub use self::cargo_install::{install, install_list};
pub use self::cargo_new::{init, new, NewOptions, NewProjectKind, VersionControl};
pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};
pub use self::cargo_package::{check_yanked, package, package_one, PackageOpts};
pub use self::cargo_package::{check_yanked, package, PackageOpts};
pub use self::cargo_pkgid::pkgid;
pub use self::cargo_read_manifest::read_package;
pub use self::cargo_run::run;

View File

@ -3,8 +3,12 @@
//! [1]: https://doc.rust-lang.org/nightly/cargo/reference/registry-web-api.html#publish
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs::File;
use std::io::Seek;
use std::io::SeekFrom;
use std::time::Duration;
use anyhow::bail;
@ -15,6 +19,7 @@ use cargo_util::paths;
use crates_io::NewCrate;
use crates_io::NewCrateDependency;
use crates_io::Registry;
use itertools::Itertools;
use crate::core::dependency::DepKind;
use crate::core::manifest::ManifestMetadata;
@ -28,6 +33,7 @@ use crate::core::Workspace;
use crate::ops;
use crate::ops::PackageOpts;
use crate::ops::Packages;
use crate::ops::RegistryOrIndex;
use crate::sources::source::QueryKind;
use crate::sources::source::Source;
use crate::sources::SourceConfigMap;
@ -36,13 +42,13 @@ use crate::util::auth;
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::toml::prepare_for_publish;
use crate::util::Graph;
use crate::util::Progress;
use crate::util::ProgressStyle;
use crate::CargoResult;
use crate::GlobalContext;
use super::super::check_dep_has_version;
use super::RegistryOrIndex;
pub struct PublishOpts<'gctx> {
pub gctx: &'gctx GlobalContext,
@ -59,16 +65,23 @@ pub struct PublishOpts<'gctx> {
}
pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
let multi_package_mode = ws.gctx().cli_unstable().package_workspace;
let specs = opts.to_publish.to_package_id_specs(ws)?;
if specs.len() > 1 {
bail!("the `-p` argument must be specified to select a single package to publish")
if !multi_package_mode {
if specs.len() > 1 {
bail!("the `-p` argument must be specified to select a single package to publish")
}
if Packages::Default == opts.to_publish && ws.is_virtual() {
bail!("the `-p` argument must be specified in the root of a virtual workspace")
}
}
if Packages::Default == opts.to_publish && ws.is_virtual() {
bail!("the `-p` argument must be specified in the root of a virtual workspace")
let member_ids: Vec<_> = ws.members().map(|p| p.package_id()).collect();
// Check that the specs match members.
for spec in &specs {
spec.query(member_ids.clone())?;
}
let member_ids = ws.members().map(|p| p.package_id());
// Check that the spec matches exactly one member.
specs[0].query(member_ids)?;
let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?;
// In `members_with_features_old`, it will add "current" package (determined by the cwd)
// So we need filter
@ -76,8 +89,6 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
.into_iter()
.filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())))
.collect();
// Double check. It is safe theoretically, unless logic has updated.
assert_eq!(pkgs.len(), 1);
let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();
let reg_or_index = match opts.reg_or_index.clone() {
@ -103,9 +114,6 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
// This is only used to confirm that we can create a token before we build the package.
// This causes the credential provider to be called an extra time, but keeps the same order of errors.
let (pkg, cli_features) = pkgs.pop().unwrap();
let ver = pkg.version().to_string();
let source_ids = super::get_source_id(opts.gctx, reg_or_index.as_ref())?;
let mut registry = super::registry(
opts.gctx,
@ -115,82 +123,147 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
true,
Some(Operation::Read).filter(|_| !opts.dry_run),
)?;
verify_dependencies(pkg, &registry, source_ids.original)?;
// Prepare a tarball, with a non-suppressible warning if metadata
// is missing since this is being put online.
let tarball = ops::package_one(
// Validate all the packages before publishing any of them.
for (pkg, _) in &pkgs {
verify_dependencies(pkg, &registry, source_ids.original)?;
}
let pkg_dep_graph = ops::cargo_package::package_with_dep_graph(
ws,
pkg,
&PackageOpts {
gctx: opts.gctx,
verify: opts.verify,
list: false,
check_metadata: true,
allow_dirty: opts.allow_dirty,
to_package: Packages::Default,
// `package_with_dep_graph` ignores this field in favor of
// the already-resolved list of packages
to_package: ops::Packages::Default,
targets: opts.targets.clone(),
jobs: opts.jobs.clone(),
keep_going: opts.keep_going,
cli_features,
reg_or_index,
cli_features: opts.cli_features.clone(),
reg_or_index: reg_or_index.clone(),
},
pkgs,
)?;
if !opts.dry_run {
let hash = cargo_util::Sha256::new()
.update_file(tarball.file())?
.finish_hex();
let operation = Operation::Publish {
name: pkg.name().as_str(),
vers: &ver,
cksum: &hash,
};
registry.set_token(Some(auth::auth_token(
&opts.gctx,
&source_ids.original,
None,
operation,
vec![],
false,
)?));
}
let mut plan = PublishPlan::new(&pkg_dep_graph.graph);
// May contains packages from previous rounds as `wait_for_any_publish_confirmation` returns
// after it confirms any packages, not all packages, requiring us to handle the rest in the next
// iteration.
//
// As a side effect, any given package's "effective" timeout may be much larger.
let mut to_confirm = BTreeSet::new();
opts.gctx
.shell()
.status("Uploading", pkg.package_id().to_string())?;
transmit(
opts.gctx,
ws,
pkg,
tarball.file(),
&mut registry,
source_ids.original,
opts.dry_run,
)?;
if !opts.dry_run {
const DEFAULT_TIMEOUT: u64 = 60;
let timeout = if opts.gctx.cli_unstable().publish_timeout {
let timeout: Option<u64> = opts.gctx.get("publish.timeout")?;
timeout.unwrap_or(DEFAULT_TIMEOUT)
} else {
DEFAULT_TIMEOUT
};
if 0 < timeout {
let timeout = Duration::from_secs(timeout);
wait_for_publish(opts.gctx, source_ids.original, pkg, timeout)?;
while !plan.is_empty() {
// There might not be any ready package, if the previous confirmations
// didn't unlock a new one. For example, if `c` depends on `a` and
// `b`, and we uploaded `a` and `b` but only confirmed `a`, then on
// the following pass through the outer loop nothing will be ready for
// upload.
for pkg_id in plan.take_ready() {
let (pkg, (_features, tarball)) = &pkg_dep_graph.packages[&pkg_id];
opts.gctx.shell().status("Uploading", pkg.package_id())?;
if !opts.dry_run {
let ver = pkg.version().to_string();
tarball.file().seek(SeekFrom::Start(0))?;
let hash = cargo_util::Sha256::new()
.update_file(tarball.file())?
.finish_hex();
let operation = Operation::Publish {
name: pkg.name().as_str(),
vers: &ver,
cksum: &hash,
};
registry.set_token(Some(auth::auth_token(
&opts.gctx,
&source_ids.original,
None,
operation,
vec![],
false,
)?));
}
transmit(
opts.gctx,
ws,
pkg,
tarball.file(),
&mut registry,
source_ids.original,
opts.dry_run,
)?;
to_confirm.insert(pkg_id);
if !opts.dry_run {
// Short does not include the registry name.
let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version());
let source_description = source_ids.original.to_string();
ws.gctx().shell().status(
"Uploaded",
format!("{short_pkg_description} to {source_description}"),
)?;
}
}
let confirmed = if opts.dry_run {
to_confirm.clone()
} else {
const DEFAULT_TIMEOUT: u64 = 60;
let timeout = if opts.gctx.cli_unstable().publish_timeout {
let timeout: Option<u64> = opts.gctx.get("publish.timeout")?;
timeout.unwrap_or(DEFAULT_TIMEOUT)
} else {
DEFAULT_TIMEOUT
};
if 0 < timeout {
let timeout = Duration::from_secs(timeout);
wait_for_any_publish_confirmation(
opts.gctx,
source_ids.original,
&to_confirm,
timeout,
)?
} else {
BTreeSet::new()
}
};
if confirmed.is_empty() {
// If nothing finished, it means we timed out while waiting for confirmation.
// We're going to exit, but first we need to check: have we uploaded everything?
if plan.is_empty() {
// It's ok that we timed out, because nothing was waiting on dependencies to
// be confirmed.
break;
} else {
let failed_list = package_list(plan.iter(), "and");
bail!("unable to publish {failed_list} due to time out while waiting for published dependencies to be available.");
}
}
for id in &confirmed {
to_confirm.remove(id);
}
plan.mark_confirmed(confirmed);
}
Ok(())
}
fn wait_for_publish(
/// Poll the registry for any packages that are ready for use.
///
/// Returns the subset of `pkgs` that are ready for use.
/// This will be an empty set if we timed out before confirming anything.
fn wait_for_any_publish_confirmation(
gctx: &GlobalContext,
registry_src: SourceId,
pkg: &Package,
pkgs: &BTreeSet<PackageId>,
timeout: Duration,
) -> CargoResult<()> {
) -> CargoResult<BTreeSet<PackageId>> {
let mut source = SourceConfigMap::empty(gctx)?.load(registry_src, &HashSet::new())?;
// Disable the source's built-in progress bars. Repeatedly showing a bunch
// of independent progress bars can be a little confusing. There is an
@ -202,18 +275,14 @@ fn wait_for_publish(
let sleep_time = Duration::from_secs(1);
let max = timeout.as_secs() as usize;
// Short does not include the registry name.
let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version());
gctx.shell().status(
"Uploaded",
format!("{short_pkg_description} to {source_description}"),
)?;
let short_pkg_descriptions = package_list(pkgs.iter().copied(), "or");
gctx.shell().note(format!(
"waiting for `{short_pkg_description}` to be available at {source_description}.\n\
"waiting for {short_pkg_descriptions} to be available at {source_description}.\n\
You may press ctrl-c to skip waiting; the crate should be available shortly."
))?;
let mut progress = Progress::with_style("Waiting", ProgressStyle::Ratio, gctx);
progress.tick_now(0, max, "")?;
let is_available = loop {
let available = loop {
{
let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;
// Force re-fetching the source
@ -223,34 +292,48 @@ fn wait_for_publish(
// multiple times
gctx.updated_sources().remove(&source.replaced_source_id());
source.invalidate_cache();
if poll_one_package(registry_src, &pkg.package_id(), &mut source)? {
break true;
let mut available = BTreeSet::new();
for pkg in pkgs {
if poll_one_package(registry_src, pkg, &mut source)? {
available.insert(*pkg);
}
}
// As soon as any package is available, break this loop so we can see if another
// one can be uploaded.
if !available.is_empty() {
break available;
}
}
let elapsed = now.elapsed();
if timeout < elapsed {
gctx.shell().warn(format!(
"timed out waiting for `{short_pkg_description}` to be available in {source_description}",
"timed out waiting for {short_pkg_descriptions} to be available in {source_description}",
))?;
gctx.shell().note(
"the registry may have a backlog that is delaying making the \
crate available. The crate should be available soon.",
)?;
break false;
break BTreeSet::new();
}
progress.tick_now(elapsed.as_secs() as usize, max, "")?;
std::thread::sleep(sleep_time);
};
if is_available {
if !available.is_empty() {
let short_pkg_description = available
.iter()
.map(|pkg| format!("{} v{}", pkg.name(), pkg.version()))
.sorted()
.join(", ");
gctx.shell().status(
"Published",
format!("{short_pkg_description} at {source_description}"),
)?;
}
Ok(())
Ok(available)
}
fn poll_one_package(
@ -485,6 +568,87 @@ fn transmit(
Ok(())
}
/// State for tracking dependencies during upload.
struct PublishPlan {
    /// Graph of publishable packages where the edges are `(dependency -> dependent)`
    dependents: Graph<PackageId, ()>,
    /// The weight of a package is the number of unpublished dependencies it has.
    dependencies_count: HashMap<PackageId, usize>,
}
impl PublishPlan {
    /// Given a package dependency graph, creates a `PublishPlan` for tracking state.
    ///
    /// `graph` has edges pointing from a package to its dependencies; we
    /// reverse it so that confirming a dependency lets us find its dependents.
    fn new(graph: &Graph<PackageId, ()>) -> Self {
        let dependents = graph.reversed();
        // A package's initial weight is its out-degree in the original graph,
        // i.e. the number of (not-yet-published) dependencies it has.
        let dependencies_count: HashMap<_, _> = dependents
            .iter()
            .map(|id| (*id, graph.edges(id).count()))
            .collect();
        Self {
            dependents,
            dependencies_count,
        }
    }
    /// Iterates over the packages that have not yet been handed out by
    /// [`PublishPlan::take_ready`].
    fn iter(&self) -> impl Iterator<Item = PackageId> + '_ {
        // Idiomatic form of `.iter().map(|(id, _)| *id)`.
        self.dependencies_count.keys().copied()
    }
    /// Returns `true` once every package has been handed out by
    /// [`PublishPlan::take_ready`].
    fn is_empty(&self) -> bool {
        self.dependencies_count.is_empty()
    }
    /// Returns the set of packages that are ready for publishing (i.e. have no outstanding dependencies).
    ///
    /// These will not be returned in future calls.
    fn take_ready(&mut self) -> BTreeSet<PackageId> {
        let ready: BTreeSet<_> = self
            .dependencies_count
            .iter()
            .filter_map(|(id, weight)| (*weight == 0).then_some(*id))
            .collect();
        // Remove them so subsequent calls only see the remaining packages.
        for pkg in &ready {
            self.dependencies_count.remove(pkg);
        }
        ready
    }
    /// Packages confirmed to be available in the registry, potentially allowing additional
    /// packages to be "ready".
    fn mark_confirmed(&mut self, published: impl IntoIterator<Item = PackageId>) {
        for id in published {
            for (dependent_id, _) in self.dependents.edges(&id) {
                if let Some(weight) = self.dependencies_count.get_mut(dependent_id) {
                    // `saturating_sub` guards against underflow if the same
                    // package is ever confirmed more than once.
                    *weight = weight.saturating_sub(1);
                }
            }
        }
    }
}
/// Format a collection of packages as a list
///
/// e.g. "foo v0.1.0, bar v0.2.0, and baz v0.3.0".
///
/// Note: the final separator (e.g. "and" in the previous example) can be chosen.
fn package_list(pkgs: impl IntoIterator<Item = PackageId>, final_sep: &str) -> String {
    // Render each package as "`name vX.Y.Z`" and sort for deterministic output.
    let mut entries: Vec<String> = pkgs
        .into_iter()
        .map(|pkg| format!("`{} v{}`", pkg.name(), pkg.version()))
        .collect();
    entries.sort();

    match entries.len() {
        0 => String::new(),
        1 => entries.remove(0),
        // Two items: no comma, just the separator word between them.
        2 => format!("{} {final_sep} {}", entries[0], entries[1]),
        // Three or more: comma-separate, with the separator before the last.
        _ => {
            let last = entries.pop().expect("len checked above");
            format!("{}, {final_sep} {last}", entries.join(", "))
        }
    }
}
fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>) -> CargoResult<()> {
for pkg in pkgs {
if pkg.publish() == &Some(Vec::new()) {
@ -518,3 +682,59 @@ fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>)
Ok(())
}
#[cfg(test)]
mod tests {
    use crate::{
        core::{PackageId, SourceId},
        sources::CRATES_IO_INDEX,
        util::{Graph, IntoUrl},
    };
    use super::PublishPlan;
    /// Helper: a `PackageId` named `name` at version 1.0.0 on the crates.io index.
    fn pkg_id(name: &str) -> PackageId {
        let loc = CRATES_IO_INDEX.into_url().unwrap();
        PackageId::try_new(name, "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap()
    }
    #[test]
    fn parallel_schedule() {
        // Dependency graph (an edge `x -> y` means "x depends on y"):
        //   a -> c, b -> c, c -> d, c -> e
        // so the expected publish order is {d, e}, then c, then {a, b}.
        let mut graph: Graph<PackageId, ()> = Graph::new();
        let a = pkg_id("a");
        let b = pkg_id("b");
        let c = pkg_id("c");
        let d = pkg_id("d");
        let e = pkg_id("e");
        graph.add(a);
        graph.add(b);
        graph.add(c);
        graph.add(d);
        graph.add(e);
        graph.link(a, c);
        graph.link(b, c);
        graph.link(c, d);
        graph.link(c, e);
        let mut order = PublishPlan::new(&graph);
        // d and e have no dependencies, so they are ready immediately.
        // (`take_ready` returns a BTreeSet, so the order is deterministic.)
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert_eq!(ready, vec![d, e]);
        // Confirming only d is not enough to unblock c (it also needs e).
        order.mark_confirmed(vec![d]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert!(ready.is_empty());
        // Once e is confirmed too, c becomes ready.
        order.mark_confirmed(vec![e]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert_eq!(ready, vec![c]);
        // Confirming c unblocks both of its dependents at once.
        order.mark_confirmed(vec![c]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert_eq!(ready, vec![a, b]);
        // Everything has been handed out; nothing further becomes ready.
        order.mark_confirmed(vec![a, b]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert!(ready.is_empty());
    }
}

View File

@ -25,6 +25,20 @@ impl<N: Eq + Ord + Clone, E: Default + Clone> Graph<N, E> {
.or_insert_with(Default::default)
}
/// Returns the graph obtained by reversing all edges.
pub fn reversed(&self) -> Graph<N, E> {
    let mut flipped = Graph::new();
    for node in self.iter() {
        // Keep isolated nodes present in the result as well.
        flipped.add(node.clone());
        // Each edge `node -> succ` becomes `succ -> node`, carrying its weight.
        for (succ, weight) in self.edges(node) {
            *flipped.link(succ.clone(), node.clone()) = weight.clone();
        }
    }
    flipped
}
pub fn contains<Q: ?Sized>(&self, k: &Q) -> bool
where
N: Borrow<Q>,
@ -206,6 +220,19 @@ fn path_to_self() {
assert_eq!(new.path_to_bottom(&0), vec![(&0, Some(&()))]);
}
#[test]
fn reverse() {
    // Build a graph with edges 0 -> 1 and 0 -> 2.
    let mut new: Graph<i32, ()> = Graph::new();
    new.link(0, 1);
    new.link(0, 2);
    // Reversing should flip both edges (1 -> 0 and 2 -> 0) while keeping
    // node 0 present even though it now has no outgoing edges.
    let mut expected: Graph<i32, ()> = Graph::new();
    expected.add(0);
    expected.link(1, 0);
    expected.link(2, 0);
    assert_eq!(new.reversed(), expected);
}
impl<N: Eq + Ord + Clone, E: Default + Clone> Default for Graph<N, E> {
fn default() -> Graph<N, E> {
Graph::new()

View File

@ -76,8 +76,8 @@ fn publish() {
{"v":1,"registry":{"index-url":"[..]","name":"alternative","headers":[..]},"kind":"get","operation":"read"}
[PACKAGING] foo v0.1.0 ([ROOT]/foo)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
[UPLOADING] foo v0.1.0 ([ROOT]/foo)
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
[UPLOADED] foo v0.1.0 to registry `alternative`
[NOTE] waiting for `foo v0.1.0` to be available at registry `alternative`.
You may press ctrl-c [..]
@ -529,8 +529,8 @@ fn token_caching() {
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"read"}
[PACKAGING] foo v0.1.0 ([ROOT]/foo)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
[UPLOADING] foo v0.1.0 ([ROOT]/foo)
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
[UPLOADED] foo v0.1.0 to registry `alternative`
[NOTE] waiting [..]
You may press ctrl-c [..]

View File

@ -3173,6 +3173,7 @@ See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for
[PACKAGING] foo v0.0.1 ([ROOT]/foo)
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[UPLOADING] foo v0.0.1 ([ROOT]/foo)
[UPLOADED] foo v0.0.1 to registry `crates-io`
"#]])
.run();
@ -3305,7 +3306,26 @@ fn timeout_waiting_for_dependency_publish() {
.masquerade_as_nightly_cargo(&["publish-timeout", "package-workspace"])
.with_status(101)
.with_stderr_data(str![[r#"
[ERROR] the `-p` argument must be specified to select a single package to publish
[UPDATING] crates.io index
[WARNING] manifest has no documentation, homepage or repository.
See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
[PACKAGING] dep v0.0.1 ([ROOT]/foo/dep)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[WARNING] manifest has no documentation, homepage or repository.
See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
[PACKAGING] main v0.0.1 ([ROOT]/foo/main)
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[WARNING] manifest has no documentation, homepage or repository.
See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
[PACKAGING] other v0.0.1 ([ROOT]/foo/other)
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[UPLOADING] dep v0.0.1 ([ROOT]/foo/dep)
[UPLOADED] dep v0.0.1 to registry `crates-io`
[NOTE] waiting for `dep v0.0.1` to be available at registry `crates-io`.
You may press ctrl-c to skip waiting; the crate should be available shortly.
[WARNING] timed out waiting for `dep v0.0.1` to be available in registry `crates-io`
[NOTE] the registry may have a backlog that is delaying making the crate available. The crate should be available soon.
[ERROR] unable to publish `main v0.0.1` and `other v0.0.1` due to time out while waiting for published dependencies to be available.
"#]])
.run();
@ -3588,10 +3608,47 @@ fn workspace_with_local_deps_nightly() {
p.cargo("publish -Zpackage-workspace")
.masquerade_as_nightly_cargo(&["package-workspace"])
.with_status(101)
.replace_crates_io(registry.index_url())
.with_stderr_data(str![[r#"
[ERROR] the `-p` argument must be specified to select a single package to publish
[UPDATING] crates.io index
[PACKAGING] level3 v0.0.1 ([ROOT]/foo/level3)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[PACKAGING] level2 v0.0.1 ([ROOT]/foo/level2)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[PACKAGING] level1 v0.0.1 ([ROOT]/foo/level1)
[UPDATING] crates.io index
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[VERIFYING] level3 v0.0.1 ([ROOT]/foo/level3)
[COMPILING] level3 v0.0.1 ([ROOT]/foo/target/package/level3-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[VERIFYING] level2 v0.0.1 ([ROOT]/foo/level2)
[UPDATING] crates.io index
[UNPACKING] level3 v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
[COMPILING] level3 v0.0.1
[COMPILING] level2 v0.0.1 ([ROOT]/foo/target/package/level2-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[VERIFYING] level1 v0.0.1 ([ROOT]/foo/level1)
[UPDATING] crates.io index
[UNPACKING] level2 v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
[COMPILING] level3 v0.0.1
[COMPILING] level2 v0.0.1
[COMPILING] level1 v0.0.1 ([ROOT]/foo/target/package/level1-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[UPLOADING] level3 v0.0.1 ([ROOT]/foo/level3)
[UPLOADED] level3 v0.0.1 to registry `crates-io`
[NOTE] waiting for `level3 v0.0.1` to be available at registry `crates-io`.
You may press ctrl-c to skip waiting; the crate should be available shortly.
[PUBLISHED] level3 v0.0.1 at registry `crates-io`
[UPLOADING] level2 v0.0.1 ([ROOT]/foo/level2)
[UPLOADED] level2 v0.0.1 to registry `crates-io`
[NOTE] waiting for `level2 v0.0.1` to be available at registry `crates-io`.
You may press ctrl-c to skip waiting; the crate should be available shortly.
[PUBLISHED] level2 v0.0.1 at registry `crates-io`
[UPLOADING] level1 v0.0.1 ([ROOT]/foo/level1)
[UPLOADED] level1 v0.0.1 to registry `crates-io`
[NOTE] waiting for `level1 v0.0.1` to be available at registry `crates-io`.
You may press ctrl-c to skip waiting; the crate should be available shortly.
[PUBLISHED] level1 v0.0.1 at registry `crates-io`
"#]])
.run();
@ -3659,10 +3716,41 @@ fn workspace_parallel() {
p.cargo("publish -Zpackage-workspace")
.masquerade_as_nightly_cargo(&["package-workspace"])
.replace_crates_io(registry.index_url())
.with_status(101)
.with_stderr_data(
str![[r#"
[ERROR] the `-p` argument must be specified to select a single package to publish
[UPDATING] crates.io index
[PACKAGING] a v0.0.1 ([ROOT]/foo/a)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[PACKAGING] b v0.0.1 ([ROOT]/foo/b)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[PACKAGING] c v0.0.1 ([ROOT]/foo/c)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[VERIFYING] a v0.0.1 ([ROOT]/foo/a)
[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[VERIFYING] b v0.0.1 ([ROOT]/foo/b)
[COMPILING] b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[VERIFYING] c v0.0.1 ([ROOT]/foo/c)
[UPDATING] crates.io index
[UNPACKING] a v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
[UNPACKING] b v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
[COMPILING] a v0.0.1
[COMPILING] b v0.0.1
[COMPILING] c v0.0.1 ([ROOT]/foo/target/package/c-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[UPLOADED] b v0.0.1 to registry `crates-io`
[UPLOADED] a v0.0.1 to registry `crates-io`
[NOTE] waiting for `a v0.0.1` or `b v0.0.1` to be available at registry `crates-io`.
You may press ctrl-c to skip waiting; the crate should be available shortly.
[PUBLISHED] a v0.0.1, b v0.0.1 at registry `crates-io`
[UPLOADING] c v0.0.1 ([ROOT]/foo/c)
[UPLOADED] c v0.0.1 to registry `crates-io`
[NOTE] waiting for `c v0.0.1` to be available at registry `crates-io`.
You may press ctrl-c to skip waiting; the crate should be available shortly.
[PUBLISHED] c v0.0.1 at registry `crates-io`
[UPLOADING] a v0.0.1 ([ROOT]/foo/a)
[UPLOADING] b v0.0.1 ([ROOT]/foo/b)
"#]]
.unordered(),
@ -3724,9 +3812,12 @@ fn workspace_missing_dependency() {
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[VERIFYING] b v0.0.1 ([ROOT]/foo/b)
[UPDATING] crates.io index
[ERROR] no matching package named `a` found
location searched: registry `crates-io`
required by package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)`
[ERROR] failed to verify package tarball
Caused by:
no matching package named `a` found
location searched: registry `crates-io`
required by package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)`
"#]])
.run();
@ -3756,7 +3847,23 @@ You may press ctrl-c to skip waiting; the crate should be available shortly.
.replace_crates_io(registry.index_url())
.with_status(101)
.with_stderr_data(str![[r#"
[ERROR] the `-p` argument must be specified to select a single package to publish
[UPDATING] crates.io index
[PACKAGING] a v0.0.1 ([ROOT]/foo/a)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[PACKAGING] b v0.0.1 ([ROOT]/foo/b)
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
[VERIFYING] a v0.0.1 ([ROOT]/foo/a)
[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
[VERIFYING] b v0.0.1 ([ROOT]/foo/b)
[UPDATING] crates.io index
[ERROR] failed to verify package tarball
Caused by:
failed to get `a` as a dependency of package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)`
Caused by:
found a package in the remote registry and the local overlay: a@0.0.1
"#]])
.run();
@ -3817,7 +3924,8 @@ fn one_unpublishable_package() {
.masquerade_as_nightly_cargo(&["package-workspace"])
.with_status(101)
.with_stderr_data(str![[r#"
[ERROR] the `-p` argument must be specified to select a single package to publish
[ERROR] `main` cannot be published.
`package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.
"#]])
.run();