mirror of
https://github.com/rust-lang/cargo.git
synced 2025-10-01 11:30:39 +00:00
Auto merge of #14433 - tweag:multi-package-publishing-rebased, r=epage
Publish workspace Adds support for simultaneously publishing multiple (possibly inter-dependent) packages in a workspace, gated by the `-Zpackage-workspace` flag. Questions to be worked out through stabilization: - Are we ok stabilizing this and #10948 at the same time? Currently, they are behind the same flag - What is the desired behavior for the publish timeout? This PR uploads the crates in batches (depending on the dependency graph), and we only time out if nothing in the batch is available within the timeout, deferring the rest to the next wait-for-publish. So for example, if you have packages `a`, `b`, `c` then we'll wait up to 60 seconds and if only `a` and `b` were ready in that time, we'll then wait another 60 seconds for `c`. - What is the desired behavior when some packages in a workspace have `publish = false`? This PR raises an error whenever any of the selected packages has `publish = false`, so it will error on `cargo publish --workspace` in a workspace with an unpublishable package. An alternative interface would implicitly exclude unpublishable packages in this case, but still error out if you explicitly select an unpublishable package with `-p package-name` (see #14356). This PR's behavior is the most conservative one as it can change from an error to implicit excludes later. This is part of #1169
This commit is contained in:
commit
be1bbda84b
@ -93,30 +93,6 @@ struct GitVcsInfo {
|
||||
dirty: bool,
|
||||
}
|
||||
|
||||
/// Packages a single package in a workspace, returning the resulting tar file.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if `opts.list` is true. In that case you probably don't want to
|
||||
/// actually build the package tarball; you should just make and print the list
|
||||
/// of files. (We don't currently provide a public API for that, but see how
|
||||
/// [`package`] does it.)
|
||||
pub fn package_one(
|
||||
ws: &Workspace<'_>,
|
||||
pkg: &Package,
|
||||
opts: &PackageOpts<'_>,
|
||||
) -> CargoResult<FileLock> {
|
||||
assert!(!opts.list);
|
||||
|
||||
let ar_files = prepare_archive(ws, pkg, opts)?;
|
||||
let tarball = create_package(ws, pkg, ar_files, None)?;
|
||||
|
||||
if opts.verify {
|
||||
run_verify(ws, pkg, &tarball, None, opts)?;
|
||||
}
|
||||
|
||||
Ok(tarball)
|
||||
}
|
||||
|
||||
// Builds a tarball and places it in the output directory.
|
||||
fn create_package(
|
||||
ws: &Workspace<'_>,
|
||||
@ -193,6 +169,34 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
|
||||
// So we need filter
|
||||
pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));
|
||||
|
||||
Ok(do_package(ws, opts, pkgs)?
|
||||
.into_iter()
|
||||
.map(|x| x.2)
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Packages an entire workspace.
|
||||
///
|
||||
/// Returns the generated package files and the dependencies between them. If
|
||||
/// `opts.list` is true, skips generating package files and returns an empty
|
||||
/// list.
|
||||
pub(crate) fn package_with_dep_graph(
|
||||
ws: &Workspace<'_>,
|
||||
opts: &PackageOpts<'_>,
|
||||
pkgs: Vec<(&Package, CliFeatures)>,
|
||||
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
|
||||
let output = do_package(ws, opts, pkgs)?;
|
||||
|
||||
Ok(local_deps(output.into_iter().map(
|
||||
|(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
|
||||
)))
|
||||
}
|
||||
|
||||
fn do_package<'a>(
|
||||
ws: &Workspace<'_>,
|
||||
opts: &PackageOpts<'a>,
|
||||
pkgs: Vec<(&Package, CliFeatures)>,
|
||||
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
|
||||
if ws
|
||||
.lock_root()
|
||||
.as_path_unlocked()
|
||||
@ -264,7 +268,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
|
||||
}
|
||||
}
|
||||
|
||||
Ok(outputs.into_iter().map(|x| x.2).collect())
|
||||
Ok(outputs)
|
||||
}
|
||||
|
||||
/// Determine which registry the packages are for.
|
||||
@ -308,15 +312,14 @@ fn get_registry(
|
||||
}
|
||||
|
||||
/// Just the part of the dependency graph that's between the packages we're packaging.
|
||||
/// (Is the package name a good key? Does it uniquely identify packages?)
|
||||
#[derive(Clone, Debug, Default)]
|
||||
struct LocalDependencies {
|
||||
packages: HashMap<PackageId, (Package, CliFeatures)>,
|
||||
graph: Graph<PackageId, ()>,
|
||||
pub(crate) struct LocalDependencies<T> {
|
||||
pub packages: HashMap<PackageId, (Package, T)>,
|
||||
pub graph: Graph<PackageId, ()>,
|
||||
}
|
||||
|
||||
impl LocalDependencies {
|
||||
fn sort(&self) -> Vec<(Package, CliFeatures)> {
|
||||
impl<T: Clone> LocalDependencies<T> {
|
||||
pub fn sort(&self) -> Vec<(Package, T)> {
|
||||
self.graph
|
||||
.sort()
|
||||
.into_iter()
|
||||
@ -335,9 +338,10 @@ impl LocalDependencies {
|
||||
/// ignoring dev dependencies.
|
||||
///
|
||||
/// We assume that the packages all belong to this workspace.
|
||||
fn local_deps(packages: impl Iterator<Item = (Package, CliFeatures)>) -> LocalDependencies {
|
||||
let packages: HashMap<PackageId, (Package, CliFeatures)> =
|
||||
packages.map(|pkg| (pkg.0.package_id(), pkg)).collect();
|
||||
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
|
||||
let packages: HashMap<PackageId, (Package, T)> = packages
|
||||
.map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
|
||||
.collect();
|
||||
|
||||
// Dependencies have source ids but not package ids. We draw an edge
|
||||
// whenever a dependency's source id matches one of our packages. This is
|
||||
@ -349,7 +353,7 @@ fn local_deps(packages: impl Iterator<Item = (Package, CliFeatures)>) -> LocalDe
|
||||
.collect();
|
||||
|
||||
let mut graph = Graph::new();
|
||||
for (pkg, _features) in packages.values() {
|
||||
for (pkg, _payload) in packages.values() {
|
||||
graph.add(pkg.package_id());
|
||||
for dep in pkg.dependencies() {
|
||||
// Ignore local dev-dependencies because they aren't needed for intra-workspace
|
||||
|
@ -10,7 +10,7 @@ pub use self::cargo_fetch::{fetch, FetchOptions};
|
||||
pub use self::cargo_install::{install, install_list};
|
||||
pub use self::cargo_new::{init, new, NewOptions, NewProjectKind, VersionControl};
|
||||
pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};
|
||||
pub use self::cargo_package::{check_yanked, package, package_one, PackageOpts};
|
||||
pub use self::cargo_package::{check_yanked, package, PackageOpts};
|
||||
pub use self::cargo_pkgid::pkgid;
|
||||
pub use self::cargo_read_manifest::read_package;
|
||||
pub use self::cargo_run::run;
|
||||
|
@ -3,8 +3,12 @@
|
||||
//! [1]: https://doc.rust-lang.org/nightly/cargo/reference/registry-web-api.html#publish
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::BTreeSet;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::fs::File;
|
||||
use std::io::Seek;
|
||||
use std::io::SeekFrom;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::bail;
|
||||
@ -15,32 +19,36 @@ use cargo_util::paths;
|
||||
use crates_io::NewCrate;
|
||||
use crates_io::NewCrateDependency;
|
||||
use crates_io::Registry;
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::core::dependency::DepKind;
|
||||
use crate::core::manifest::ManifestMetadata;
|
||||
use crate::core::resolver::CliFeatures;
|
||||
use crate::core::Dependency;
|
||||
use crate::core::Package;
|
||||
use crate::core::PackageId;
|
||||
use crate::core::PackageIdSpecQuery;
|
||||
use crate::core::SourceId;
|
||||
use crate::core::Workspace;
|
||||
use crate::ops;
|
||||
use crate::ops::PackageOpts;
|
||||
use crate::ops::Packages;
|
||||
use crate::ops::RegistryOrIndex;
|
||||
use crate::sources::source::QueryKind;
|
||||
use crate::sources::source::Source;
|
||||
use crate::sources::SourceConfigMap;
|
||||
use crate::sources::CRATES_IO_REGISTRY;
|
||||
use crate::util::auth;
|
||||
use crate::util::cache_lock::CacheLockMode;
|
||||
use crate::util::context::JobsConfig;
|
||||
use crate::util::toml::prepare_for_publish;
|
||||
use crate::util::Graph;
|
||||
use crate::util::Progress;
|
||||
use crate::util::ProgressStyle;
|
||||
use crate::CargoResult;
|
||||
use crate::GlobalContext;
|
||||
|
||||
use super::super::check_dep_has_version;
|
||||
use super::RegistryOrIndex;
|
||||
|
||||
pub struct PublishOpts<'gctx> {
|
||||
pub gctx: &'gctx GlobalContext,
|
||||
@ -57,16 +65,23 @@ pub struct PublishOpts<'gctx> {
|
||||
}
|
||||
|
||||
pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
|
||||
let multi_package_mode = ws.gctx().cli_unstable().package_workspace;
|
||||
let specs = opts.to_publish.to_package_id_specs(ws)?;
|
||||
if specs.len() > 1 {
|
||||
bail!("the `-p` argument must be specified to select a single package to publish")
|
||||
|
||||
if !multi_package_mode {
|
||||
if specs.len() > 1 {
|
||||
bail!("the `-p` argument must be specified to select a single package to publish")
|
||||
}
|
||||
if Packages::Default == opts.to_publish && ws.is_virtual() {
|
||||
bail!("the `-p` argument must be specified in the root of a virtual workspace")
|
||||
}
|
||||
}
|
||||
if Packages::Default == opts.to_publish && ws.is_virtual() {
|
||||
bail!("the `-p` argument must be specified in the root of a virtual workspace")
|
||||
|
||||
let member_ids: Vec<_> = ws.members().map(|p| p.package_id()).collect();
|
||||
// Check that the specs match members.
|
||||
for spec in &specs {
|
||||
spec.query(member_ids.clone())?;
|
||||
}
|
||||
let member_ids = ws.members().map(|p| p.package_id());
|
||||
// Check that the spec matches exactly one member.
|
||||
specs[0].query(member_ids)?;
|
||||
let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?;
|
||||
// In `members_with_features_old`, it will add "current" package (determined by the cwd)
|
||||
// So we need filter
|
||||
@ -74,60 +89,31 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
|
||||
.into_iter()
|
||||
.filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())))
|
||||
.collect();
|
||||
// Double check. It is safe theoretically, unless logic has updated.
|
||||
assert_eq!(pkgs.len(), 1);
|
||||
|
||||
let (pkg, cli_features) = pkgs.pop().unwrap();
|
||||
|
||||
let mut publish_registry = match opts.reg_or_index.as_ref() {
|
||||
Some(RegistryOrIndex::Registry(registry)) => Some(registry.clone()),
|
||||
_ => None,
|
||||
};
|
||||
if let Some(ref allowed_registries) = *pkg.publish() {
|
||||
if publish_registry.is_none() && allowed_registries.len() == 1 {
|
||||
// If there is only one allowed registry, push to that one directly,
|
||||
// even though there is no registry specified in the command.
|
||||
let default_registry = &allowed_registries[0];
|
||||
if default_registry != CRATES_IO_REGISTRY {
|
||||
// Don't change the registry for crates.io and don't warn the user.
|
||||
// crates.io will be defaulted even without this.
|
||||
opts.gctx.shell().note(&format!(
|
||||
"found `{}` as only allowed registry. Publishing to it automatically.",
|
||||
default_registry
|
||||
))?;
|
||||
publish_registry = Some(default_registry.clone());
|
||||
let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();
|
||||
let reg_or_index = match opts.reg_or_index.clone() {
|
||||
Some(r) => {
|
||||
validate_registry(&just_pkgs, Some(&r))?;
|
||||
Some(r)
|
||||
}
|
||||
None => {
|
||||
let reg = super::infer_registry(&just_pkgs)?;
|
||||
validate_registry(&just_pkgs, reg.as_ref())?;
|
||||
if let Some(RegistryOrIndex::Registry(ref registry)) = ® {
|
||||
if registry != CRATES_IO_REGISTRY {
|
||||
// Don't warn for crates.io.
|
||||
opts.gctx.shell().note(&format!(
|
||||
"found `{}` as only allowed registry. Publishing to it automatically.",
|
||||
registry
|
||||
))?;
|
||||
}
|
||||
}
|
||||
reg
|
||||
}
|
||||
};
|
||||
|
||||
let reg_name = publish_registry
|
||||
.clone()
|
||||
.unwrap_or_else(|| CRATES_IO_REGISTRY.to_string());
|
||||
if allowed_registries.is_empty() {
|
||||
bail!(
|
||||
"`{}` cannot be published.\n\
|
||||
`package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.",
|
||||
pkg.name(),
|
||||
);
|
||||
} else if !allowed_registries.contains(®_name) {
|
||||
bail!(
|
||||
"`{}` cannot be published.\n\
|
||||
The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
|
||||
pkg.name(),
|
||||
reg_name
|
||||
);
|
||||
}
|
||||
}
|
||||
// This is only used to confirm that we can create a token before we build the package.
|
||||
// This causes the credential provider to be called an extra time, but keeps the same order of errors.
|
||||
let ver = pkg.version().to_string();
|
||||
let operation = Operation::Read;
|
||||
|
||||
let reg_or_index = match opts.reg_or_index.clone() {
|
||||
Some(RegistryOrIndex::Registry(_)) | None => {
|
||||
publish_registry.map(RegistryOrIndex::Registry)
|
||||
}
|
||||
val => val,
|
||||
};
|
||||
let source_ids = super::get_source_id(opts.gctx, reg_or_index.as_ref())?;
|
||||
let mut registry = super::registry(
|
||||
opts.gctx,
|
||||
@ -135,109 +121,168 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
|
||||
opts.token.as_ref().map(Secret::as_deref),
|
||||
reg_or_index.as_ref(),
|
||||
true,
|
||||
Some(operation).filter(|_| !opts.dry_run),
|
||||
Some(Operation::Read).filter(|_| !opts.dry_run),
|
||||
)?;
|
||||
verify_dependencies(pkg, ®istry, source_ids.original)?;
|
||||
|
||||
// Prepare a tarball, with a non-suppressible warning if metadata
|
||||
// is missing since this is being put online.
|
||||
let tarball = ops::package_one(
|
||||
// Validate all the packages before publishing any of them.
|
||||
for (pkg, _) in &pkgs {
|
||||
verify_dependencies(pkg, ®istry, source_ids.original)?;
|
||||
}
|
||||
|
||||
let pkg_dep_graph = ops::cargo_package::package_with_dep_graph(
|
||||
ws,
|
||||
pkg,
|
||||
&PackageOpts {
|
||||
gctx: opts.gctx,
|
||||
verify: opts.verify,
|
||||
list: false,
|
||||
check_metadata: true,
|
||||
allow_dirty: opts.allow_dirty,
|
||||
to_package: Packages::Default,
|
||||
// `package_with_dep_graph` ignores this field in favor of
|
||||
// the already-resolved list of packages
|
||||
to_package: ops::Packages::Default,
|
||||
targets: opts.targets.clone(),
|
||||
jobs: opts.jobs.clone(),
|
||||
keep_going: opts.keep_going,
|
||||
cli_features,
|
||||
reg_or_index,
|
||||
cli_features: opts.cli_features.clone(),
|
||||
reg_or_index: reg_or_index.clone(),
|
||||
},
|
||||
pkgs,
|
||||
)?;
|
||||
|
||||
if !opts.dry_run {
|
||||
let hash = cargo_util::Sha256::new()
|
||||
.update_file(tarball.file())?
|
||||
.finish_hex();
|
||||
let operation = Operation::Publish {
|
||||
name: pkg.name().as_str(),
|
||||
vers: &ver,
|
||||
cksum: &hash,
|
||||
};
|
||||
registry.set_token(Some(auth::auth_token(
|
||||
&opts.gctx,
|
||||
&source_ids.original,
|
||||
None,
|
||||
operation,
|
||||
vec![],
|
||||
false,
|
||||
)?));
|
||||
}
|
||||
let mut plan = PublishPlan::new(&pkg_dep_graph.graph);
|
||||
// May contains packages from previous rounds as `wait_for_any_publish_confirmation` returns
|
||||
// after it confirms any packages, not all packages, requiring us to handle the rest in the next
|
||||
// iteration.
|
||||
//
|
||||
// As a side effect, any given package's "effective" timeout may be much larger.
|
||||
let mut to_confirm = BTreeSet::new();
|
||||
|
||||
opts.gctx
|
||||
.shell()
|
||||
.status("Uploading", pkg.package_id().to_string())?;
|
||||
transmit(
|
||||
opts.gctx,
|
||||
ws,
|
||||
pkg,
|
||||
tarball.file(),
|
||||
&mut registry,
|
||||
source_ids.original,
|
||||
opts.dry_run,
|
||||
)?;
|
||||
if !opts.dry_run {
|
||||
const DEFAULT_TIMEOUT: u64 = 60;
|
||||
let timeout = if opts.gctx.cli_unstable().publish_timeout {
|
||||
let timeout: Option<u64> = opts.gctx.get("publish.timeout")?;
|
||||
timeout.unwrap_or(DEFAULT_TIMEOUT)
|
||||
} else {
|
||||
DEFAULT_TIMEOUT
|
||||
};
|
||||
if 0 < timeout {
|
||||
let timeout = Duration::from_secs(timeout);
|
||||
wait_for_publish(opts.gctx, source_ids.original, pkg, timeout)?;
|
||||
while !plan.is_empty() {
|
||||
// There might not be any ready package, if the previous confirmations
|
||||
// didn't unlock a new one. For example, if `c` depends on `a` and
|
||||
// `b`, and we uploaded `a` and `b` but only confirmed `a`, then on
|
||||
// the following pass through the outer loop nothing will be ready for
|
||||
// upload.
|
||||
for pkg_id in plan.take_ready() {
|
||||
let (pkg, (_features, tarball)) = &pkg_dep_graph.packages[&pkg_id];
|
||||
opts.gctx.shell().status("Uploading", pkg.package_id())?;
|
||||
|
||||
if !opts.dry_run {
|
||||
let ver = pkg.version().to_string();
|
||||
|
||||
tarball.file().seek(SeekFrom::Start(0))?;
|
||||
let hash = cargo_util::Sha256::new()
|
||||
.update_file(tarball.file())?
|
||||
.finish_hex();
|
||||
let operation = Operation::Publish {
|
||||
name: pkg.name().as_str(),
|
||||
vers: &ver,
|
||||
cksum: &hash,
|
||||
};
|
||||
registry.set_token(Some(auth::auth_token(
|
||||
&opts.gctx,
|
||||
&source_ids.original,
|
||||
None,
|
||||
operation,
|
||||
vec![],
|
||||
false,
|
||||
)?));
|
||||
}
|
||||
|
||||
transmit(
|
||||
opts.gctx,
|
||||
ws,
|
||||
pkg,
|
||||
tarball.file(),
|
||||
&mut registry,
|
||||
source_ids.original,
|
||||
opts.dry_run,
|
||||
)?;
|
||||
to_confirm.insert(pkg_id);
|
||||
|
||||
if !opts.dry_run {
|
||||
// Short does not include the registry name.
|
||||
let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version());
|
||||
let source_description = source_ids.original.to_string();
|
||||
ws.gctx().shell().status(
|
||||
"Uploaded",
|
||||
format!("{short_pkg_description} to {source_description}"),
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
let confirmed = if opts.dry_run {
|
||||
to_confirm.clone()
|
||||
} else {
|
||||
const DEFAULT_TIMEOUT: u64 = 60;
|
||||
let timeout = if opts.gctx.cli_unstable().publish_timeout {
|
||||
let timeout: Option<u64> = opts.gctx.get("publish.timeout")?;
|
||||
timeout.unwrap_or(DEFAULT_TIMEOUT)
|
||||
} else {
|
||||
DEFAULT_TIMEOUT
|
||||
};
|
||||
if 0 < timeout {
|
||||
let timeout = Duration::from_secs(timeout);
|
||||
wait_for_any_publish_confirmation(
|
||||
opts.gctx,
|
||||
source_ids.original,
|
||||
&to_confirm,
|
||||
timeout,
|
||||
)?
|
||||
} else {
|
||||
BTreeSet::new()
|
||||
}
|
||||
};
|
||||
if confirmed.is_empty() {
|
||||
// If nothing finished, it means we timed out while waiting for confirmation.
|
||||
// We're going to exit, but first we need to check: have we uploaded everything?
|
||||
if plan.is_empty() {
|
||||
// It's ok that we timed out, because nothing was waiting on dependencies to
|
||||
// be confirmed.
|
||||
break;
|
||||
} else {
|
||||
let failed_list = package_list(plan.iter(), "and");
|
||||
bail!("unable to publish {failed_list} due to time out while waiting for published dependencies to be available.");
|
||||
}
|
||||
}
|
||||
for id in &confirmed {
|
||||
to_confirm.remove(id);
|
||||
}
|
||||
plan.mark_confirmed(confirmed);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn wait_for_publish(
|
||||
/// Poll the registry for any packages that are ready for use.
|
||||
///
|
||||
/// Returns the subset of `pkgs` that are ready for use.
|
||||
/// This will be an empty set if we timed out before confirming anything.
|
||||
fn wait_for_any_publish_confirmation(
|
||||
gctx: &GlobalContext,
|
||||
registry_src: SourceId,
|
||||
pkg: &Package,
|
||||
pkgs: &BTreeSet<PackageId>,
|
||||
timeout: Duration,
|
||||
) -> CargoResult<()> {
|
||||
let version_req = format!("={}", pkg.version());
|
||||
) -> CargoResult<BTreeSet<PackageId>> {
|
||||
let mut source = SourceConfigMap::empty(gctx)?.load(registry_src, &HashSet::new())?;
|
||||
// Disable the source's built-in progress bars. Repeatedly showing a bunch
|
||||
// of independent progress bars can be a little confusing. There is an
|
||||
// overall progress bar managed here.
|
||||
source.set_quiet(true);
|
||||
let source_description = source.source_id().to_string();
|
||||
let query = Dependency::parse(pkg.name(), Some(&version_req), registry_src)?;
|
||||
|
||||
let now = std::time::Instant::now();
|
||||
let sleep_time = Duration::from_secs(1);
|
||||
let max = timeout.as_secs() as usize;
|
||||
// Short does not include the registry name.
|
||||
let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version());
|
||||
gctx.shell().status(
|
||||
"Uploaded",
|
||||
format!("{short_pkg_description} to {source_description}"),
|
||||
)?;
|
||||
let short_pkg_descriptions = package_list(pkgs.iter().copied(), "or");
|
||||
gctx.shell().note(format!(
|
||||
"waiting for `{short_pkg_description}` to be available at {source_description}.\n\
|
||||
"waiting for {short_pkg_descriptions} to be available at {source_description}.\n\
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly."
|
||||
))?;
|
||||
let mut progress = Progress::with_style("Waiting", ProgressStyle::Ratio, gctx);
|
||||
progress.tick_now(0, max, "")?;
|
||||
let is_available = loop {
|
||||
let available = loop {
|
||||
{
|
||||
let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;
|
||||
// Force re-fetching the source
|
||||
@ -247,43 +292,67 @@ fn wait_for_publish(
|
||||
// multiple times
|
||||
gctx.updated_sources().remove(&source.replaced_source_id());
|
||||
source.invalidate_cache();
|
||||
let summaries = loop {
|
||||
// Exact to avoid returning all for path/git
|
||||
match source.query_vec(&query, QueryKind::Exact) {
|
||||
std::task::Poll::Ready(res) => {
|
||||
break res?;
|
||||
}
|
||||
std::task::Poll::Pending => source.block_until_ready()?,
|
||||
let mut available = BTreeSet::new();
|
||||
for pkg in pkgs {
|
||||
if poll_one_package(registry_src, pkg, &mut source)? {
|
||||
available.insert(*pkg);
|
||||
}
|
||||
};
|
||||
if !summaries.is_empty() {
|
||||
break true;
|
||||
}
|
||||
|
||||
// As soon as any package is available, break this loop so we can see if another
|
||||
// one can be uploaded.
|
||||
if !available.is_empty() {
|
||||
break available;
|
||||
}
|
||||
}
|
||||
|
||||
let elapsed = now.elapsed();
|
||||
if timeout < elapsed {
|
||||
gctx.shell().warn(format!(
|
||||
"timed out waiting for `{short_pkg_description}` to be available in {source_description}",
|
||||
"timed out waiting for {short_pkg_descriptions} to be available in {source_description}",
|
||||
))?;
|
||||
gctx.shell().note(
|
||||
"the registry may have a backlog that is delaying making the \
|
||||
crate available. The crate should be available soon.",
|
||||
)?;
|
||||
break false;
|
||||
break BTreeSet::new();
|
||||
}
|
||||
|
||||
progress.tick_now(elapsed.as_secs() as usize, max, "")?;
|
||||
std::thread::sleep(sleep_time);
|
||||
};
|
||||
if is_available {
|
||||
if !available.is_empty() {
|
||||
let short_pkg_description = available
|
||||
.iter()
|
||||
.map(|pkg| format!("{} v{}", pkg.name(), pkg.version()))
|
||||
.sorted()
|
||||
.join(", ");
|
||||
gctx.shell().status(
|
||||
"Published",
|
||||
format!("{short_pkg_description} at {source_description}"),
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
Ok(available)
|
||||
}
|
||||
|
||||
fn poll_one_package(
|
||||
registry_src: SourceId,
|
||||
pkg_id: &PackageId,
|
||||
source: &mut dyn Source,
|
||||
) -> CargoResult<bool> {
|
||||
let version_req = format!("={}", pkg_id.version());
|
||||
let query = Dependency::parse(pkg_id.name(), Some(&version_req), registry_src)?;
|
||||
let summaries = loop {
|
||||
// Exact to avoid returning all for path/git
|
||||
match source.query_vec(&query, QueryKind::Exact) {
|
||||
std::task::Poll::Ready(res) => {
|
||||
break res?;
|
||||
}
|
||||
std::task::Poll::Pending => source.block_until_ready()?,
|
||||
}
|
||||
};
|
||||
Ok(!summaries.is_empty())
|
||||
}
|
||||
|
||||
fn verify_dependencies(
|
||||
@ -498,3 +567,174 @@ fn transmit(
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// State for tracking dependencies during upload.
|
||||
struct PublishPlan {
|
||||
/// Graph of publishable packages where the edges are `(dependency -> dependent)`
|
||||
dependents: Graph<PackageId, ()>,
|
||||
/// The weight of a package is the number of unpublished dependencies it has.
|
||||
dependencies_count: HashMap<PackageId, usize>,
|
||||
}
|
||||
|
||||
impl PublishPlan {
|
||||
/// Given a package dependency graph, creates a `PublishPlan` for tracking state.
|
||||
fn new(graph: &Graph<PackageId, ()>) -> Self {
|
||||
let dependents = graph.reversed();
|
||||
|
||||
let dependencies_count: HashMap<_, _> = dependents
|
||||
.iter()
|
||||
.map(|id| (*id, graph.edges(id).count()))
|
||||
.collect();
|
||||
Self {
|
||||
dependents,
|
||||
dependencies_count,
|
||||
}
|
||||
}
|
||||
|
||||
fn iter(&self) -> impl Iterator<Item = PackageId> + '_ {
|
||||
self.dependencies_count.iter().map(|(id, _)| *id)
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.dependencies_count.is_empty()
|
||||
}
|
||||
|
||||
/// Returns the set of packages that are ready for publishing (i.e. have no outstanding dependencies).
|
||||
///
|
||||
/// These will not be returned in future calls.
|
||||
fn take_ready(&mut self) -> BTreeSet<PackageId> {
|
||||
let ready: BTreeSet<_> = self
|
||||
.dependencies_count
|
||||
.iter()
|
||||
.filter_map(|(id, weight)| (*weight == 0).then_some(*id))
|
||||
.collect();
|
||||
for pkg in &ready {
|
||||
self.dependencies_count.remove(pkg);
|
||||
}
|
||||
ready
|
||||
}
|
||||
|
||||
/// Packages confirmed to be available in the registry, potentially allowing additional
|
||||
/// packages to be "ready".
|
||||
fn mark_confirmed(&mut self, published: impl IntoIterator<Item = PackageId>) {
|
||||
for id in published {
|
||||
for (dependent_id, _) in self.dependents.edges(&id) {
|
||||
if let Some(weight) = self.dependencies_count.get_mut(dependent_id) {
|
||||
*weight = weight.saturating_sub(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Format a collection of packages as a list
|
||||
///
|
||||
/// e.g. "foo v0.1.0, bar v0.2.0, and baz v0.3.0".
|
||||
///
|
||||
/// Note: the final separator (e.g. "and" in the previous example) can be chosen.
|
||||
fn package_list(pkgs: impl IntoIterator<Item = PackageId>, final_sep: &str) -> String {
|
||||
let mut names: Vec<_> = pkgs
|
||||
.into_iter()
|
||||
.map(|pkg| format!("`{} v{}`", pkg.name(), pkg.version()))
|
||||
.collect();
|
||||
names.sort();
|
||||
|
||||
match &names[..] {
|
||||
[] => String::new(),
|
||||
[a] => a.clone(),
|
||||
[a, b] => format!("{a} {final_sep} {b}"),
|
||||
[names @ .., last] => {
|
||||
format!("{}, {final_sep} {last}", names.join(", "))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>) -> CargoResult<()> {
|
||||
for pkg in pkgs {
|
||||
if pkg.publish() == &Some(Vec::new()) {
|
||||
bail!(
|
||||
"`{}` cannot be published.\n\
|
||||
`package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.",
|
||||
pkg.name(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let reg_name = match reg_or_index {
|
||||
Some(RegistryOrIndex::Registry(r)) => Some(r.as_str()),
|
||||
None => Some(CRATES_IO_REGISTRY),
|
||||
Some(RegistryOrIndex::Index(_)) => None,
|
||||
};
|
||||
if let Some(reg_name) = reg_name {
|
||||
for pkg in pkgs {
|
||||
if let Some(allowed) = pkg.publish().as_ref() {
|
||||
if !allowed.iter().any(|a| a == reg_name) {
|
||||
bail!(
|
||||
"`{}` cannot be published.\n\
|
||||
The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
|
||||
pkg.name(),
|
||||
reg_name
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::{
        core::{PackageId, SourceId},
        sources::CRATES_IO_INDEX,
        util::{Graph, IntoUrl},
    };

    use super::PublishPlan;

    /// Creates a crates.io `PackageId` for `name` at version 1.0.0.
    fn pkg_id(name: &str) -> PackageId {
        let loc = CRATES_IO_INDEX.into_url().unwrap();
        PackageId::try_new(name, "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap()
    }

    #[test]
    fn parallel_schedule() {
        // Dependency graph (edges point dependent -> dependency):
        //   a -> c, b -> c, c -> d, c -> e
        let a = pkg_id("a");
        let b = pkg_id("b");
        let c = pkg_id("c");
        let d = pkg_id("d");
        let e = pkg_id("e");

        let mut graph: Graph<PackageId, ()> = Graph::new();
        for id in [a, b, c, d, e] {
            graph.add(id);
        }
        graph.link(a, c);
        graph.link(b, c);
        graph.link(c, d);
        graph.link(c, e);

        let mut order = PublishPlan::new(&graph);

        // The leaves `d` and `e` have no dependencies, so they go first.
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert_eq!(ready, vec![d, e]);

        // `c` needs both `d` and `e`; confirming only `d` is not enough.
        order.mark_confirmed(vec![d]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert!(ready.is_empty());

        order.mark_confirmed(vec![e]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert_eq!(ready, vec![c]);

        // Once `c` is confirmed, both of its dependents unlock together.
        order.mark_confirmed(vec![c]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert_eq!(ready, vec![a, b]);

        order.mark_confirmed(vec![a, b]);
        let ready: Vec<_> = order.take_ready().into_iter().collect();
        assert!(ready.is_empty());
    }
}
|
||||
|
@ -25,6 +25,20 @@ impl<N: Eq + Ord + Clone, E: Default + Clone> Graph<N, E> {
|
||||
.or_insert_with(Default::default)
|
||||
}
|
||||
|
||||
/// Returns the graph obtained by reversing all edges.
|
||||
pub fn reversed(&self) -> Graph<N, E> {
|
||||
let mut ret = Graph::new();
|
||||
|
||||
for n in self.iter() {
|
||||
ret.add(n.clone());
|
||||
for (m, e) in self.edges(n) {
|
||||
*ret.link(m.clone(), n.clone()) = e.clone();
|
||||
}
|
||||
}
|
||||
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn contains<Q: ?Sized>(&self, k: &Q) -> bool
|
||||
where
|
||||
N: Borrow<Q>,
|
||||
@ -206,6 +220,19 @@ fn path_to_self() {
|
||||
assert_eq!(new.path_to_bottom(&0), vec![(&0, Some(&()))]);
|
||||
}
|
||||
|
||||
#[test]
fn reverse() {
    // 0 -> 1 and 0 -> 2 ...
    let mut graph: Graph<i32, ()> = Graph::new();
    graph.link(0, 1);
    graph.link(0, 2);

    // ... becomes 1 -> 0 and 2 -> 0, with node 0 still present.
    let mut expected: Graph<i32, ()> = Graph::new();
    expected.add(0);
    expected.link(1, 0);
    expected.link(2, 0);

    assert_eq!(graph.reversed(), expected);
}
|
||||
|
||||
impl<N: Eq + Ord + Clone, E: Default + Clone> Default for Graph<N, E> {
|
||||
fn default() -> Graph<N, E> {
|
||||
Graph::new()
|
||||
|
@ -76,8 +76,8 @@ fn publish() {
|
||||
{"v":1,"registry":{"index-url":"[..]","name":"alternative","headers":[..]},"kind":"get","operation":"read"}
|
||||
[PACKAGING] foo v0.1.0 ([ROOT]/foo)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
|
||||
[UPLOADING] foo v0.1.0 ([ROOT]/foo)
|
||||
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
|
||||
[UPLOADED] foo v0.1.0 to registry `alternative`
|
||||
[NOTE] waiting for `foo v0.1.0` to be available at registry `alternative`.
|
||||
You may press ctrl-c [..]
|
||||
@ -529,8 +529,8 @@ fn token_caching() {
|
||||
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"read"}
|
||||
[PACKAGING] foo v0.1.0 ([ROOT]/foo)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
|
||||
[UPLOADING] foo v0.1.0 ([ROOT]/foo)
|
||||
{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"}
|
||||
[UPLOADED] foo v0.1.0 to registry `alternative`
|
||||
[NOTE] waiting [..]
|
||||
You may press ctrl-c [..]
|
||||
|
@ -4,10 +4,10 @@ use std::fs;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use cargo_test_support::git::{self, repo};
|
||||
use cargo_test_support::paths;
|
||||
use cargo_test_support::prelude::*;
|
||||
use cargo_test_support::registry::{self, Package, RegistryBuilder, Response};
|
||||
use cargo_test_support::{basic_manifest, project, publish, str};
|
||||
use cargo_test_support::{paths, Project};
|
||||
|
||||
const CLEAN_FOO_JSON: &str = r#"
|
||||
{
|
||||
@ -961,7 +961,6 @@ fn publish_failed_with_index_and_only_allowed_registry() {
|
||||
.arg(registry.index_url().as_str())
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[NOTE] found `alternative` as only allowed registry. Publishing to it automatically.
|
||||
[ERROR] command-line argument --index requires --token to be specified
|
||||
|
||||
"#]])
|
||||
@ -995,8 +994,7 @@ fn publish_fail_with_no_registry_specified() {
|
||||
p.cargo("publish")
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[ERROR] `foo` cannot be published.
|
||||
The registry `crates-io` is not listed in the `package.publish` value in Cargo.toml.
|
||||
[ERROR] --registry is required to disambiguate between "alternative" or "test" registries
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
@ -3175,6 +3173,7 @@ See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for
|
||||
[PACKAGING] foo v0.0.1 ([ROOT]/foo)
|
||||
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[UPLOADING] foo v0.0.1 ([ROOT]/foo)
|
||||
[UPLOADED] foo v0.0.1 to registry `crates-io`
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
@ -3232,6 +3231,106 @@ You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn timeout_waiting_for_dependency_publish() {
|
||||
// Publish doesn't happen within the timeout window.
|
||||
let registry = registry::RegistryBuilder::new()
|
||||
.http_api()
|
||||
.delayed_index_update(20)
|
||||
.build();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[workspace]
|
||||
members = ["main", "other", "dep"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"main/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "main"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "foo"
|
||||
|
||||
[dependencies]
|
||||
dep = { version = "0.0.1", path = "../dep" }
|
||||
"#,
|
||||
)
|
||||
.file("main/src/main.rs", "fn main() {}")
|
||||
.file(
|
||||
"other/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "other"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "foo"
|
||||
|
||||
[dependencies]
|
||||
dep = { version = "0.0.1", path = "../dep" }
|
||||
"#,
|
||||
)
|
||||
.file("other/src/main.rs", "fn main() {}")
|
||||
.file(
|
||||
"dep/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "dep"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "foo"
|
||||
"#,
|
||||
)
|
||||
.file("dep/src/lib.rs", "")
|
||||
.file(
|
||||
".cargo/config.toml",
|
||||
r#"
|
||||
[publish]
|
||||
timeout = 2
|
||||
"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
p.cargo("publish --no-verify -Zpublish-timeout -Zpackage-workspace")
|
||||
.replace_crates_io(registry.index_url())
|
||||
.masquerade_as_nightly_cargo(&["publish-timeout", "package-workspace"])
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[UPDATING] crates.io index
|
||||
[WARNING] manifest has no documentation, homepage or repository.
|
||||
See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
|
||||
[PACKAGING] dep v0.0.1 ([ROOT]/foo/dep)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[WARNING] manifest has no documentation, homepage or repository.
|
||||
See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
|
||||
[PACKAGING] main v0.0.1 ([ROOT]/foo/main)
|
||||
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[WARNING] manifest has no documentation, homepage or repository.
|
||||
See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.
|
||||
[PACKAGING] other v0.0.1 ([ROOT]/foo/other)
|
||||
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[UPLOADING] dep v0.0.1 ([ROOT]/foo/dep)
|
||||
[UPLOADED] dep v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `dep v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[WARNING] timed out waiting for `dep v0.0.1` to be available in registry `crates-io`
|
||||
[NOTE] the registry may have a backlog that is delaying making the crate available. The crate should be available soon.
|
||||
[ERROR] unable to publish `main v0.0.1` and `other v0.0.1` due to time out while waiting for published dependencies to be available.
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn wait_for_git_publish() {
|
||||
// Slow publish to an index with a git index.
|
||||
@ -3417,3 +3516,417 @@ You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
|
||||
validate_upload_foo();
|
||||
}
|
||||
|
||||
// A workspace with three projects that depend on one another (level1 -> level2 -> level3).
|
||||
// level1 is a binary package, to test lockfile generation.
|
||||
fn workspace_with_local_deps_project() -> Project {
|
||||
project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[workspace]
|
||||
members = ["level1", "level2", "level3"]
|
||||
|
||||
[workspace.dependencies]
|
||||
level2 = { path = "level2", version = "0.0.1" }
|
||||
"#
|
||||
)
|
||||
.file(
|
||||
"level1/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "level1"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "level1"
|
||||
repository = "bar"
|
||||
|
||||
[dependencies]
|
||||
# Let one dependency also specify features, for the added test coverage when generating package files.
|
||||
level2 = { workspace = true, features = ["foo"] }
|
||||
"#,
|
||||
)
|
||||
.file("level1/src/main.rs", "fn main() {}")
|
||||
.file(
|
||||
"level2/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "level2"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "level2"
|
||||
repository = "bar"
|
||||
|
||||
[features]
|
||||
foo = []
|
||||
|
||||
[dependencies]
|
||||
level3 = { path = "../level3", version = "0.0.1" }
|
||||
"#
|
||||
)
|
||||
.file("level2/src/lib.rs", "")
|
||||
.file(
|
||||
"level3/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "level3"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "level3"
|
||||
repository = "bar"
|
||||
"#,
|
||||
)
|
||||
.file("level3/src/lib.rs", "")
|
||||
.build()
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn workspace_with_local_deps() {
|
||||
let crates_io = registry::init();
|
||||
let p = workspace_with_local_deps_project();
|
||||
|
||||
p.cargo("publish")
|
||||
.replace_crates_io(crates_io.index_url())
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[ERROR] the `-p` argument must be specified to select a single package to publish
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn workspace_with_local_deps_nightly() {
|
||||
let registry = RegistryBuilder::new().http_api().http_index().build();
|
||||
let p = workspace_with_local_deps_project();
|
||||
|
||||
p.cargo("publish -Zpackage-workspace")
|
||||
.masquerade_as_nightly_cargo(&["package-workspace"])
|
||||
.replace_crates_io(registry.index_url())
|
||||
.with_stderr_data(str![[r#"
|
||||
[UPDATING] crates.io index
|
||||
[PACKAGING] level3 v0.0.1 ([ROOT]/foo/level3)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[PACKAGING] level2 v0.0.1 ([ROOT]/foo/level2)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[PACKAGING] level1 v0.0.1 ([ROOT]/foo/level1)
|
||||
[UPDATING] crates.io index
|
||||
[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[VERIFYING] level3 v0.0.1 ([ROOT]/foo/level3)
|
||||
[COMPILING] level3 v0.0.1 ([ROOT]/foo/target/package/level3-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[VERIFYING] level2 v0.0.1 ([ROOT]/foo/level2)
|
||||
[UPDATING] crates.io index
|
||||
[UNPACKING] level3 v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
|
||||
[COMPILING] level3 v0.0.1
|
||||
[COMPILING] level2 v0.0.1 ([ROOT]/foo/target/package/level2-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[VERIFYING] level1 v0.0.1 ([ROOT]/foo/level1)
|
||||
[UPDATING] crates.io index
|
||||
[UNPACKING] level2 v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
|
||||
[COMPILING] level3 v0.0.1
|
||||
[COMPILING] level2 v0.0.1
|
||||
[COMPILING] level1 v0.0.1 ([ROOT]/foo/target/package/level1-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[UPLOADING] level3 v0.0.1 ([ROOT]/foo/level3)
|
||||
[UPLOADED] level3 v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `level3 v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[PUBLISHED] level3 v0.0.1 at registry `crates-io`
|
||||
[UPLOADING] level2 v0.0.1 ([ROOT]/foo/level2)
|
||||
[UPLOADED] level2 v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `level2 v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[PUBLISHED] level2 v0.0.1 at registry `crates-io`
|
||||
[UPLOADING] level1 v0.0.1 ([ROOT]/foo/level1)
|
||||
[UPLOADED] level1 v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `level1 v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[PUBLISHED] level1 v0.0.1 at registry `crates-io`
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn workspace_parallel() {
|
||||
let registry = RegistryBuilder::new().http_api().http_index().build();
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[workspace]
|
||||
members = ["a", "b", "c"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"a/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "a"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "a"
|
||||
repository = "bar"
|
||||
"#,
|
||||
)
|
||||
.file("a/src/lib.rs", "")
|
||||
.file(
|
||||
"b/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "b"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "b"
|
||||
repository = "bar"
|
||||
"#,
|
||||
)
|
||||
.file("b/src/lib.rs", "")
|
||||
.file(
|
||||
"c/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "c"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "c"
|
||||
repository = "bar"
|
||||
|
||||
[dependencies]
|
||||
a = { path = "../a", version = "0.0.1" }
|
||||
b = { path = "../b", version = "0.0.1" }
|
||||
"#,
|
||||
)
|
||||
.file("c/src/lib.rs", "")
|
||||
.build();
|
||||
|
||||
p.cargo("publish -Zpackage-workspace")
|
||||
.masquerade_as_nightly_cargo(&["package-workspace"])
|
||||
.replace_crates_io(registry.index_url())
|
||||
.with_stderr_data(
|
||||
str![[r#"
|
||||
[UPDATING] crates.io index
|
||||
[PACKAGING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[PACKAGING] b v0.0.1 ([ROOT]/foo/b)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[PACKAGING] c v0.0.1 ([ROOT]/foo/c)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[VERIFYING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[VERIFYING] b v0.0.1 ([ROOT]/foo/b)
|
||||
[COMPILING] b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[VERIFYING] c v0.0.1 ([ROOT]/foo/c)
|
||||
[UPDATING] crates.io index
|
||||
[UNPACKING] a v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
|
||||
[UNPACKING] b v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`)
|
||||
[COMPILING] a v0.0.1
|
||||
[COMPILING] b v0.0.1
|
||||
[COMPILING] c v0.0.1 ([ROOT]/foo/target/package/c-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[UPLOADED] b v0.0.1 to registry `crates-io`
|
||||
[UPLOADED] a v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `a v0.0.1` or `b v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[PUBLISHED] a v0.0.1, b v0.0.1 at registry `crates-io`
|
||||
[UPLOADING] c v0.0.1 ([ROOT]/foo/c)
|
||||
[UPLOADED] c v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `c v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[PUBLISHED] c v0.0.1 at registry `crates-io`
|
||||
[UPLOADING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[UPLOADING] b v0.0.1 ([ROOT]/foo/b)
|
||||
|
||||
"#]]
|
||||
.unordered(),
|
||||
)
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn workspace_missing_dependency() {
|
||||
let registry = RegistryBuilder::new().http_api().http_index().build();
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[workspace]
|
||||
members = ["a", "b"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"a/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "a"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "a"
|
||||
repository = "bar"
|
||||
"#,
|
||||
)
|
||||
.file("a/src/lib.rs", "")
|
||||
.file(
|
||||
"b/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "b"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "b"
|
||||
repository = "bar"
|
||||
|
||||
[dependencies]
|
||||
a = { path = "../a", version = "0.0.1" }
|
||||
"#,
|
||||
)
|
||||
.file("b/src/lib.rs", "")
|
||||
.build();
|
||||
|
||||
p.cargo("publish -Zpackage-workspace -p b")
|
||||
.masquerade_as_nightly_cargo(&["package-workspace"])
|
||||
.replace_crates_io(registry.index_url())
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[UPDATING] crates.io index
|
||||
[PACKAGING] b v0.0.1 ([ROOT]/foo/b)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[VERIFYING] b v0.0.1 ([ROOT]/foo/b)
|
||||
[UPDATING] crates.io index
|
||||
[ERROR] failed to verify package tarball
|
||||
|
||||
Caused by:
|
||||
no matching package named `a` found
|
||||
location searched: registry `crates-io`
|
||||
required by package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)`
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
|
||||
p.cargo("publish -Zpackage-workspace -p a")
|
||||
.masquerade_as_nightly_cargo(&["package-workspace"])
|
||||
.replace_crates_io(registry.index_url())
|
||||
.with_stderr_data(str![[r#"
|
||||
[UPDATING] crates.io index
|
||||
[PACKAGING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[VERIFYING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[UPLOADING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[UPLOADED] a v0.0.1 to registry `crates-io`
|
||||
[NOTE] waiting for `a v0.0.1` to be available at registry `crates-io`.
|
||||
You may press ctrl-c to skip waiting; the crate should be available shortly.
|
||||
[PUBLISHED] a v0.0.1 at registry `crates-io`
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
|
||||
// Publishing the whole workspace now will fail, as `a` is already published.
|
||||
p.cargo("publish -Zpackage-workspace")
|
||||
.masquerade_as_nightly_cargo(&["package-workspace"])
|
||||
.replace_crates_io(registry.index_url())
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[UPDATING] crates.io index
|
||||
[PACKAGING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[PACKAGING] b v0.0.1 ([ROOT]/foo/b)
|
||||
[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed)
|
||||
[VERIFYING] a v0.0.1 ([ROOT]/foo/a)
|
||||
[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1)
|
||||
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
|
||||
[VERIFYING] b v0.0.1 ([ROOT]/foo/b)
|
||||
[UPDATING] crates.io index
|
||||
[ERROR] failed to verify package tarball
|
||||
|
||||
Caused by:
|
||||
failed to get `a` as a dependency of package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)`
|
||||
|
||||
Caused by:
|
||||
found a package in the remote registry and the local overlay: a@0.0.1
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn one_unpublishable_package() {
|
||||
let _alt_reg = registry::RegistryBuilder::new()
|
||||
.http_api()
|
||||
.http_index()
|
||||
.alternative()
|
||||
.build();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[workspace]
|
||||
members = ["dep", "main"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"main/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "main"
|
||||
version = "0.0.1"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "main"
|
||||
repository = "bar"
|
||||
publish = false
|
||||
|
||||
[dependencies]
|
||||
dep = { path = "../dep", version = "0.1.0", registry = "alternative" }
|
||||
"#,
|
||||
)
|
||||
.file("main/src/main.rs", "fn main() {}")
|
||||
.file(
|
||||
"dep/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "dep"
|
||||
version = "0.1.0"
|
||||
edition = "2015"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
description = "dep"
|
||||
repository = "bar"
|
||||
publish = ["alternative"]
|
||||
"#,
|
||||
)
|
||||
.file("dep/src/lib.rs", "")
|
||||
.build();
|
||||
|
||||
p.cargo("publish -Zpackage-workspace")
|
||||
.masquerade_as_nightly_cargo(&["package-workspace"])
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[ERROR] `main` cannot be published.
|
||||
`package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.
|
||||
|
||||
"#]])
|
||||
.run();
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user