From cedc0ca97e684eb1c19fdb8f1e17da24aba63f9e Mon Sep 17 00:00:00 2001 From: boxdot Date: Sun, 3 Jun 2018 23:40:27 +0200 Subject: [PATCH] Improve error message on duplicate packages in lockfile. --- src/cargo/core/resolver/mod.rs | 38 +++++++++++++--------------- tests/testsuite/generate_lockfile.rs | 5 ++-- 2 files changed, 20 insertions(+), 23 deletions(-) diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index db3ed57c0..589b67e02 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -140,7 +140,7 @@ pub fn resolve( ); check_cycles(&resolve, &cx.activations)?; - check_duplicate_pkgs(&resolve)?; + check_duplicate_pkgs_in_lockfile(&resolve)?; trace!("resolved: {:?}", resolve); // If we have a shell, emit warnings about required deps used as feature. @@ -1100,27 +1100,23 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()> } } -fn get_duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> { - let mut unique_pkg_ids = HashSet::new(); - let mut result = HashSet::new(); +/// Checks that packages are unique when written to the lockfile. +/// +/// When writing package IDs to the lockfile, we apply lossy encoding. In +/// particular, we don't store paths of path dependencies. That means that +/// *different* packages may collide in the lockfile, hence this check.
+fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> { + let mut unique_pkg_ids = HashMap::new(); for pkg_id in resolve.iter() { - let mut encodable_pkd_id = encode::encodable_package_id(pkg_id); - if !unique_pkg_ids.insert(encodable_pkd_id) { - result.insert(pkg_id.name().as_str()); + let encodable_pkg_id = encode::encodable_package_id(pkg_id); + if let Some(prev_pkg_id) = unique_pkg_ids.insert(encodable_pkg_id, pkg_id) { + bail!( + "package collision in the lockfile: packages {} and {} are different, \ + but only one can be written to the lockfile unambiguously", + prev_pkg_id, + pkg_id + ) } } - result.into_iter().collect() -} - -fn check_duplicate_pkgs(resolve: &Resolve) -> CargoResult<()> { - let names = get_duplicate_pkgs(resolve); - if names.is_empty() { - Ok(()) - } else { - bail!( - "dependencies contain duplicate package(s) in the \ - same namespace from the same source: {}", - names.join(", ") - ) - } + Ok(()) } diff --git a/tests/testsuite/generate_lockfile.rs b/tests/testsuite/generate_lockfile.rs index ec21fa110..2a6d09faa 100644 --- a/tests/testsuite/generate_lockfile.rs +++ b/tests/testsuite/generate_lockfile.rs @@ -307,8 +307,9 @@ fn duplicate_entries_in_lockfile() { assert_that( b.cargo("build"), execs().with_status(101).with_stderr_contains( - "[..]dependencies contain duplicate package(s) in the \ - same namespace from the same source: common", + "[..]package collision in the lockfile: packages common [..] and \ + common [..] are different, but only one can be written to the \ + lockfile unambiguously", ), ); }