Mirror of https://github.com/rust-lang/cargo.git
Auto merge of #9276 - ehuss:fix-collision-root-removal, r=alexcrichton
Fix doc duplicate removal of root units.

The doc collision removal code didn't consider the possibility that a collision could involve a root unit. This caused problems in conjunction with #9142, where cargo would panic because a root unit was removed from the graph due to a filename collision.

The solution here is to avoid removing root units during the doc collision sweep. The downside is that this reintroduces a filename collision. An alternate solution would be to make the set of root units mutable and remove from that list, but I figured this is simpler and more conservative.

Fixes #9274
commit 8b08998f11
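To make the shape of the fix concrete before the diff, here is a minimal, self-contained Rust sketch of the guard it introduces. The names below (`Unit`, `remove_duplicates`, `root_units`) are simplified stand-ins invented for illustration, not cargo's real `Unit` type or `remove_duplicate_doc`; the point is only that a candidate duplicate is dropped when the caller's predicate matches AND the unit is not a root, so root units always survive the collision sweep.

// Sketch only: simplified stand-ins for cargo's unit types.
use std::collections::HashSet;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Unit {
    name: &'static str,
    is_host: bool,
}

// Partition `units` into (to_remove, remaining). A unit is removed only if
// the predicate `cb` matches and it is not one of the requested root units.
fn remove_duplicates(
    units: Vec<Unit>,
    root_units: &HashSet<Unit>,
    cb: &dyn Fn(&Unit) -> bool,
) -> Vec<Unit> {
    let (to_remove, remaining): (Vec<Unit>, Vec<Unit>) = units
        .into_iter()
        .partition(|unit| cb(unit) && !root_units.contains(unit));
    for unit in &to_remove {
        println!("removing duplicate doc unit: {:?}", unit);
    }
    remaining
}

fn main() {
    // Two units collide on the doc output name; only the workspace one is a root.
    let root = Unit { name: "foo-macro (workspace)", is_host: true };
    let dep = Unit { name: "foo-macro (crates.io)", is_host: true };
    let roots: HashSet<Unit> = [root.clone()].into_iter().collect();

    // Both units match the predicate, but the root unit is never removed.
    let kept = remove_duplicates(vec![root.clone(), dep], &roots, &|u| u.is_host);
    assert_eq!(kept, vec![root]);
}

Having the helper return the units it kept, rather than mutating in place, mirrors the refactor in the diff below, where `remove` now hands the remaining units back to the caller for further filtering.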
@@ -1558,8 +1558,11 @@ fn remove_duplicate_doc(
     // Keep track of units to remove so that they can be efficiently removed
     // from the unit_deps.
     let mut removed_units: HashSet<Unit> = HashSet::new();
-    let mut remove = |units: Vec<Unit>, reason: &str| {
-        for unit in units {
+    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
+        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
+            .into_iter()
+            .partition(|unit| cb(unit) && !root_units.contains(unit));
+        for unit in to_remove {
             log::debug!(
                 "removing duplicate doc due to {} for package {} target `{}`",
                 reason,
@@ -1569,6 +1572,7 @@ fn remove_duplicate_doc(
             unit_graph.remove(&unit);
             removed_units.insert(unit);
         }
+        remaining_units
     };
     // Iterate over the duplicates and try to remove them from unit_graph.
     for (_crate_name, mut units) in all_docs {
@@ -1581,14 +1585,11 @@ fn remove_duplicate_doc(
             .iter()
             .all(CompileKind::is_host)
         {
-            let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) =
-                units.into_iter().partition(|unit| unit.kind.is_host());
             // Note these duplicates may not be real duplicates, since they
             // might get merged in rebuild_unit_graph_shared. Either way, it
             // shouldn't hurt to remove them early (although the report in the
             // log might be confusing).
-            remove(to_remove, "host/target merger");
-            units = remaining_units;
+            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
             if units.len() == 1 {
                 continue;
             }
@@ -1610,10 +1611,9 @@ fn remove_duplicate_doc(
         units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
         // Remove any entries with version < newest.
        let newest_version = units.last().unwrap().pkg.version().clone();
-        let (to_remove, keep_units): (Vec<Unit>, Vec<Unit>) = units
-            .into_iter()
-            .partition(|unit| unit.pkg.version() < &newest_version);
-        remove(to_remove, "older version");
+        let keep_units = remove(units, "older version", &|unit| {
+            unit.pkg.version() < &newest_version
+        });
         remaining_units.extend(keep_units);
     } else {
         remaining_units.extend(units);
@@ -478,3 +478,69 @@ fn collision_doc_target()
         )
         .run();
 }
+
+#[cargo_test]
+fn collision_with_root() {
+    // Check for a doc collision between a root package and a dependency.
+    // In this case, `foo-macro` comes from both the workspace and crates.io.
+    // This checks that the duplicate correction code doesn't choke on this
+    // by removing the root unit.
+    Package::new("foo-macro", "1.0.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [workspace]
+                members = ["abc", "foo-macro"]
+            "#,
+        )
+        .file(
+            "abc/Cargo.toml",
+            r#"
+                [package]
+                name = "abc"
+                version = "1.0.0"
+
+                [dependencies]
+                foo-macro = "1.0"
+            "#,
+        )
+        .file("abc/src/lib.rs", "")
+        .file(
+            "foo-macro/Cargo.toml",
+            r#"
+                [package]
+                name = "foo-macro"
+                version = "1.0.0"
+
+                [lib]
+                proc-macro = true
+
+                [dependencies]
+                abc = {path="../abc"}
+            "#,
+        )
+        .file("foo-macro/src/lib.rs", "")
+        .build();
+
+    p.cargo("doc")
+        .with_stderr_unordered("\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo-macro v1.0.0 [..]
+warning: output filename collision.
+The lib target `foo-macro` in package `foo-macro v1.0.0` has the same output filename as the lib target `foo-macro` in package `foo-macro v1.0.0 [..]`.
+Colliding filename is: [CWD]/target/doc/foo_macro/index.html
+The targets should have unique names.
+This is a known bug where multiple crates with the same name use
+the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
+[CHECKING] foo-macro v1.0.0
+[DOCUMENTING] foo-macro v1.0.0
+[CHECKING] abc v1.0.0 [..]
+[DOCUMENTING] foo-macro v1.0.0 [..]
+[DOCUMENTING] abc v1.0.0 [..]
+[FINISHED] [..]
+")
+        .run();
+}