Mirror of https://github.com/rust-lang/cargo.git (synced 2025-09-28 11:20:36 +00:00)
Fix renaming crates as they come from 2 sources

Previously, manifest parsing verified that dependencies with the same name came from the same source, but this check erroneously emitted an error when the `package` key was used to rename crates. The first change here updates that check to key off the `rename` field rather than the `name` field.

That, however, exposed an existing bug in the implementation. During compilation we have a `Resolve`, which is a graph of crates, but we don't know *why* each edge in the dependency graph exists. In other words, when looking at an edge of the graph we don't know which `Dependency` caused that edge to be drawn. We need to know this when passing `--extern` flags, because the `Dependency` is what lists what's being renamed.

This commit therefore primarily refactors `Resolve::deps` from an iterator of package ids to an iterator of tuples: the first element is the package id from before, and the second element is the list of `Dependency` directives that caused the edge to be drawn. This refactoring also cleaned up a few places in the backend where we had to work around the lack of this knowledge, and it fixes the extra test added here.

Closes #5413
This commit is contained in:
Parent: d998906598
Commit: ce5bbbc7d6
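The core of the refactor is that `Resolve::deps` now yields, for every edge, the `Dependency` declarations that put the edge there, not just the depended-on package id. The sketch below illustrates that shape with simplified stand-in types (`PackageId`, `Dependency`, and `Resolve` here are illustrative structs, not Cargo's real ones), showing how a consumer such as the `--extern` logic can read renames off the edge:

    use std::collections::HashMap;

    // Stand-ins for Cargo's types, just to show the shape of the data.
    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct PackageId(String);

    #[derive(Clone, Debug)]
    struct Dependency {
        // Set when `package = "..."` renames the crate in the manifest.
        rename: Option<String>,
    }

    struct Resolve {
        // Every edge (from, to) maps to the manifest declarations that drew it.
        dependencies: HashMap<(PackageId, PackageId), Vec<Dependency>>,
        edges: HashMap<PackageId, Vec<PackageId>>,
    }

    impl Resolve {
        // Mirrors the new iterator item: the package id *plus* the `Dependency`
        // list for the edge, instead of the id alone.
        fn deps<'a>(
            &'a self,
            from: &'a PackageId,
        ) -> impl Iterator<Item = (&'a PackageId, &'a [Dependency])> + 'a {
            self.edges
                .get(from)
                .into_iter()
                .flat_map(|v| v.iter())
                .map(move |to| {
                    let deps = &self.dependencies[&(from.clone(), to.clone())];
                    (to, deps.as_slice())
                })
        }
    }

    fn main() {
        let a = PackageId("a".into());
        let b = PackageId("b".into());
        let mut resolve = Resolve {
            dependencies: HashMap::new(),
            edges: HashMap::new(),
        };
        resolve.edges.insert(a.clone(), vec![b.clone()]);
        resolve.dependencies.insert(
            (a.clone(), b.clone()),
            vec![Dependency { rename: Some("b_renamed".into()) }],
        );

        // The backend can now see *why* an edge exists, e.g. to pick up renames
        // when emitting `--extern` flags.
        for (id, deps) in resolve.deps(&a) {
            let rename = deps.iter().filter_map(|d| d.rename.as_deref()).next();
            println!("{:?} pulled in by {} declaration(s), rename = {:?}",
                     id, deps.len(), rename);
        }
    }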
@@ -64,22 +64,20 @@ fn compute_deps<'a, 'b, 'cfg>(
     let id = unit.pkg.package_id();
     let deps = cx.resolve.deps(id);
-    let mut ret = deps.filter(|dep| {
-        unit.pkg
-            .dependencies()
-            .iter()
-            .filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version()))
-            .any(|d| {
+    let mut ret = deps
+        .filter(|&(_id, deps)| {
+            assert!(deps.len() > 0);
+            deps.iter().any(|dep| {
                 // If this target is a build command, then we only want build
                 // dependencies, otherwise we want everything *other than* build
                 // dependencies.
-                if unit.target.is_custom_build() != d.is_build() {
+                if unit.target.is_custom_build() != dep.is_build() {
                     return false;
                 }

                 // If this dependency is *not* a transitive dependency, then it
                 // only applies to test/example targets
-                if !d.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
+                if !dep.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
                     && !unit.profile.test
                 {
                     return false;
@@ -87,13 +85,13 @@ fn compute_deps<'a, 'b, 'cfg>(

                 // If this dependency is only available for certain platforms,
                 // make sure we're only enabling it for that platform.
-                if !cx.dep_platform_activated(d, unit.kind) {
+                if !cx.dep_platform_activated(dep, unit.kind) {
                     return false;
                 }

                 // If the dependency is optional, then we're only activating it
                 // if the corresponding feature was activated
-                if d.is_optional() && !cx.resolve.features(id).contains(&*d.name()) {
+                if dep.is_optional() && !cx.resolve.features(id).contains(&*dep.name()) {
                     return false;
                 }

@@ -101,17 +99,20 @@ fn compute_deps<'a, 'b, 'cfg>(
                 // actually used!
                 true
             })
-    }).filter_map(|id| match cx.get_package(id) {
-        Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
-            let unit = Unit {
-                pkg,
-                target: t,
-                profile: lib_or_check_profile(unit, t, cx),
-                kind: unit.kind.for_target(t),
-            };
-            Ok(unit)
-        }),
-        Err(e) => Some(Err(e)),
-    })
+        })
+        .filter_map(|(id, _)| {
+            match cx.get_package(id) {
+                Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
+                    let unit = Unit {
+                        pkg,
+                        target: t,
+                        profile: lib_or_check_profile(unit, t, cx),
+                        kind: unit.kind.for_target(t),
+                    };
+                    Ok(unit)
+                }),
+                Err(e) => Some(Err(e)),
+            }
+        })
         .collect::<CargoResult<Vec<_>>>()?;

@@ -205,17 +206,15 @@ fn compute_deps_doc<'a, 'cfg>(
 ) -> CargoResult<Vec<Unit<'a>>> {
     let deps = cx.resolve
         .deps(unit.pkg.package_id())
-        .filter(|dep| {
-            unit.pkg
-                .dependencies()
-                .iter()
-                .filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version()))
-                .any(|dep| match dep.kind() {
-                    DepKind::Normal => cx.dep_platform_activated(dep, unit.kind),
-                    _ => false,
-                })
-        })
-        .map(|dep| cx.get_package(dep));
+        .filter(|&(_id, deps)| {
+            deps.iter().any(|dep| {
+                match dep.kind() {
+                    DepKind::Normal => cx.dep_platform_activated(dep, unit.kind),
+                    _ => false,
+                }
+            })
+        })
+        .map(|(id, _deps)| cx.get_package(id));

     // To document a library, we depend on dependencies actually being
     // built. If we're documenting *all* libraries, then we also depend on
@@ -1094,23 +1094,30 @@ fn build_deps_args<'a, 'cfg>(
         }
         let mut v = OsString::new();

-        // Unfortunately right now Cargo doesn't have a great way to get a
-        // 1:1 mapping of entries in `dependencies()` to the actual crate
-        // we're depending on. Instead we're left to do some guesswork here
-        // to figure out what `Dependency` the `dep` unit corresponds to in
-        // `current` to see if we're renaming it.
-        //
-        // This I believe mostly works out for now, but we'll likely want
-        // to tighten up this in the future.
-        let name = current
-            .pkg
-            .dependencies()
-            .iter()
-            .filter(|d| d.matches_ignoring_source(dep.pkg.package_id()))
-            .filter_map(|d| d.rename())
-            .next();
+        let deps = {
+            let a = current.pkg.package_id();
+            let b = dep.pkg.package_id();
+            if a == b {
+                &[]
+            } else {
+                cx.resolve.dependencies_listed(a, b)
+            }
+        };

-        v.push(name.unwrap_or(&dep.target.crate_name()));
+        let crate_name = dep.target.crate_name();
+        let mut names = deps.iter()
+            .map(|d| d.rename().unwrap_or(&crate_name));
+        let name = names.next().unwrap_or(&crate_name);
+        for n in names {
+            if n == name {
+                continue
+            }
+            bail!("multiple dependencies listed for the same crate must \
+                   all have the same name, but the dependency on `{}` \
+                   is listed as having different names", dep.pkg.package_id());
+        }
+
+        v.push(name);
         v.push("=");
         v.push(cx.files().out_dir(dep));
         v.push(&path::MAIN_SEPARATOR.to_string());
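With the `Dependency` list available on each edge, picking the crate name for the `--extern` flag boils down to: take each declaration's rename (falling back to the crate's own name) and insist that every declaration for the edge agrees, which is the error message the hunk above introduces. A self-contained sketch of that selection, with `Dep` and `pick_extern_name` as illustrative stand-ins rather than Cargo APIs:

    // Illustrative only: `Dep` stands in for cargo::core::Dependency.
    struct Dep {
        rename: Option<String>,
    }

    /// Pick the name used on the `--extern <name>=<path>` flag for one edge.
    /// Every declaration for the edge must agree on the name, mirroring the
    /// error added in `build_deps_args` above.
    fn pick_extern_name(crate_name: &str, deps: &[Dep]) -> Result<String, String> {
        let mut names = deps
            .iter()
            .map(|d| d.rename.as_deref().unwrap_or(crate_name));
        let first = names.next().unwrap_or(crate_name);
        for n in names {
            if n != first {
                return Err(format!(
                    "multiple dependencies listed for the same crate must all \
                     have the same name, but `{}` and `{}` differ",
                    first, n
                ));
            }
        }
        Ok(first.to_string())
    }

    fn main() {
        let deps = vec![Dep { rename: Some("foo1".into()) }];
        let name = pick_extern_name("foo", &deps).unwrap();
        // The backend would then emit something like:
        //     --extern foo1=<out_dir>/libfoo.rlib
        assert_eq!(name, "foo1");
        println!("--extern {}=<out_dir>/libfoo.rlib", name);
    }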
@@ -269,17 +269,25 @@ impl Context {
         replacements
     }

-    pub fn graph(&self) -> Graph<PackageId> {
+    pub fn graph(&self)
+        -> (Graph<PackageId>, HashMap<(PackageId, PackageId), Vec<Dependency>>)
+    {
         let mut graph = Graph::new();
+        let mut deps = HashMap::new();
         let mut cur = &self.resolve_graph;
         while let Some(ref node) = cur.head {
             match node.0 {
-                GraphNode::Add(ref p) => graph.add(p.clone(), &[]),
-                GraphNode::Link(ref a, ref b) => graph.link(a.clone(), b.clone()),
+                GraphNode::Add(ref p) => graph.add(p.clone()),
+                GraphNode::Link(ref a, ref b, ref dep) => {
+                    graph.link(a.clone(), b.clone());
+                    deps.entry((a.clone(), b.clone()))
+                        .or_insert(Vec::new())
+                        .push(dep.clone());
+                }
             }
             cur = &node.1;
         }
-        graph
+        (graph, deps)
     }
 }

@@ -95,7 +95,7 @@ impl EncodableResolve {
         let mut g = Graph::new();

         for &(ref id, _) in live_pkgs.values() {
-            g.add(id.clone(), &[]);
+            g.add(id.clone());
         }

         for &(ref id, pkg) in live_pkgs.values() {
@@ -177,15 +177,15 @@ impl EncodableResolve {
             unused_patches.push(id);
         }

-        Ok(Resolve {
-            graph: g,
-            empty_features: HashSet::new(),
-            features: HashMap::new(),
-            replacements,
-            checksums,
-            metadata,
-            unused_patches,
-        })
+        Ok(Resolve::new(
+            g,
+            HashMap::new(),
+            replacements,
+            HashMap::new(),
+            checksums,
+            metadata,
+            unused_patches,
+        ))
     }
 }

@@ -347,17 +347,17 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
     where
         S: ser::Serializer,
     {
-        let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect();
+        let mut ids: Vec<_> = self.resolve.iter().collect();
         ids.sort();

         let encodable = ids.iter()
             .filter_map(|&id| Some(encodable_resolve_node(id, self.resolve)))
             .collect::<Vec<_>>();

-        let mut metadata = self.resolve.metadata.clone();
+        let mut metadata = self.resolve.metadata().clone();

         for id in ids.iter().filter(|id| !id.source_id().is_path()) {
-            let checksum = match self.resolve.checksums[*id] {
+            let checksum = match self.resolve.checksums()[*id] {
                 Some(ref s) => &s[..],
                 None => "<none>",
             };
@@ -398,10 +398,7 @@ fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency {
         Some(id) => (Some(encodable_package_id(id)), None),
         None => {
             let mut deps = resolve
-                .graph
-                .edges(id)
-                .into_iter()
-                .flat_map(|a| a)
+                .deps_not_replaced(id)
                 .map(encodable_package_id)
                 .collect::<Vec<_>>();
             deps.sort();

@@ -58,7 +58,6 @@ use core::{Dependency, PackageId, Registry, Summary};
 use core::PackageIdSpec;
 use core::interning::InternedString;
 use util::config::Config;
-use util::Graph;
 use util::errors::{CargoError, CargoResult};
 use util::profile;

@@ -123,25 +122,25 @@ pub fn resolve(
     let mut registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions);
     let cx = activate_deps_loop(cx, &mut registry, summaries, config)?;

-    let mut resolve = Resolve {
-        graph: cx.graph(),
-        empty_features: HashSet::new(),
-        checksums: HashMap::new(),
-        metadata: BTreeMap::new(),
-        replacements: cx.resolve_replacements(),
-        features: cx.resolve_features
+    let (graph, deps) = cx.graph();
+
+    let mut cksums = HashMap::new();
+    for summary in cx.activations.values().flat_map(|v| v.iter()) {
+        let cksum = summary.checksum().map(|s| s.to_string());
+        cksums.insert(summary.package_id().clone(), cksum);
+    }
+    let resolve = Resolve::new(
+        graph,
+        deps,
+        cx.resolve_replacements(),
+        cx.resolve_features
             .iter()
             .map(|(k, v)| (k.clone(), v.iter().map(|x| x.to_string()).collect()))
             .collect(),
-        unused_patches: Vec::new(),
-    };
-
-    for summary in cx.activations.values().flat_map(|v| v.iter()) {
-        let cksum = summary.checksum().map(|s| s.to_string());
-        resolve
-            .checksums
-            .insert(summary.package_id().clone(), cksum);
-    }
+        cksums,
+        BTreeMap::new(),
+        Vec::new(),
+    );

     check_cycles(&resolve, &cx.activations)?;
     trace!("resolved: {:?}", resolve);
@@ -402,7 +401,13 @@ fn activate_deps_loop(
                 dep.name(),
                 candidate.summary.version()
             );
-            let res = activate(&mut cx, registry, Some(&parent), candidate, &method);
+            let res = activate(
+                &mut cx,
+                registry,
+                Some((&parent, &dep)),
+                candidate,
+                &method,
+            );

             let successfully_activated = match res {
                 // Success! We've now activated our `candidate` in our context
@@ -615,14 +620,15 @@ fn activate_deps_loop(
 fn activate(
     cx: &mut Context,
     registry: &mut RegistryQueryer,
-    parent: Option<&Summary>,
+    parent: Option<(&Summary, &Dependency)>,
     candidate: Candidate,
     method: &Method,
 ) -> ActivateResult<Option<(DepsFrame, Duration)>> {
-    if let Some(parent) = parent {
+    if let Some((parent, dep)) = parent {
         cx.resolve_graph.push(GraphNode::Link(
             parent.package_id().clone(),
             candidate.summary.package_id().clone(),
+            dep.clone(),
         ));
     }

@@ -654,7 +660,7 @@ fn activate(
     };

     let now = Instant::now();
-    let deps = cx.build_deps(registry, parent, &candidate, method)?;
+    let deps = cx.build_deps(registry, parent.map(|p| p.0), &candidate, method)?;
     let frame = DepsFrame {
         parent: candidate,
         just_for_error_messages: false,
@@ -861,11 +867,11 @@ fn activation_error(
     candidates: &[Candidate],
     config: Option<&Config>,
 ) -> CargoError {
-    let graph = cx.graph();
+    let (graph, _) = cx.graph();
     if !candidates.is_empty() {
         let mut msg = format!("failed to select a version for `{}`.", dep.name());
         msg.push_str("\n ... required by ");
-        msg.push_str(&describe_path(&graph, parent.package_id()));
+        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));

         msg.push_str("\nversions that meet the requirements `");
         msg.push_str(&dep.version_req().to_string());
@@ -894,7 +900,7 @@ fn activation_error(
                 msg.push_str(link);
                 msg.push_str("` as well:\n");
             }
-            msg.push_str(&describe_path(&graph, p));
+            msg.push_str(&describe_path(&graph.path_to_top(p)));
         }

         let (features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors
@@ -925,7 +931,7 @@ fn activation_error(

         for &(p, _) in other_errors.iter() {
             msg.push_str("\n\n previously selected ");
-            msg.push_str(&describe_path(&graph, p));
+            msg.push_str(&describe_path(&graph.path_to_top(p)));
         }

         msg.push_str("\n\nfailed to select a version for `");
@@ -976,7 +982,7 @@ fn activation_error(
            versions
        );
        msg.push_str("required by ");
-       msg.push_str(&describe_path(&graph, parent.package_id()));
+       msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));

        // If we have a path dependency with a locked version, then this may
        // indicate that we updated a sub-package and forgot to run `cargo
@@ -997,7 +1003,7 @@ fn activation_error(
            dep.source_id()
        );
        msg.push_str("required by ");
-       msg.push_str(&describe_path(&graph, parent.package_id()));
+       msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));

        msg
    };
@@ -1017,11 +1023,10 @@ fn activation_error(
 }

 /// Returns String representation of dependency chain for a particular `pkgid`.
-fn describe_path(graph: &Graph<PackageId>, pkgid: &PackageId) -> String {
+fn describe_path(path: &[&PackageId]) -> String {
     use std::fmt::Write;
-    let dep_path = graph.path_to_top(pkgid);
-    let mut dep_path_desc = format!("package `{}`", dep_path[0]);
-    for dep in dep_path.iter().skip(1) {
+    let mut dep_path_desc = format!("package `{}`", path[0]);
+    for dep in path[1..].iter() {
         write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
     }
     dep_path_desc
@@ -1056,7 +1061,7 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()> {
             bail!(
                 "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
                 id,
-                describe_path(&resolve.graph, id)
+                describe_path(&resolve.path_to_top(id))
             );
         }

@@ -4,8 +4,7 @@ use std::iter::FromIterator;

 use url::Url;

-use core::PackageIdSpec;
-use core::{PackageId, Summary};
+use core::{Dependency, PackageId, PackageIdSpec, Summary};
 use util::Graph;
 use util::errors::CargoResult;
 use util::graph::{Edges, Nodes};
@@ -19,21 +18,50 @@ use super::encode::Metadata;
 /// for each package.
 #[derive(PartialEq)]
 pub struct Resolve {
-    pub graph: Graph<PackageId>,
-    pub replacements: HashMap<PackageId, PackageId>,
-    pub empty_features: HashSet<String>,
-    pub features: HashMap<PackageId, HashSet<String>>,
-    pub checksums: HashMap<PackageId, Option<String>>,
-    pub metadata: Metadata,
-    pub unused_patches: Vec<PackageId>,
+    graph: Graph<PackageId>,
+    dependencies: HashMap<(PackageId, PackageId), Vec<Dependency>>,
+    replacements: HashMap<PackageId, PackageId>,
+    reverse_replacements: HashMap<PackageId, PackageId>,
+    empty_features: HashSet<String>,
+    features: HashMap<PackageId, HashSet<String>>,
+    checksums: HashMap<PackageId, Option<String>>,
+    metadata: Metadata,
+    unused_patches: Vec<PackageId>,
 }

 impl Resolve {
+    pub fn new(
+        graph: Graph<PackageId>,
+        dependencies: HashMap<(PackageId, PackageId), Vec<Dependency>>,
+        replacements: HashMap<PackageId, PackageId>,
+        features: HashMap<PackageId, HashSet<String>>,
+        checksums: HashMap<PackageId, Option<String>>,
+        metadata: Metadata,
+        unused_patches: Vec<PackageId>,
+    ) -> Resolve {
+        let reverse_replacements = replacements
+            .iter()
+            .map(|p| (p.1.clone(), p.0.clone()))
+            .collect();
+        Resolve {
+            graph,
+            dependencies,
+            replacements,
+            features,
+            checksums,
+            metadata,
+            unused_patches,
+            empty_features: HashSet::new(),
+            reverse_replacements,
+        }
+    }
+
+    /// Resolves one of the paths from the given dependent package up to
+    /// the root.
+    pub fn path_to_top<'a>(&'a self, pkg: &'a PackageId) -> Vec<&'a PackageId> {
+        self.graph.path_to_top(pkg)
+    }
+
     pub fn register_used_patches(&mut self, patches: &HashMap<Url, Vec<Summary>>) {
         for summary in patches.values().flat_map(|v| v) {
             if self.iter().any(|id| id == summary.package_id()) {
@@ -141,6 +169,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated
     pub fn deps(&self, pkg: &PackageId) -> Deps {
         Deps {
             edges: self.graph.edges(pkg),
+            id: pkg.clone(),
             resolve: self,
         }
     }
@@ -176,6 +205,35 @@ unable to verify that `{0}` is the same as when the lockfile was generated
     pub fn unused_patches(&self) -> &[PackageId] {
         &self.unused_patches
     }

+    pub fn checksums(&self) -> &HashMap<PackageId, Option<String>> {
+        &self.checksums
+    }
+
+    pub fn metadata(&self) -> &Metadata {
+        &self.metadata
+    }
+
+    pub fn dependencies_listed(&self, from: &PackageId, to: &PackageId) -> &[Dependency] {
+        // We've got a dependency on `from` to `to`, but this dependency edge
+        // may be affected by [replace]. If the `to` package is listed as the
+        // target of a replacement (aka the key of a reverse replacement map)
+        // then we try to find our dependency edge through that. If that fails
+        // then we go down below assuming it's not replaced.
+        //
+        // Note that we don't treat `from` as if it's been replaced because
+        // that's where the dependency originates from, and we only replace
+        // targets of dependencies not the originator.
+        if let Some(replace) = self.reverse_replacements.get(to) {
+            if let Some(deps) = self.dependencies.get(&(from.clone(), replace.clone())) {
+                return deps;
+            }
+        }
+        match self.dependencies.get(&(from.clone(), to.clone())) {
+            Some(ret) => ret,
+            None => panic!("no Dependency listed for `{}` => `{}`", from, to),
+        }
+    }
 }

 impl fmt::Debug for Resolve {
@@ -191,17 +249,18 @@ impl fmt::Debug for Resolve {

 pub struct Deps<'a> {
     edges: Option<Edges<'a, PackageId>>,
+    id: PackageId,
     resolve: &'a Resolve,
 }

 impl<'a> Iterator for Deps<'a> {
-    type Item = &'a PackageId;
+    type Item = (&'a PackageId, &'a [Dependency]);

-    fn next(&mut self) -> Option<&'a PackageId> {
-        self.edges
-            .as_mut()
-            .and_then(|e| e.next())
-            .map(|id| self.resolve.replacement(id).unwrap_or(id))
+    fn next(&mut self) -> Option<(&'a PackageId, &'a [Dependency])> {
+        let id = self.edges.as_mut()?.next()?;
+        let id_ret = self.resolve.replacement(id).unwrap_or(id);
+        let deps = &self.resolve.dependencies[&(self.id.clone(), id.clone())];
+        Some((id_ret, deps))
     }

     fn size_hint(&self) -> (usize, Option<usize>) {
@@ -394,5 +394,5 @@ impl<T> Drop for RcList<T> {

 pub enum GraphNode {
     Add(PackageId),
-    Link(PackageId, PackageId),
+    Link(PackageId, PackageId, Dependency),
 }
@@ -397,8 +397,7 @@ pub fn compile_ws<'a>(

     // Include features enabled for use by dependencies so targets can also use them with the
     // required-features field when deciding whether to be built or skipped.
-    let deps = resolve_with_overrides.deps(package_id);
-    for dep in deps {
+    for (dep, _) in resolve_with_overrides.deps(package_id) {
         for feature in resolve_with_overrides.features(dep) {
             features.insert(dep.name().to_string() + "/" + feature);
         }
@@ -1,5 +1,5 @@
 use core::compiler::{BuildConfig, Kind, TargetInfo};
-use core::{Package, PackageId, PackageSet, Resolve, Workspace};
+use core::{PackageSet, Resolve, Workspace};
 use ops;
 use std::collections::HashSet;
 use util::CargoResult;
@@ -18,57 +18,45 @@ pub fn fetch<'a>(
 ) -> CargoResult<(Resolve, PackageSet<'a>)> {
     let (packages, resolve) = ops::resolve_ws(ws)?;

-    fetch_for_target(ws, options.config, &options.target, &resolve, &packages)?;
+    let jobs = Some(1);
+    let build_config = BuildConfig::new(ws.config(), jobs, &options.target, None)?;
+    let target_info = TargetInfo::new(ws.config(), &build_config, Kind::Target)?;
+    {
+        let mut fetched_packages = HashSet::new();
+        let mut deps_to_fetch = ws.members()
+            .map(|p| p.package_id())
+            .collect::<Vec<_>>();
+
+        while let Some(id) = deps_to_fetch.pop() {
+            if !fetched_packages.insert(id) {
+                continue;
+            }
+
+            packages.get(id)?;
+            let deps = resolve.deps(id)
+                .filter(|&(_id, deps)| {
+                    deps.iter()
+                        .any(|d| {
+                            // If no target was specified then all dependencies can
+                            // be fetched.
+                            let target = match options.target {
+                                Some(ref t) => t,
+                                None => return true,
+                            };
+                            // If this dependency is only available for certain
+                            // platforms, make sure we're only fetching it for that
+                            // platform.
+                            let platform = match d.platform() {
+                                Some(p) => p,
+                                None => return true,
+                            };
+                            platform.matches(target, target_info.cfg())
+                        })
+                })
+                .map(|(id, _deps)| id);
+            deps_to_fetch.extend(deps);
+        }
+    }

     Ok((resolve, packages))
 }

-fn fetch_for_target<'a, 'cfg: 'a>(
-    ws: &'a Workspace<'cfg>,
-    config: &'cfg Config,
-    target: &Option<String>,
-    resolve: &'a Resolve,
-    packages: &'a PackageSet<'cfg>,
-) -> CargoResult<HashSet<&'a PackageId>> {
-    let mut fetched_packages = HashSet::new();
-    let mut deps_to_fetch = Vec::new();
-    let jobs = Some(1);
-    let build_config = BuildConfig::new(config, jobs, target, None)?;
-    let target_info = TargetInfo::new(config, &build_config, Kind::Target)?;
-    let root_package_ids = ws.members().map(Package::package_id).collect::<Vec<_>>();
-
-    deps_to_fetch.extend(root_package_ids);
-
-    while let Some(id) = deps_to_fetch.pop() {
-        if !fetched_packages.insert(id) {
-            continue;
-        }
-
-        let package = packages.get(id)?;
-        let deps = resolve.deps(id);
-        let dependency_ids = deps.filter(|dep| {
-            package
-                .dependencies()
-                .iter()
-                .filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version()))
-                .any(|d| {
-                    // If no target was specified then all dependencies can be fetched.
-                    let target = match *target {
-                        Some(ref t) => t,
-                        None => return true,
-                    };
-                    // If this dependency is only available for certain platforms,
-                    // make sure we're only fetching it for that platform.
-                    let platform = match d.platform() {
-                        Some(p) => p,
-                        None => return true,
-                    };
-                    platform.matches(target, target_info.cfg())
-                })
-        }).collect::<Vec<_>>();
-
-        deps_to_fetch.extend(dependency_ids);
-    }
-
-    Ok(fetched_packages)
-}
@@ -132,7 +132,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> {
             return;
         }
         set.insert(dep);
-        for dep in resolve.deps(dep) {
+        for dep in resolve.deps_not_replaced(dep) {
             fill_with_deps(resolve, dep, set, visited);
         }
     }
@@ -109,7 +109,7 @@ where
         .iter()
         .map(|id| Node {
             id,
-            dependencies: resolve.deps(id).collect(),
+            dependencies: resolve.deps(id).map(|p| p.0).collect(),
             features: resolve.features_sorted(id),
         })
        .collect::<Vec<_>>()
@@ -22,11 +22,10 @@ impl<N: Eq + Hash + Clone> Graph<N> {
         }
     }

-    pub fn add(&mut self, node: N, children: &[N]) {
+    pub fn add(&mut self, node: N) {
         self.nodes
             .entry(node)
-            .or_insert_with(HashSet::new)
-            .extend(children.iter().cloned());
+            .or_insert_with(HashSet::new);
     }

     pub fn link(&mut self, node: N, child: N) {
@@ -742,8 +742,8 @@ impl TomlManifest {
         {
             let mut names_sources = BTreeMap::new();
             for dep in &deps {
-                let name = dep.name();
-                let prev = names_sources.insert(name, dep.source_id());
+                let name = dep.rename().unwrap_or(dep.name().as_str());
+                let prev = names_sources.insert(name.to_string(), dep.source_id());
                 if prev.is_some() && prev != Some(dep.source_id()) {
                     bail!(
                         "Dependency '{}' has different source paths depending on the build \
@@ -1,6 +1,8 @@
-use cargotest::support::{execs, project};
-use cargotest::support::registry::Package;
 use cargotest::ChannelChanger;
+use cargotest::support::git;
+use cargotest::support::paths;
+use cargotest::support::registry::Package;
+use cargotest::support::{execs, project};
 use hamcrest::assert_that;

 #[test]
@@ -145,3 +147,171 @@ fn rename_with_different_names() {
         execs().with_status(0),
     );
 }
+
+#[test]
+fn lots_of_names() {
+    Package::new("foo", "0.1.0")
+        .file("src/lib.rs", "pub fn foo1() {}")
+        .publish();
+    Package::new("foo", "0.2.0")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .publish();
+    Package::new("foo", "0.1.0")
+        .file("src/lib.rs", "pub fn foo2() {}")
+        .alternative(true)
+        .publish();
+
+    let g = git::repo(&paths::root().join("another"))
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+                authors = []
+            "#,
+        )
+        .file("src/lib.rs", "pub fn foo3() {}")
+        .build();
+
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            &format!(r#"
+                cargo-features = ["alternative-registries", "rename-dependency"]
+
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                foo = "0.2"
+                foo1 = {{ version = "0.1", package = "foo" }}
+                foo2 = {{ version = "0.1", registry = "alternative", package = "foo" }}
+                foo3 = {{ git = '{}', package = "foo" }}
+                foo4 = {{ path = "foo", package = "foo" }}
+            "#,
+            g.url())
+        )
+        .file(
+            "src/lib.rs",
+            "
+                extern crate foo;
+                extern crate foo1;
+                extern crate foo2;
+                extern crate foo3;
+                extern crate foo4;
+
+                pub fn foo() {
+                    foo::foo();
+                    foo1::foo1();
+                    foo2::foo2();
+                    foo3::foo3();
+                    foo4::foo4();
+                }
+            ",
+        )
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.1.0"
+                authors = []
+            "#,
+        )
+        .file("foo/src/lib.rs", "pub fn foo4() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build -v").masquerade_as_nightly_cargo(),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn rename_and_patch() {
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+                cargo-features = ["rename-dependency"]
+
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                bar = { version = "0.1", package = "foo" }
+
+                [patch.crates-io]
+                foo = { path = "foo" }
+            "#,
+        )
+        .file(
+            "src/lib.rs",
+            "
+                extern crate bar;
+
+                pub fn foo() {
+                    bar::foo();
+                }
+            ",
+        )
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.1.0"
+                authors = []
+            "#,
+        )
+        .file("foo/src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    assert_that(
+        p.cargo("build -v").masquerade_as_nightly_cargo(),
+        execs().with_status(0),
+    );
+}
+
+#[test]
+fn rename_twice() {
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project("foo")
+        .file(
+            "Cargo.toml",
+            r#"
+                cargo-features = ["rename-dependency"]
+
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                bar = { version = "0.1", package = "foo" }
+                [build-dependencies]
+                foo = { version = "0.1" }
+            "#,
+        )
+        .file("src/lib.rs", "",)
+        .build();
+
+    assert_that(
+        p.cargo("build -v").masquerade_as_nightly_cargo(),
+        execs().with_status(101)
+            .with_stderr("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] foo v0.1.0 (registry [..])
+error: multiple dependencies listed for the same crate must all have the same \
+name, but the dependency on `foo v0.1.0` is listed as having different names
+")
+    );
+}