diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs
index 56a5ab63b..338f81756 100644
--- a/src/cargo/ops/cargo_rustc/context.rs
+++ b/src/cargo/ops/cargo_rustc/context.rs
@@ -18,6 +18,7 @@ pub struct Context<'a, 'b> {
     pub primary: bool,
     pub rustc_version: String,
     pub config: &'b mut Config<'b>,
+    pub resolve: &'a Resolve,
 
     dest: Path,
     host_dest: Path,
@@ -25,7 +26,6 @@ pub struct Context<'a, 'b> {
     host_deps_dir: Path,
     host_dylib: (String, String),
     package_set: &'a PackageSet,
-    resolve: &'a Resolve,
     target_dylib: (String, String),
     requirements: HashMap<(&'a PackageId, &'a str), PlatformRequirement>,
 }
diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs
index 68b121d0c..26cd66512 100644
--- a/src/cargo/ops/cargo_rustc/fingerprint.rs
+++ b/src/cargo/ops/cargo_rustc/fingerprint.rs
@@ -4,6 +4,7 @@ use std::io::File;
 
 use core::{Package, Target};
 use util;
+use util::hex::short_hash;
 use util::{CargoResult, Fresh, Dirty, Freshness};
 
 use super::job::Job;
@@ -18,8 +19,10 @@ use super::context::Context;
 /// compilation, returning the job as the second part of the tuple.
 pub fn prepare(cx: &mut Context, pkg: &Package,
                targets: &[&Target]) -> CargoResult<(Freshness, Job)> {
-    let fingerprint_loc = cx.dest(false).join(format!(".{}.fingerprint",
-                                                      pkg.get_name()));
+    let short = short_hash(pkg.get_package_id());
+    let fingerprint_loc = cx.dest(false).join(format!(".{}.{}.fingerprint",
+                                                      pkg.get_name(),
+                                                      short));
 
     let (is_fresh, fingerprint) = try!(is_fresh(pkg, &fingerprint_loc, cx,
                                                 targets));
diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs
index 99a21c17b..7488f24f0 100644
--- a/src/cargo/ops/cargo_rustc/job_queue.rs
+++ b/src/cargo/ops/cargo_rustc/job_queue.rs
@@ -1,7 +1,8 @@
 use std::collections::HashMap;
+use std::iter::AdditiveIterator;
 use term::color::YELLOW;
 
-use core::Package;
+use core::{Package, PackageId, Resolve};
 use util::{Config, TaskPool, DependencyQueue, Fresh, Dirty, Freshness};
 use util::CargoResult;
 
@@ -9,17 +10,18 @@ use super::job::Job;
 
 pub struct JobQueue<'a, 'b> {
     pool: TaskPool,
-    queue: DependencyQueue<(&'a Package, Job)>,
+    queue: DependencyQueue<'a, (&'a Package, Job)>,
     tx: Sender<Message>,
     rx: Receiver<Message>,
-    active: HashMap<String, uint>,
+    active: HashMap<&'a PackageId, uint>,
     config: &'b mut Config<'b>,
 }
 
-type Message = (String, Freshness, CargoResult<Vec<Job>>);
+type Message = (PackageId, Freshness, CargoResult<Vec<Job>>);
 
 impl<'a, 'b> JobQueue<'a, 'b> {
     pub fn new(config: &'b mut Config<'b>,
+               resolve: &'a Resolve,
                jobs: Vec<(&'a Package, Freshness, Job)>) -> JobQueue<'a, 'b> {
         let (tx, rx) = channel();
         let mut queue = DependencyQueue::new();
@@ -27,7 +29,9 @@ impl<'a, 'b> JobQueue<'a, 'b> {
             queue.register(pkg);
         }
         for (pkg, fresh, job) in jobs.move_iter() {
-            queue.enqueue(pkg, fresh, (pkg, job));
+            let mut deps = resolve.deps(pkg.get_package_id())
+                                  .move_iter().flat_map(|a| a);
+            queue.enqueue(pkg, deps.collect(), fresh, (pkg, job));
         }
 
         JobQueue {
@@ -52,16 +56,17 @@ impl<'a, 'b> JobQueue<'a, 'b> {
         while self.queue.len() > 0 {
             loop {
                 match self.queue.dequeue() {
-                    Some((name, Fresh, (pkg, _))) => {
-                        assert!(self.active.insert(name.clone(), 1u));
+                    Some((id, Fresh, (pkg, _))) => {
+                        assert!(self.active.insert(id, 1u));
                         try!(self.config.shell().status("Fresh", pkg));
-                        self.tx.send((name, Fresh, Ok(Vec::new())));
+                        self.tx.send((id.clone(), Fresh, Ok(Vec::new())));
                     }
-                    Some((name, Dirty, (pkg, job))) => {
-                        assert!(self.active.insert(name.clone(), 1));
+                    Some((id, Dirty, (pkg, job))) => {
+                        assert!(self.active.insert(id, 1));
                         try!(self.config.shell().status("Compiling", pkg));
                         let my_tx = self.tx.clone();
-                        self.pool.execute(proc() my_tx.send((name, Dirty, job.run())));
+                        let id = id.clone();
+                        self.pool.execute(proc() my_tx.send((id, Dirty, job.run())));
                     }
                     None => break,
                 }
@@ -70,32 +75,35 @@
             // Now that all possible work has been scheduled, wait for a piece
             // of work to finish. If any package fails to build then we stop
             // scheduling work as quickly as possibly.
-            let (name, fresh, result) = self.rx.recv();
-            *self.active.get_mut(&name) -= 1;
+            let (id, fresh, result) = self.rx.recv();
+            let id = self.active.iter().map(|(&k, _)| k).find(|&k| k == &id)
+                         .unwrap();
+            *self.active.get_mut(&id) -= 1;
             match result {
                 Ok(v) => {
                     for job in v.move_iter() {
-                        *self.active.get_mut(&name) += 1;
+                        *self.active.get_mut(&id) += 1;
                         let my_tx = self.tx.clone();
-                        let my_name = name.clone();
+                        let my_id = id.clone();
                         self.pool.execute(proc() {
-                            my_tx.send((my_name, fresh, job.run()));
+                            my_tx.send((my_id, fresh, job.run()));
                         });
                     }
-                    if *self.active.get(&name) == 0 {
-                        self.active.remove(&name);
-                        self.queue.finish(&name, fresh);
+                    if *self.active.get(&id) == 0 {
+                        self.active.remove(&id);
+                        self.queue.finish(id, fresh);
                     }
                 }
                 Err(e) => {
-                    if *self.active.get(&name) == 0 {
-                        self.active.remove(&name);
+                    if *self.active.get(&id) == 0 {
+                        self.active.remove(&id);
                     }
                     if self.active.len() > 0 && self.config.jobs() > 1 {
                         try!(self.config.shell().say(
                                     "Build failed, waiting for other \
                                      jobs to finish...", YELLOW));
-                        for _ in self.rx.iter() {}
+                        let amt = self.active.iter().map(|(_, v)| *v).sum();
+                        for _ in self.rx.iter().take(amt) {}
                     }
                     return Err(e)
                 }
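Note on the failure path in the job_queue.rs hunk above: `self.tx` lives as long as the queue itself, so a bare `for _ in self.rx.iter() {}` never sees the channel close; bounding the drain with `take(amt)` stops it once every in-flight job has reported back. A minimal sketch of the same pattern in current Rust (std::sync::mpsc); the job names and the in-flight count are invented for illustration and are not part of the patch:

    // Standalone sketch: after a failure, receive exactly `in_flight` results
    // instead of iterating the channel forever (the Sender is still alive here,
    // just as `self.tx` is in JobQueue::execute).
    use std::sync::mpsc::channel;
    use std::thread;

    fn main() {
        let (tx, rx) = channel::<&'static str>();
        let in_flight = 3; // pretend three compile jobs were already running
        for name in ["a", "b", "c"] {
            let tx = tx.clone();
            thread::spawn(move || tx.send(name).unwrap());
        }
        // `rx.iter()` alone would block after these three messages, because
        // `tx` is never dropped; `take` bounds the drain to the in-flight jobs.
        for finished in rx.iter().take(in_flight) {
            println!("job {finished} finished after the failure");
        }
    }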
diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs
index e19daae8f..e2b115a14 100644
--- a/src/cargo/ops/cargo_rustc/mod.rs
+++ b/src/cargo/ops/cargo_rustc/mod.rs
@@ -81,7 +81,7 @@ pub fn compile_targets<'a>(env: &str, targets: &[&'a Target], pkg: &'a Package,
     try!(compile(targets, pkg, &mut cx, &mut jobs));
 
     // Now that we've figured out everything that we're going to do, do it!
-    JobQueue::new(cx.config, jobs).execute()
+    JobQueue::new(cx.config, cx.resolve, jobs).execute()
 }
 
 fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package,
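The dependency_queue.rs rework below is the core of the change: every map that was keyed by package name becomes keyed by the resolved package identity, so two packages that share a name (the same git package at two revisions, as in the new test) no longer collide. A small sketch in current Rust of that distinction; `PkgId` here is a hypothetical stand-in for cargo's `PackageId`, not the real type:

    use std::collections::HashMap;

    // Hypothetical minimal stand-in for PackageId: identity is (name, source),
    // so the same name at two git revisions yields two distinct ids.
    #[derive(PartialEq, Eq, Hash, Debug)]
    struct PkgId { name: &'static str, source: &'static str }

    fn main() {
        let rev1 = PkgId { name: "bar", source: "git+file:///bar#rev1" };
        let rev2 = PkgId { name: "bar", source: "git+file:///bar#rev2" };
        assert_ne!(rev1.source, rev2.source);

        // Keyed by name, the second insert silently replaces the first.
        let mut by_name: HashMap<&str, &PkgId> = HashMap::new();
        by_name.insert(rev1.name, &rev1);
        by_name.insert(rev2.name, &rev2);
        assert_eq!(by_name.len(), 1);

        // Keyed by identity, both revisions are tracked independently.
        let mut by_id: HashMap<&PkgId, u32> = HashMap::new();
        by_id.insert(&rev1, 1);
        by_id.insert(&rev2, 1);
        assert_eq!(by_id.len(), 2);
    }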
diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs
index bb651207e..c65bbcd64 100644
--- a/src/cargo/util/dependency_queue.rs
+++ b/src/cargo/util/dependency_queue.rs
@@ -6,34 +6,31 @@
 
 use std::collections::{HashMap, HashSet};
 
-use core::Package;
+use core::{Package, PackageId};
 
-// TODO: For now, assume that the package set contains only one package
-// with a given name
-
-pub struct DependencyQueue<T> {
+pub struct DependencyQueue<'a, T> {
     /// A list of all known packages to build.
     ///
     /// The value of the hash map is list of dependencies which still need to be
     /// built before the package can be built. Note that the set is dynamically
    /// updated as more dependencies are built.
-    pkgs: HashMap<String, (HashSet<String>, T)>,
+    pkgs: HashMap<&'a PackageId, (HashSet<&'a PackageId>, T)>,
 
     /// A reverse mapping of a package to all packages that depend on that
     /// package.
     ///
     /// This map is statically known and does not get updated throughout the
     /// lifecycle of the DependencyQueue.
-    reverse_dep_map: HashMap<String, HashSet<String>>,
+    reverse_dep_map: HashMap<&'a PackageId, HashSet<&'a PackageId>>,
 
     /// A set of dirty packages.
     ///
     /// Packages may become dirty over time if their dependencies are rebuilt.
-    dirty: HashSet<String>,
+    dirty: HashSet<&'a PackageId>,
 
     /// The packages which are currently being built, waiting for a call to
     /// `finish`.
-    pending: HashSet<String>,
+    pending: HashSet<&'a PackageId>,
 }
 
 /// Indication of the freshness of a package.
@@ -46,9 +43,9 @@ pub enum Freshness {
     Dirty,
 }
 
-impl<T> DependencyQueue<T> {
+impl<'a, T> DependencyQueue<'a, T> {
     /// Creates a new dependency queue with 0 packages.
-    pub fn new() -> DependencyQueue<T> {
+    pub fn new() -> DependencyQueue<'a, T> {
         DependencyQueue {
             pkgs: HashMap::new(),
             reverse_dep_map: HashMap::new(),
@@ -60,36 +57,36 @@ impl<T> DependencyQueue<T> {
     /// Registers a package with this queue.
     ///
     /// Only registered packages will be returned from dequeue().
-    pub fn register(&mut self, pkg: &Package) {
-        self.reverse_dep_map.insert(pkg.get_name().to_string(), HashSet::new());
+    pub fn register(&mut self, pkg: &'a Package) {
+        self.reverse_dep_map.insert(pkg.get_package_id(), HashSet::new());
     }
 
     /// Adds a new package to this dependency queue.
     ///
     /// It is assumed that any dependencies of this package will eventually also
     /// be added to the dependency queue.
-    pub fn enqueue(&mut self, pkg: &Package, fresh: Freshness, data: T) {
+    pub fn enqueue(&mut self, pkg: &'a Package, deps: Vec<&'a PackageId>,
+                   fresh: Freshness, data: T) {
         // ignore self-deps
-        if self.pkgs.contains_key(&pkg.get_name().to_string()) { return }
+        if self.pkgs.contains_key(&pkg.get_package_id()) { return }
         if fresh == Dirty {
-            self.dirty.insert(pkg.get_name().to_string());
+            self.dirty.insert(pkg.get_package_id());
         }
 
         let mut my_dependencies = HashSet::new();
-        for dep in pkg.get_dependencies().iter() {
-            if dep.get_name() == pkg.get_name() { continue }
+        for &dep in deps.iter() {
+            if dep == pkg.get_package_id() { continue }
 
             // skip deps which were filtered out as part of resolve
-            if !self.reverse_dep_map.find_equiv(&dep.get_name()).is_some() {
+            if !self.reverse_dep_map.find(&dep).is_some() {
                 continue
             }
 
-            let name = dep.get_name().to_string();
-            assert!(my_dependencies.insert(name.clone()));
-            let rev = self.reverse_dep_map.find_or_insert(name, HashSet::new());
-            assert!(rev.insert(pkg.get_name().to_string()));
+            assert!(my_dependencies.insert(dep));
+            let rev = self.reverse_dep_map.find_or_insert(dep, HashSet::new());
+            assert!(rev.insert(pkg.get_package_id()));
         }
-        assert!(self.pkgs.insert(pkg.get_name().to_string(),
+        assert!(self.pkgs.insert(pkg.get_package_id(),
                                  (my_dependencies, data)));
     }
 
@@ -97,15 +94,15 @@
     ///
     /// A package is ready to be built when it has 0 un-built dependencies. If
     /// `None` is returned then no packages are ready to be built.
-    pub fn dequeue(&mut self) -> Option<(String, Freshness, T)> {
+    pub fn dequeue(&mut self) -> Option<(&'a PackageId, Freshness, T)> {
         let pkg = match self.pkgs.iter()
                             .find(|&(_, &(ref deps, _))| deps.len() == 0)
-                            .map(|(ref name, _)| name.to_string()) {
+                            .map(|(name, _)| *name) {
             Some(pkg) => pkg,
             None => return None
         };
         let (_, data) = self.pkgs.pop(&pkg).unwrap();
-        self.pending.insert(pkg.clone());
+        self.pending.insert(pkg);
         let fresh = if self.dirty.contains(&pkg) {Dirty} else {Fresh};
         Some((pkg, fresh, data))
     }
@@ -125,9 +122,9 @@
     /// should be `Fresh`. If a package was rebuilt, `Dirty` should be
     /// specified, and the dirtiness will be propagated properly to all
     /// dependencies.
-    pub fn finish(&mut self, pkg: &String, fresh: Freshness) {
-        assert!(self.pending.remove(pkg));
-        let reverse_deps = match self.reverse_dep_map.find(pkg) {
+    pub fn finish(&mut self, pkg: &'a PackageId, fresh: Freshness) {
+        assert!(self.pending.remove(&pkg));
+        let reverse_deps = match self.reverse_dep_map.find(&pkg) {
             Some(deps) => deps,
             None => return,
         };
@@ -135,7 +132,7 @@
             if fresh == Dirty {
                 self.dirty.insert(dep.clone());
             }
-            assert!(self.pkgs.get_mut(dep).mut0().remove(pkg));
+            assert!(self.pkgs.get_mut(dep).mut0().remove(&pkg));
         }
     }
 }
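The doc comments above describe the scheduling contract of `DependencyQueue`: a package may be dequeued once its set of un-built dependencies is empty, and finishing it shrinks the sets of its reverse dependencies. A self-contained sketch of that idea in current Rust; the package names and the dependency table are invented, and this is not the `DependencyQueue` API itself:

    // Sketch of the ready-set scheduling idea: dequeue any package whose
    // remaining dependency set is empty, then remove it from everyone else's
    // sets once it is finished.
    use std::collections::{HashMap, HashSet};

    fn main() {
        // Remaining un-built dependencies per package, as a resolve might report them.
        let mut remaining: HashMap<&str, HashSet<&str>> = HashMap::from([
            ("foo", HashSet::from(["bar v0.0.1", "bar v0.0.2"])),
            ("bar v0.0.1", HashSet::new()),
            ("bar v0.0.2", HashSet::new()),
        ]);
        let mut order = Vec::new();

        while !remaining.is_empty() {
            // dequeue: a package is ready once it has no un-built dependencies
            let ready = *remaining.iter().find(|(_, deps)| deps.is_empty())
                                  .expect("dependency cycle").0;
            remaining.remove(ready);
            order.push(ready);
            // finish: drop the finished package from every other dependency set
            for deps in remaining.values_mut() { deps.remove(ready); }
        }
        println!("build order: {order:?}"); // both `bar` entries come before `foo`
    }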
diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs
index 489c77753..a4a5d62c2 100644
--- a/tests/test_cargo_compile.rs
+++ b/tests/test_cargo_compile.rs
@@ -100,6 +100,31 @@ Could not execute process \
         filename = format!("src{}foo.rs", path::SEP)).as_slice()));
 })
 
+test!(cargo_compile_with_invalid_code_in_deps {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+            [dependencies.baz]
+            path = "../baz"
+        "#)
+        .file("src/main.rs", "invalid rust code!");
+    let bar = project("bar")
+        .file("Cargo.toml", basic_bin_manifest("bar").as_slice())
+        .file("src/lib.rs", "invalid rust code!");
+    let baz = project("baz")
+        .file("Cargo.toml", basic_bin_manifest("baz").as_slice())
+        .file("src/lib.rs", "invalid rust code!");
+    bar.build();
+    baz.build();
+    assert_that(p.cargo_process("cargo-build"), execs().with_status(101));
+})
+
 test!(cargo_compile_with_warnings_in_the_root_package {
     let p = project("foo")
         .file("Cargo.toml", basic_bin_manifest("foo").as_slice())
diff --git a/tests/test_cargo_compile_git_deps.rs b/tests/test_cargo_compile_git_deps.rs
index 58e57dcf5..01a39aa08 100644
--- a/tests/test_cargo_compile_git_deps.rs
+++ b/tests/test_cargo_compile_git_deps.rs
@@ -404,6 +404,81 @@ test!(cargo_compile_with_short_ssh_git {
     invalid url `{}`: `url: Invalid character in scheme.\n", url)));
 })
 
+test!(two_revs_same_deps {
+    let bar = git_repo("meta-dep", |project| {
+        project.file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.0.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn bar() -> int { 1 }")
+    }).assert();
+
+    // Commit the changes and make sure we trigger a recompile
+    let rev1 = bar.process("git").args(["rev-parse", "HEAD"])
+                  .exec_with_output().assert();
+    File::create(&bar.root().join("src/lib.rs")).write_str(r#"
+        pub fn bar() -> int { 2 }
+    "#).assert();
+    bar.process("git").args(["add", "."]).exec_with_output().assert();
+    bar.process("git").args(["commit", "-m", "test"]).exec_with_output()
+       .assert();
+    let rev2 = bar.process("git").args(["rev-parse", "HEAD"])
+                  .exec_with_output().assert();
+
+    let rev1 = String::from_utf8(rev1.output).unwrap();
+    let rev2 = String::from_utf8(rev2.output).unwrap();
+
+    let foo = project("foo")
+        .file("Cargo.toml", format!(r#"
+            [project]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            git = "file:{}"
+            rev = "{}"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#, escape_path(&bar.root()), rev1.as_slice().trim()).as_slice())
+        .file("src/main.rs", r#"
+            extern crate bar;
+            extern crate baz;
+
+            fn main() {
+                assert_eq!(bar::bar(), 1);
+                assert_eq!(baz::baz(), 2);
+            }
+        "#);
+
+    let baz = project("baz")
+        .file("Cargo.toml", format!(r#"
+            [package]
+            name = "baz"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            git = "file:{}"
+            rev = "{}"
+        "#, escape_path(&bar.root()), rev2.as_slice().trim()).as_slice())
.file("src/lib.rs", r#" + extern crate bar; + pub fn baz() -> int { bar::bar() } + "#); + + baz.build(); + + // TODO: -j1 is a hack + assert_that(foo.cargo_process("cargo-build").arg("-j").arg("1"), + execs().with_status(0)); + assert_that(&foo.bin("main"), existing_file()); + assert_that(foo.process(foo.bin("main")), execs().with_status(0)); +}) + test!(recompilation { let git_project = git_repo("bar", |project| { project