diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index b84aad16c..f3f232985 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -186,7 +186,8 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { self.layout(unit.kind).fingerprint().join(dir) } - /// Returns the appropriate directory layout for either a plugin or not. + /// Returns the directory where a compiled build script is stored. + /// `/path/to/target/{debug,release}/build/PKG-HASH` pub fn build_script_dir(&self, unit: &Unit<'a>) -> PathBuf { assert!(unit.target.is_custom_build()); assert!(!unit.mode.is_run_custom_build()); @@ -194,12 +195,20 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { self.layout(Kind::Host).build().join(dir) } - /// Returns the appropriate directory layout for either a plugin or not. - pub fn build_script_out_dir(&self, unit: &Unit<'a>) -> PathBuf { + /// Returns the directory where information about running a build script + /// is stored. + /// `/path/to/target/{debug,release}/build/PKG-HASH` + pub fn build_script_run_dir(&self, unit: &Unit<'a>) -> PathBuf { assert!(unit.target.is_custom_build()); assert!(unit.mode.is_run_custom_build()); let dir = self.pkg_dir(unit); - self.layout(unit.kind).build().join(dir).join("out") + self.layout(unit.kind).build().join(dir) + } + + /// Returns the "OUT_DIR" directory for running a build script. + /// `/path/to/target/{debug,release}/build/PKG-HASH/out` + pub fn build_script_out_dir(&self, unit: &Unit<'a>) -> PathBuf { + self.build_script_run_dir(unit).join("out") } /// Returns the file stem for a given target/profile combo (with metadata). 
diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index a0046b732..24658c781 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -132,6 +132,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes .expect("running a script not depending on an actual script"); let script_dir = cx.files().build_script_dir(build_script_unit); let script_out_dir = cx.files().build_script_out_dir(unit); + let script_run_dir = cx.files().build_script_run_dir(unit); let build_plan = bcx.build_config.build_plan; let invocation_name = unit.buildkey(); @@ -241,13 +242,9 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes let pkg_name = unit.pkg.to_string(); let build_state = Arc::clone(&cx.build_state); let id = unit.pkg.package_id(); - let (output_file, err_file, root_output_file) = { - let build_output_parent = script_out_dir.parent().unwrap(); - let output_file = build_output_parent.join("output"); - let err_file = build_output_parent.join("stderr"); - let root_output_file = build_output_parent.join("root-output"); - (output_file, err_file, root_output_file) - }; + let output_file = script_run_dir.join("output"); + let err_file = script_run_dir.join("stderr"); + let root_output_file = script_run_dir.join("root-output"); let host_target_root = cx.files().target_root().to_path_buf(); let all = ( id, @@ -332,7 +329,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new())); } else { state.running(&cmd); - let timestamp = paths::get_current_filesystem_time(&output_file)?; + let timestamp = paths::set_invocation_time(&script_run_dir)?; let output = if extra_verbose { let prefix = format!("[{} {}] ", id.name(), id.version()); state.capture_output(&cmd, Some(prefix), true) diff --git a/src/cargo/core/compiler/fingerprint.rs 
b/src/cargo/core/compiler/fingerprint.rs index 4e23fb0fd..48f343d74 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -455,13 +455,12 @@ fn calculate<'a, 'cfg>( // Next, recursively calculate the fingerprint for all of our dependencies. // - // Skip the fingerprints of build scripts as they may not always be - // available and the dirtiness propagation for modification is tracked - // elsewhere. Also skip fingerprints of binaries because they don't actually - // induce a recompile, they're just dependencies in the sense that they need - // to be built. - let deps = cx.dep_targets(unit); - let deps = deps + // Skip the fingerprints of build scripts, they are included below in the + // `local` vec. Also skip fingerprints of binaries because they don't + // actually induce a recompile, they're just dependencies in the sense + // that they need to be built. + let mut deps = cx + .dep_targets(unit) .iter() .filter(|u| !u.target.is_custom_build() && !u.target.is_bin()) .map(|dep| { @@ -475,8 +474,9 @@ fn calculate<'a, 'cfg>( }) }) .collect::<CargoResult<Vec<_>>>()?; + deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); - // And finally, calculate what our own local fingerprint is + // And finally, calculate what our own local fingerprint is. let local = if use_dep_info(unit) { let dep_info = dep_info_loc(cx, unit); let mtime = dep_info_mtime_if_fresh(unit.pkg, &dep_info)?; @@ -485,8 +485,32 @@ fn calculate<'a, 'cfg>( let fingerprint = pkg_fingerprint(&cx.bcx, unit.pkg)?; LocalFingerprint::Precalculated(fingerprint) }; - let mut deps = deps; - deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); + let mut local = vec![local]; + // Include fingerprint for any build scripts this unit requires. + local.extend( + cx.dep_targets(unit) + .iter() + .filter(|u| u.mode.is_run_custom_build()) + .map(|dep| { + // If the build script is overridden, use the override info as + // the override. Otherwise, use the last invocation time of + // the build script.
If the build script re-runs during this + // run, dirty propagation within the JobQueue will ensure that + // this gets invalidated. This is only here to catch the + // situation when cargo is run a second time for another + // target that wasn't built previously (such as `cargo build` + // then `cargo test`). + build_script_override_fingerprint(cx, unit).unwrap_or_else(|| { + let ts_path = cx + .files() + .build_script_run_dir(dep) + .join("invoked.timestamp"); + let ts_path_mtime = paths::mtime(&ts_path).ok(); + LocalFingerprint::mtime(cx.files().target_root(), ts_path_mtime, &ts_path) + }) + }), + ); + let extra_flags = if unit.mode.is_doc() { bcx.rustdocflags_args(unit)? } else { @@ -502,7 +526,7 @@ fn calculate<'a, 'cfg>( path: util::hash_u64(&super::path_args(&cx.bcx, unit).0), features: format!("{:?}", bcx.resolve.features_sorted(unit.pkg.package_id())), deps, - local: vec![local], + local, memoized_hash: Mutex::new(None), edition: unit.target.edition(), rustflags: extra_flags, @@ -595,19 +619,13 @@ fn build_script_local_fingerprints<'a, 'cfg>( cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>, ) -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> { - let state = cx.build_state.outputs.lock().unwrap(); // First up, if this build script is entirely overridden, then we just // return the hash of what we overrode it with. - // - // Note that the `None` here means that we don't want to update the local - // fingerprint afterwards because this is all just overridden. - if let Some(output) = state.get(&(unit.pkg.package_id(), unit.kind)) { + if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) { debug!("override local fingerprints deps"); - let s = format!( - "overridden build state with hash: {}", - util::hash_u64(output) - ); - return Ok((vec![LocalFingerprint::Precalculated(s)], None)); + // Note that the `None` here means that we don't want to update the local + // fingerprint afterwards because this is all just overridden.
+ return Ok((vec![fingerprint], None)); } // Next up we look at the previously listed dependencies for the build @@ -633,6 +651,24 @@ fn build_script_local_fingerprints<'a, 'cfg>( )) } +/// Create a local fingerprint for an overridden build script. +/// Returns None if it is not overridden. +fn build_script_override_fingerprint<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> Option<LocalFingerprint> { + let state = cx.build_state.outputs.lock().unwrap(); + state + .get(&(unit.pkg.package_id(), unit.kind)) + .map(|output| { + let s = format!( + "overridden build state with hash: {}", + util::hash_u64(output) + ); + LocalFingerprint::Precalculated(s) + }) +} + fn local_fingerprints_deps( deps: &BuildDeps, target_root: &Path, diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 0ab2ac8e7..22f70bf67 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -239,6 +239,7 @@ fn rustc<'a, 'cfg>( .get_cwd() .unwrap_or_else(|| cx.bcx.config.cwd()) .to_path_buf(); + let fingerprint_dir = cx.files().fingerprint_dir(unit); return Ok(Work::new(move |state| { // Only at runtime have we discovered what the extra -L and -l @@ -289,7 +290,7 @@ fn rustc<'a, 'cfg>( } state.running(&rustc); - let timestamp = paths::get_current_filesystem_time(&dep_info_loc)?; + let timestamp = paths::set_invocation_time(&fingerprint_dir)?; if json_messages { exec.exec_json( rustc, diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index 961ded48c..c61b71b8f 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -187,16 +187,17 @@ pub fn mtime(path: &Path) -> CargoResult<FileTime> { Ok(FileTime::from_last_modification_time(&meta)) } -/// get `FileTime::from_system_time(SystemTime::now());` using the exact clock that this file system is using. -pub fn get_current_filesystem_time(path: &Path) -> CargoResult<FileTime> { +/// Record the current time on the filesystem (using the filesystem's clock) +/// using a file at the given directory.
Returns the current time. +pub fn set_invocation_time(path: &Path) -> CargoResult<FileTime> { // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, // then this can be removed. - let timestamp = path.with_file_name("invoked.timestamp"); + let timestamp = path.join("invoked.timestamp"); write( &timestamp, b"This file has an mtime of when this was started.", )?; - Ok(mtime(&timestamp)?) + mtime(&timestamp) } #[cfg(unix)] diff --git a/tests/testsuite/freshness.rs b/tests/testsuite/freshness.rs index 5083b63b3..2ebb4affd 100644 --- a/tests/testsuite/freshness.rs +++ b/tests/testsuite/freshness.rs @@ -1616,3 +1616,91 @@ fn rebuild_on_mid_build_file_modification() { t.join().ok().unwrap(); } + +#[test] +fn dirty_both_lib_and_test() { + // This tests that all artifacts that depend on the results of a build + // script will get rebuilt when the build script reruns, even for separate + // commands. It does the following: + // + // 1. Project "foo" has a build script which will compile a small + // staticlib to link against. Normally this would use the `cc` crate, + // but here we just use rustc to avoid the `cc` dependency. + // 2. Build the library. + // 3. Build the unit test. The staticlib intentionally has a bad value. + // 4. Rewrite the staticlib with the correct value. + // 5. Build the library again. + // 6. Build the unit test. This should recompile.
+ + let slib = |n| { + format!( + r#" + #[no_mangle] + pub extern "C" fn doit() -> i32 {{ + return {}; + }} + "#, + n + ) + }; + + let p = project() + .file( + "src/lib.rs", + r#" + extern "C" { + fn doit() -> i32; + } + + #[test] + fn t1() { + assert_eq!(unsafe { doit() }, 1, "doit assert failure"); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::path::PathBuf; + use std::process::Command; + + fn main() { + let rustc = env::var_os("RUSTC").unwrap(); + let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); + assert!( + Command::new(rustc) + .args(&[ + "--crate-type=staticlib", + "--out-dir", + out_dir.to_str().unwrap(), + "slib.rs" + ]) + .status() + .unwrap() + .success(), + "slib build failed" + ); + println!("cargo:rustc-link-lib=slib"); + println!("cargo:rustc-link-search={}", out_dir.display()); + } + "#, + ) + .file("slib.rs", &slib(2)) + .build(); + + p.cargo("build").run(); + + // 2 != 1 + p.cargo("test --lib") + .with_status(101) + .with_stdout_contains("[..]doit assert failure[..]") + .run(); + + // Fix the mistake. + p.change_file("slib.rs", &slib(1)); + + p.cargo("build").run(); + // This should recompile with the new static lib, and the test should pass. + p.cargo("test --lib").run(); +}