Fix prepare race condition in workspaces (#2069)

* Separate offline query metadata per crate

* Update sqlx-cli prepare to use separate query metadata per crate

* Add resolve.root to metadata test fixture

* Simplify root package resolution

* Fix prepare --merged
cycraig 2022-08-27 02:30:09 +02:00 committed by GitHub
parent 4c9d23960a
commit 0823e1139c
4 changed files with 49 additions and 4 deletions

View File

@@ -56,6 +56,9 @@ pub struct Metadata {
     ///
     /// Typically `target` at the workspace root, but can be overridden
     target_directory: PathBuf,
+    /// Package metadata for the crate in the current working directory, None if run from
+    /// a workspace with the `merged` flag.
+    current_package: Option<Package>,
 }
 
 impl Metadata {
@@ -75,6 +78,10 @@ impl Metadata {
         &self.target_directory
     }
 
+    pub fn current_package(&self) -> Option<&Package> {
+        self.current_package.as_ref()
+    }
+
     /// Gets all dependents (direct and transitive) of `id`
     pub fn all_dependents_of(&self, id: &MetadataId) -> BTreeSet<&MetadataId> {
         let mut dependents = BTreeSet::new();
@@ -101,13 +108,19 @@ impl FromStr for Metadata {
     type Err = anyhow::Error;
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let cargo_metadata: CargoMetadata = serde_json::from_str(s)?;
+
+        // Extract the package for the current working directory, will be empty if running
+        // from a workspace root.
+        let current_package: Option<Package> = cargo_metadata.root_package().map(Package::from);
+
         let CargoMetadata {
             packages: metadata_packages,
             workspace_members,
             resolve,
             target_directory,
             ..
-        } = serde_json::from_str(s)?;
+        } = cargo_metadata;
 
         let mut packages = BTreeMap::new();
         for metadata_package in metadata_packages {
@@ -136,6 +149,7 @@ impl FromStr for Metadata {
             workspace_members,
             reverse_deps,
             target_directory,
+            current_package,
         })
     }
 }
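
For context (not part of the diff): the root-package resolution above works because `cargo metadata` only populates `resolve.root` when it is invoked from inside a package directory; at a virtual workspace root it is `null`. A minimal standalone sketch of the same check, assuming the `cargo_metadata` crate rather than sqlx-cli's internal `Metadata`/`Package` types:

```rust
// Hypothetical stand-in for the logic above, using the `cargo_metadata` crate.
use cargo_metadata::MetadataCommand;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let metadata = MetadataCommand::new().exec()?;
    match metadata.root_package() {
        // Run from inside a member crate: `resolve.root` is populated.
        Some(pkg) => println!("current package: {:?}", pkg.name),
        // Run from a virtual workspace root: `resolve.root` is null.
        None => println!("no current package; run from a member crate or use `prepare --merged`"),
    }
    Ok(())
}
```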

View File

@@ -180,7 +180,23 @@ hint: This command only works in the manifest directory of a Cargo package."#
         bail!("`cargo check` failed with status: {}", check_status);
     }
 
-    let pattern = metadata.target_directory().join("sqlx/query-*.json");
+    let package_dir = if merge {
+        // Merge queries from all workspace crates.
+        "**"
+    } else {
+        // Use a separate sub-directory for each crate in a workspace. This avoids a race condition
+        // where `prepare` can pull in queries from multiple crates if they happen to be generated
+        // simultaneously (e.g. Rust Analyzer building in the background).
+        metadata
+            .current_package()
+            .map(|pkg| pkg.name())
+            .context("Resolving the crate package for the current working directory failed")?
+    };
+    let pattern = metadata
+        .target_directory()
+        .join("sqlx")
+        .join(package_dir)
+        .join("query-*.json");
 
     let mut data = BTreeMap::new();
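
As a rough illustration of the resulting glob behaviour (the `glob` crate and the crate names below are assumptions for the example, not taken from this diff): the per-crate pattern only collects the current crate's query files, while `--merged` deliberately sweeps every crate's sub-directory.

```rust
// Sketch only: assumes the `glob` crate and a hypothetical layout such as
//   target/sqlx/crate_a/query-aaaa.json
//   target/sqlx/crate_b/query-bbbb.json
use glob::glob;

fn main() -> Result<(), glob::PatternError> {
    // Default (per-crate): only crate_a's queries are collected, so a sibling
    // crate compiling at the same time cannot leak its queries into this run.
    for entry in glob("target/sqlx/crate_a/query-*.json")? {
        println!("per-crate: {:?}", entry);
    }

    // `prepare --merged`: the `**` wildcard matches every crate's sub-directory.
    for entry in glob("target/sqlx/**/query-*.json")? {
        println!("merged: {:?}", entry);
    }
    Ok(())
}
```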

View File

@@ -30506,7 +30506,7 @@
         "features": []
       }
     ],
-    "root": null
+    "root": "b_in_workspace_lib 0.1.0 (path+file:///home/user/problematic/workspace/b_in_workspace_lib)"
   },
   "target_directory": "/home/user/problematic/workspace/target",
   "version": 1,

View File

@@ -31,6 +31,8 @@ struct Metadata {
     offline: bool,
     database_url: Option<String>,
     #[cfg(feature = "offline")]
+    package_name: String,
+    #[cfg(feature = "offline")]
     target_dir: PathBuf,
     #[cfg(feature = "offline")]
     workspace_root: Arc<Mutex<Option<PathBuf>>>,
@@ -74,6 +76,11 @@ static METADATA: Lazy<Metadata> = Lazy::new(|| {
         .expect("`CARGO_MANIFEST_DIR` must be set")
         .into();
 
+    #[cfg(feature = "offline")]
+    let package_name: String = env("CARGO_PKG_NAME")
+        .expect("`CARGO_PKG_NAME` must be set")
+        .into();
+
     #[cfg(feature = "offline")]
     let target_dir = env("CARGO_TARGET_DIR").map_or_else(|_| "target".into(), |dir| dir.into());
@@ -110,6 +117,8 @@ static METADATA: Lazy<Metadata> = Lazy::new(|| {
         offline,
         database_url,
         #[cfg(feature = "offline")]
+        package_name,
+        #[cfg(feature = "offline")]
         target_dir,
         #[cfg(feature = "offline")]
         workspace_root: Arc::new(Mutex::new(None)),
@@ -402,7 +411,13 @@ where
     // If the build is offline, the cache is our input so it's pointless to also write data for it.
     #[cfg(feature = "offline")]
     if !offline {
-        let save_dir = METADATA.target_dir.join("sqlx");
+        // Use a separate sub-directory for each crate in a workspace. This avoids a race condition
+        // where `prepare` can pull in queries from multiple crates if they happen to be generated
+        // simultaneously (e.g. Rust Analyzer building in the background).
+        let save_dir = METADATA
+            .target_dir
+            .join("sqlx")
+            .join(&METADATA.package_name);
         std::fs::create_dir_all(&save_dir)?;
         data.save_in(save_dir, input.src_span)?;
     }
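
A self-contained sketch of the save-path computation above, using the environment variables Cargo sets while compiling a crate (which is when this macro code runs). The diff itself goes through sqlx's internal `env` helper and the `METADATA` cache; `std::env` below is a stand-in and the rest is illustrative.

```rust
use std::env;
use std::path::PathBuf;

// Stand-in for the macro-side logic: `CARGO_PKG_NAME` is always set by Cargo
// during compilation, while `CARGO_TARGET_DIR` may be absent, in which case
// "target" is the fallback (matching the diff above).
fn offline_save_dir() -> PathBuf {
    let package_name = env::var("CARGO_PKG_NAME").expect("`CARGO_PKG_NAME` must be set");
    let target_dir =
        env::var("CARGO_TARGET_DIR").map_or_else(|_| PathBuf::from("target"), PathBuf::from);

    // e.g. target/sqlx/my_crate: each crate writes its query-*.json files into
    // its own sub-directory, so concurrent builds of sibling workspace crates
    // no longer interleave their files under a shared target/sqlx directory.
    target_dir.join("sqlx").join(package_name)
}

fn main() {
    println!("{}", offline_save_dir().display());
}
```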