diff --git a/crates/cargo-util-schemas/lockfile.schema.json b/crates/cargo-util-schemas/lockfile.schema.json
index fe82ef6f3..5d7149d55 100644
--- a/crates/cargo-util-schemas/lockfile.schema.json
+++ b/crates/cargo-util-schemas/lockfile.schema.json
@@ -1,6 +1,6 @@
 {
   "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "title": "EncodableResolve",
+  "title": "TomlLockfile",
   "description": "The `Cargo.lock` structure.",
   "type": "object",
   "properties": {
@@ -18,14 +18,14 @@
         "null"
       ],
       "items": {
-        "$ref": "#/$defs/EncodableDependency"
+        "$ref": "#/$defs/TomlLockfileDependency"
       }
     },
     "root": {
       "description": "`root` is optional to allow backward compatibility.",
       "anyOf": [
         {
-          "$ref": "#/$defs/EncodableDependency"
+          "$ref": "#/$defs/TomlLockfileDependency"
         },
         {
           "type": "null"
@@ -42,11 +42,11 @@
       }
     },
     "patch": {
-      "$ref": "#/$defs/Patch"
+      "$ref": "#/$defs/TomlLockfilePatch"
     }
   },
   "$defs": {
-    "EncodableDependency": {
+    "TomlLockfileDependency": {
       "type": "object",
       "properties": {
         "name": {
@@ -73,13 +73,13 @@
           "null"
         ],
         "items": {
-          "$ref": "#/$defs/EncodablePackageId"
+          "$ref": "#/$defs/TomlLockfilePackageId"
         }
       },
       "replace": {
         "anyOf": [
           {
-            "$ref": "#/$defs/EncodablePackageId"
+            "$ref": "#/$defs/TomlLockfilePackageId"
           },
           {
             "type": "null"
@@ -92,7 +92,7 @@
         "version"
       ]
     },
-    "EncodablePackageId": {
+    "TomlLockfilePackageId": {
       "type": "object",
       "properties": {
         "name": {
@@ -115,13 +115,13 @@
         "name"
       ]
     },
-    "Patch": {
+    "TomlLockfilePatch": {
       "type": "object",
       "properties": {
         "unused": {
           "type": "array",
           "items": {
-            "$ref": "#/$defs/EncodableDependency"
+            "$ref": "#/$defs/TomlLockfileDependency"
          }
        }
      },
diff --git a/crates/cargo-util-schemas/src/lockfile.rs b/crates/cargo-util-schemas/src/lockfile.rs
index ddcca0998..8e949d027 100644
--- a/crates/cargo-util-schemas/src/lockfile.rs
+++ b/crates/cargo-util-schemas/src/lockfile.rs
@@ -10,25 +10,25 @@ use crate::core::{GitReference, SourceKind};
 /// The `Cargo.lock` structure.
 #[derive(Serialize, Deserialize, Debug)]
 #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
-pub struct EncodableResolve {
+pub struct TomlLockfile {
     pub version: Option<u32>,
-    pub package: Option<Vec<EncodableDependency>>,
+    pub package: Option<Vec<TomlLockfileDependency>>,
     /// `root` is optional to allow backward compatibility.
-    pub root: Option<EncodableDependency>,
-    pub metadata: Option<Metadata>,
-    #[serde(default, skip_serializing_if = "Patch::is_empty")]
-    pub patch: Patch,
+    pub root: Option<TomlLockfileDependency>,
+    pub metadata: Option<TomlLockfileMetadata>,
+    #[serde(default, skip_serializing_if = "TomlLockfilePatch::is_empty")]
+    pub patch: TomlLockfilePatch,
 }
 
 #[derive(Serialize, Deserialize, Debug, Default)]
 #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
-pub struct Patch {
-    pub unused: Vec<EncodableDependency>,
+pub struct TomlLockfilePatch {
+    pub unused: Vec<TomlLockfileDependency>,
 }
 
-pub type Metadata = BTreeMap<String, String>;
+pub type TomlLockfileMetadata = BTreeMap<String, String>;
 
-impl Patch {
+impl TomlLockfilePatch {
     fn is_empty(&self) -> bool {
         self.unused.is_empty()
     }
@@ -36,13 +36,13 @@ impl Patch {
 #[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)]
 #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
-pub struct EncodableDependency {
+pub struct TomlLockfileDependency {
     pub name: String,
     pub version: String,
-    pub source: Option<EncodableSourceId>,
+    pub source: Option<TomlLockfileSourceId>,
     pub checksum: Option<String>,
-    pub dependencies: Option<Vec<EncodablePackageId>>,
-    pub replace: Option<EncodablePackageId>,
+    pub dependencies: Option<Vec<TomlLockfilePackageId>>,
+    pub replace: Option<TomlLockfilePackageId>,
 }
 
 #[derive(Debug, Clone)]
@@ -51,7 +51,7 @@ pub struct EncodableDependency {
     derive(schemars::JsonSchema),
     schemars(with = "String")
 )]
-pub struct EncodableSourceId {
+pub struct TomlLockfileSourceId {
     /// Full string of the source
     source_str: String,
     /// Used for sources ordering
     kind: SourceKind,
     /// Used for sources ordering
     url: Url,
 }
 
-impl EncodableSourceId {
+impl TomlLockfileSourceId {
     pub fn new(source: String) -> Result<Self, EncodableSourceIdError> {
         let source_str = source.clone();
         let (kind, url) = source.split_once('+').ok_or_else(|| {
@@ -109,7 +109,7 @@ impl EncodableSourceId {
     }
 }
 
-impl ser::Serialize for EncodableSourceId {
+impl ser::Serialize for TomlLockfileSourceId {
     fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
     where
         S: ser::Serializer,
@@ -118,39 +118,39 @@ impl ser::Serialize for EncodableSourceId {
     }
 }
 
-impl<'de> de::Deserialize<'de> for EncodableSourceId {
+impl<'de> de::Deserialize<'de> for TomlLockfileSourceId {
     fn deserialize<D>(d: D) -> Result<Self, D::Error>
     where
         D: de::Deserializer<'de>,
     {
         let s = String::deserialize(d)?;
-        Ok(EncodableSourceId::new(s).map_err(de::Error::custom)?)
+        Ok(TomlLockfileSourceId::new(s).map_err(de::Error::custom)?)
     }
 }
 
-impl std::hash::Hash for EncodableSourceId {
+impl std::hash::Hash for TomlLockfileSourceId {
     fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
         self.kind.hash(state);
         self.url.hash(state);
     }
 }
 
-impl std::cmp::PartialEq for EncodableSourceId {
+impl std::cmp::PartialEq for TomlLockfileSourceId {
     fn eq(&self, other: &Self) -> bool {
         self.kind == other.kind && self.url == other.url
     }
 }
 
-impl std::cmp::Eq for EncodableSourceId {}
+impl std::cmp::Eq for TomlLockfileSourceId {}
 
-impl PartialOrd for EncodableSourceId {
-    fn partial_cmp(&self, other: &EncodableSourceId) -> Option<Ordering> {
+impl PartialOrd for TomlLockfileSourceId {
+    fn partial_cmp(&self, other: &TomlLockfileSourceId) -> Option<Ordering> {
         Some(self.cmp(other))
     }
 }
 
-impl Ord for EncodableSourceId {
-    fn cmp(&self, other: &EncodableSourceId) -> Ordering {
+impl Ord for TomlLockfileSourceId {
+    fn cmp(&self, other: &TomlLockfileSourceId) -> Ordering {
         self.kind
             .cmp(&other.kind)
             .then_with(|| self.url.cmp(&other.url))
@@ -159,13 +159,13 @@ impl Ord for EncodableSourceId {
 #[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
 #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
-pub struct EncodablePackageId {
+pub struct TomlLockfilePackageId {
     pub name: String,
     pub version: Option<String>,
-    pub source: Option<EncodableSourceId>,
+    pub source: Option<TomlLockfileSourceId>,
 }
 
-impl fmt::Display for EncodablePackageId {
+impl fmt::Display for TomlLockfilePackageId {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.name)?;
         if let Some(s) = &self.version {
@@ -178,17 +178,17 @@ impl fmt::Display for EncodablePackageId {
     }
 }
 
-impl FromStr for EncodablePackageId {
+impl FromStr for TomlLockfilePackageId {
     type Err = EncodablePackageIdError;
 
-    fn from_str(s: &str) -> Result<EncodablePackageId, Self::Err> {
+    fn from_str(s: &str) -> Result<TomlLockfilePackageId, Self::Err> {
         let mut s = s.splitn(3, ' ');
         let name = s.next().unwrap();
         let version = s.next();
         let source_id = match s.next() {
             Some(s) => {
                 if let Some(s) = s.strip_prefix('(').and_then(|s| s.strip_suffix(')')) {
-                    Some(EncodableSourceId::new(s.to_string())?)
+                    Some(TomlLockfileSourceId::new(s.to_string())?)
                 } else {
                     return Err(EncodablePackageIdErrorKind::InvalidSerializedPackageId.into());
                 }
@@ -196,7 +196,7 @@ impl FromStr for EncodablePackageId {
             None => None,
         };
 
-        Ok(EncodablePackageId {
+        Ok(TomlLockfilePackageId {
             name: name.to_string(),
             version: version.map(|v| v.to_string()),
             source: source_id,
@@ -204,7 +204,7 @@
     }
 }
 
-impl ser::Serialize for EncodablePackageId {
+impl ser::Serialize for TomlLockfilePackageId {
     fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
     where
         S: ser::Serializer,
@@ -213,14 +213,14 @@ impl ser::Serialize for EncodablePackageId {
     }
 }
 
-impl<'de> de::Deserialize<'de> for EncodablePackageId {
-    fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
+impl<'de> de::Deserialize<'de> for TomlLockfilePackageId {
+    fn deserialize<D>(d: D) -> Result<TomlLockfilePackageId, D::Error>
     where
         D: de::Deserializer<'de>,
     {
         String::deserialize(d).and_then(|string| {
             string
-                .parse::<EncodablePackageId>()
+                .parse::<TomlLockfilePackageId>()
                 .map_err(de::Error::custom)
         })
     }
@@ -266,7 +266,7 @@ enum EncodablePackageIdErrorKind {
 #[cfg(feature = "unstable-schema")]
 #[test]
 fn dump_lockfile_schema() {
-    let schema = schemars::schema_for!(crate::lockfile::EncodableResolve);
+    let schema = schemars::schema_for!(crate::lockfile::TomlLockfile);
     let dump = serde_json::to_string_pretty(&schema).unwrap();
     snapbox::assert_data_eq!(dump, snapbox::file!("../lockfile.schema.json").raw());
 }
diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs
index 793c311c1..d7c44de3d 100644
--- a/src/cargo/core/resolver/encode.rs
+++ b/src/cargo/core/resolver/encode.rs
@@ -118,7 +118,8 @@ use crate::util::interning::InternedString;
 use crate::util::{Graph, internal};
 use anyhow::{Context as _, bail};
 use cargo_util_schemas::lockfile::{
-    EncodableDependency, EncodablePackageId, EncodableResolve, EncodableSourceId, Patch,
+    TomlLockfile, TomlLockfileDependency, TomlLockfilePackageId, TomlLockfilePatch,
+    TomlLockfileSourceId,
 };
 use serde::ser;
 use std::collections::{HashMap, HashSet};
@@ -133,7 +134,7 @@ use tracing::debug;
 /// primary uses is to be used with `resolve_with_previous` to guide the
 /// resolver to create a complete Resolve.
 pub fn into_resolve(
-    resolve: EncodableResolve,
+    resolve: TomlLockfile,
     original: &str,
     ws: &Workspace<'_>,
 ) -> CargoResult<Resolve> {
@@ -176,7 +177,7 @@ pub fn into_resolve(
     let mut live_pkgs = HashMap::new();
     let mut all_pkgs = HashSet::new();
     for pkg in packages.iter() {
-        let enc_id = EncodablePackageId {
+        let enc_id = TomlLockfilePackageId {
             name: pkg.name.clone(),
             version: Some(pkg.version.clone()),
             source: pkg.source.clone(),
@@ -228,7 +229,7 @@ pub fn into_resolve(
             .insert(id.source_id(), *id);
     }
 
-    let mut lookup_id = |enc_id: &EncodablePackageId| -> Option<PackageId> {
+    let mut lookup_id = |enc_id: &TomlLockfilePackageId| -> Option<PackageId> {
         // The name of this package should always be in the larger list of
         // all packages.
         let by_version = map.get(enc_id.name.as_str())?;
@@ -329,7 +330,7 @@
         for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
             to_remove.push(k.to_string());
             let k = k.strip_prefix(prefix).unwrap();
-            let enc_id: EncodablePackageId = k
+            let enc_id: TomlLockfilePackageId = k
                 .parse()
                 .with_context(|| internal("invalid encoding of checksum in lockfile"))?;
             let Some(id) = lookup_id(&enc_id) else {
@@ -405,7 +406,7 @@ pub fn into_resolve(
 fn get_source_id<'a>(
     path_deps: &'a HashMap<String, HashMap<semver::Version, SourceId>>,
-    pkg: &'a EncodableDependency,
+    pkg: &'a TomlLockfileDependency,
 ) -> Option<&'a SourceId> {
     path_deps.iter().find_map(|(name, version_source)| {
         if name != &pkg.name || version_source.len() == 0 {
@@ -535,11 +536,11 @@ impl ser::Serialize for Resolve {
             Some(metadata)
         };
 
-        let patch = Patch {
+        let patch = TomlLockfilePatch {
             unused: self
                 .unused_patches()
                 .iter()
-                .map(|id| EncodableDependency {
+                .map(|id| TomlLockfileDependency {
                     name: id.name().to_string(),
                     version: id.version().to_string(),
                     source: encodable_source_id(id.source_id(), self.version()),
@@ -553,7 +554,7 @@ impl ser::Serialize for Resolve {
                 })
                 .collect(),
         };
-        EncodableResolve {
+        TomlLockfile {
             package: Some(encodable),
             root: None,
             metadata,
@@ -597,7 +598,7 @@ fn encodable_resolve_node(
     id: PackageId,
     resolve: &Resolve,
     state: &EncodeState<'_>,
-) -> EncodableDependency {
+) -> TomlLockfileDependency {
     let (replace, deps) = match resolve.replacement(id) {
         Some(id) => (
             Some(encodable_package_id(id, state, resolve.version())),
@@ -613,7 +614,7 @@ fn encodable_resolve_node(
         }
     };
 
-    EncodableDependency {
+    TomlLockfileDependency {
         name: id.name().to_string(),
         version: id.version().to_string(),
         source: encodable_source_id(id.source_id(), resolve.version()),
@@ -631,7 +632,7 @@ pub fn encodable_package_id(
     id: PackageId,
     state: &EncodeState<'_>,
     resolve_version: ResolveVersion,
-) -> EncodablePackageId {
+) -> TomlLockfilePackageId {
     let mut version = Some(id.version().to_string());
     let mut id_to_encode = id.source_id();
     if resolve_version <= ResolveVersion::V2 {
@@ -652,22 +653,22 @@ pub fn encodable_package_id(
             }
         }
     }
-    EncodablePackageId {
+    TomlLockfilePackageId {
         name: id.name().to_string(),
         version,
         source,
     }
 }
 
-fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<EncodableSourceId> {
+fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<TomlLockfileSourceId> {
     if id.is_path() {
         None
     } else {
         Some(
             if version >= ResolveVersion::V4 {
-                EncodableSourceId::new(id.as_encoded_url().to_string())
+                TomlLockfileSourceId::new(id.as_encoded_url().to_string())
             } else {
-                EncodableSourceId::new(id.as_url().to_string())
+                TomlLockfileSourceId::new(id.as_url().to_string())
             }
             .expect("source ID should have valid URLs"),
         )
diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs
index 536a22631..fb472d99d 100644
--- a/src/cargo/core/resolver/resolve.rs
+++ b/src/cargo/core/resolver/resolve.rs
@@ -6,7 +6,7 @@ use crate::core::{Dependency, PackageId, PackageIdSpec, PackageIdSpecQuery, Summ
 use crate::util::Graph;
 use crate::util::errors::CargoResult;
 use crate::util::interning::InternedString;
-use cargo_util_schemas::lockfile::Metadata;
+use cargo_util_schemas::lockfile::TomlLockfileMetadata;
 use std::borrow::Borrow;
 use std::collections::{HashMap, HashSet};
 use std::fmt;
@@ -34,7 +34,7 @@ pub struct Resolve {
     /// "Unknown" metadata. This is a collection of extra, unrecognized data
     /// found in the `[metadata]` section of `Cargo.lock`, preserved for
     /// forwards compatibility.
-    metadata: Metadata,
+    metadata: TomlLockfileMetadata,
     /// `[patch]` entries that did not match anything, preserved in
     /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused
     /// patches helps prevent Cargo from being forced to re-update the
@@ -156,7 +156,7 @@ impl Resolve {
         replacements: HashMap<PackageId, PackageId>,
         features: HashMap<PackageId, Vec<InternedString>>,
         checksums: HashMap<PackageId, Option<String>>,
-        metadata: Metadata,
+        metadata: TomlLockfileMetadata,
         unused_patches: Vec<PackageId>,
         version: ResolveVersion,
         summaries: HashMap<PackageId, Summary>,
@@ -394,7 +394,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated
         self.checksums.insert(pkg_id, Some(checksum));
     }
 
-    pub fn metadata(&self) -> &Metadata {
+    pub fn metadata(&self) -> &TomlLockfileMetadata {
         &self.metadata
     }
diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs
index 9f441283c..9066f1dec 100644
--- a/src/cargo/ops/lockfile.rs
+++ b/src/cargo/ops/lockfile.rs
@@ -6,7 +6,7 @@ use crate::util::Filesystem;
 use crate::util::errors::CargoResult;
 
 use anyhow::Context as _;
-use cargo_util_schemas::lockfile::EncodableResolve;
+use cargo_util_schemas::lockfile::TomlLockfile;
 
 pub const LOCKFILE_NAME: &str = "Cargo.lock";
 
@@ -24,7 +24,7 @@ pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult<Option<Resolve>> {
         .with_context(|| format!("failed to read file: {}", f.path().display()))?;
 
     let resolve = (|| -> CargoResult<Option<Resolve>> {
-        let v: EncodableResolve = toml::from_str(&s)?;
+        let v: TomlLockfile = toml::from_str(&s)?;
         Ok(Some(into_resolve(v, &s, ws)?))
     })()
     .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?;
@@ -208,8 +208,8 @@ fn are_equal_lockfiles(orig: &str, current: &str, ws: &Workspace<'_>) -> bool {
     // common case where we can update lock files.
     if !ws.gctx().lock_update_allowed() {
        let res: CargoResult<bool> = (|| {
-            let old: EncodableResolve = toml::from_str(orig)?;
-            let new: EncodableResolve = toml::from_str(current)?;
+            let old: TomlLockfile = toml::from_str(orig)?;
+            let new: TomlLockfile = toml::from_str(current)?;
             Ok(into_resolve(old, orig, ws)? == into_resolve(new, current, ws)?)
         })();
         if let Ok(true) = res {
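For downstream readers, here is a minimal usage sketch (not part of the patch) of the renamed schema types. It assumes `cargo-util-schemas` and `toml` as dependencies, that the schema error types implement `std::error::Error`, and simply mirrors the `toml::from_str::<TomlLockfile>()` and `.parse::<TomlLockfilePackageId>()` calls visible in the diff above; the lockfile excerpt is illustrative.

```rust
use cargo_util_schemas::lockfile::{TomlLockfile, TomlLockfilePackageId};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A tiny, hypothetical Cargo.lock excerpt.
    let lock_text = r#"
version = 4

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
"#;

    // Formerly `EncodableResolve`: deserialize the whole lockfile.
    let lockfile: TomlLockfile = toml::from_str(lock_text)?;
    assert_eq!(lockfile.version, Some(4));
    for pkg in lockfile.package.unwrap_or_default() {
        println!("{} {} ({:?})", pkg.name, pkg.version, pkg.source);
    }

    // Formerly `EncodablePackageId`: still parses the `name version (source)`
    // form used for `[metadata]` checksum keys.
    let id: TomlLockfilePackageId =
        "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)".parse()?;
    assert_eq!(id.name, "cfg-if");
    Ok(())
}
```

Only the Rust-facing names change in this diff; the serialized `Cargo.lock` format itself is untouched.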