refactor: rename lockfile schemas' naming scheme

Manifest schemas use `TomlManifest`. To match, we rename the lockfile
schemas to `TomlLockfile`.
This commit is contained in:
Vito Secona 2025-09-19 06:05:02 +07:00
parent d51d7f6cfc
commit 703988f0c2
5 changed files with 73 additions and 72 deletions

View File

@ -1,6 +1,6 @@
{ {
"$schema": "https://json-schema.org/draft/2020-12/schema", "$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "EncodableResolve", "title": "TomlLockfile",
"description": "The `Cargo.lock` structure.", "description": "The `Cargo.lock` structure.",
"type": "object", "type": "object",
"properties": { "properties": {
@ -18,14 +18,14 @@
"null" "null"
], ],
"items": { "items": {
"$ref": "#/$defs/EncodableDependency" "$ref": "#/$defs/TomlLockfileDependency"
} }
}, },
"root": { "root": {
"description": "`root` is optional to allow backward compatibility.", "description": "`root` is optional to allow backward compatibility.",
"anyOf": [ "anyOf": [
{ {
"$ref": "#/$defs/EncodableDependency" "$ref": "#/$defs/TomlLockfileDependency"
}, },
{ {
"type": "null" "type": "null"
@ -42,11 +42,11 @@
} }
}, },
"patch": { "patch": {
"$ref": "#/$defs/Patch" "$ref": "#/$defs/TomlLockfilePatch"
} }
}, },
"$defs": { "$defs": {
"EncodableDependency": { "TomlLockfileDependency": {
"type": "object", "type": "object",
"properties": { "properties": {
"name": { "name": {
@ -73,13 +73,13 @@
"null" "null"
], ],
"items": { "items": {
"$ref": "#/$defs/EncodablePackageId" "$ref": "#/$defs/TomlLockfilePackageId"
} }
}, },
"replace": { "replace": {
"anyOf": [ "anyOf": [
{ {
"$ref": "#/$defs/EncodablePackageId" "$ref": "#/$defs/TomlLockfilePackageId"
}, },
{ {
"type": "null" "type": "null"
@ -92,7 +92,7 @@
"version" "version"
] ]
}, },
"EncodablePackageId": { "TomlLockfilePackageId": {
"type": "object", "type": "object",
"properties": { "properties": {
"name": { "name": {
@ -115,13 +115,13 @@
"name" "name"
] ]
}, },
"Patch": { "TomlLockfilePatch": {
"type": "object", "type": "object",
"properties": { "properties": {
"unused": { "unused": {
"type": "array", "type": "array",
"items": { "items": {
"$ref": "#/$defs/EncodableDependency" "$ref": "#/$defs/TomlLockfileDependency"
} }
} }
}, },

View File

@ -10,25 +10,25 @@ use crate::core::{GitReference, SourceKind};
/// The `Cargo.lock` structure. /// The `Cargo.lock` structure.
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))] #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct EncodableResolve { pub struct TomlLockfile {
pub version: Option<u32>, pub version: Option<u32>,
pub package: Option<Vec<EncodableDependency>>, pub package: Option<Vec<TomlLockfileDependency>>,
/// `root` is optional to allow backward compatibility. /// `root` is optional to allow backward compatibility.
pub root: Option<EncodableDependency>, pub root: Option<TomlLockfileDependency>,
pub metadata: Option<Metadata>, pub metadata: Option<TomlLockfileMetadata>,
#[serde(default, skip_serializing_if = "Patch::is_empty")] #[serde(default, skip_serializing_if = "TomlLockfilePatch::is_empty")]
pub patch: Patch, pub patch: TomlLockfilePatch,
} }
#[derive(Serialize, Deserialize, Debug, Default)] #[derive(Serialize, Deserialize, Debug, Default)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))] #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct Patch { pub struct TomlLockfilePatch {
pub unused: Vec<EncodableDependency>, pub unused: Vec<TomlLockfileDependency>,
} }
pub type Metadata = BTreeMap<String, String>; pub type TomlLockfileMetadata = BTreeMap<String, String>;
impl Patch { impl TomlLockfilePatch {
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
self.unused.is_empty() self.unused.is_empty()
} }
@ -36,13 +36,13 @@ impl Patch {
#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)] #[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))] #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct EncodableDependency { pub struct TomlLockfileDependency {
pub name: String, pub name: String,
pub version: String, pub version: String,
pub source: Option<EncodableSourceId>, pub source: Option<TomlLockfileSourceId>,
pub checksum: Option<String>, pub checksum: Option<String>,
pub dependencies: Option<Vec<EncodablePackageId>>, pub dependencies: Option<Vec<TomlLockfilePackageId>>,
pub replace: Option<EncodablePackageId>, pub replace: Option<TomlLockfilePackageId>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -51,7 +51,7 @@ pub struct EncodableDependency {
derive(schemars::JsonSchema), derive(schemars::JsonSchema),
schemars(with = "String") schemars(with = "String")
)] )]
pub struct EncodableSourceId { pub struct TomlLockfileSourceId {
/// Full string of the source /// Full string of the source
source_str: String, source_str: String,
/// Used for sources ordering /// Used for sources ordering
@ -60,7 +60,7 @@ pub struct EncodableSourceId {
url: Url, url: Url,
} }
impl EncodableSourceId { impl TomlLockfileSourceId {
pub fn new(source: String) -> Result<Self, EncodableSourceIdError> { pub fn new(source: String) -> Result<Self, EncodableSourceIdError> {
let source_str = source.clone(); let source_str = source.clone();
let (kind, url) = source.split_once('+').ok_or_else(|| { let (kind, url) = source.split_once('+').ok_or_else(|| {
@ -109,7 +109,7 @@ impl EncodableSourceId {
} }
} }
impl ser::Serialize for EncodableSourceId { impl ser::Serialize for TomlLockfileSourceId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where where
S: ser::Serializer, S: ser::Serializer,
@ -118,39 +118,39 @@ impl ser::Serialize for EncodableSourceId {
} }
} }
impl<'de> de::Deserialize<'de> for EncodableSourceId { impl<'de> de::Deserialize<'de> for TomlLockfileSourceId {
fn deserialize<D>(d: D) -> Result<Self, D::Error> fn deserialize<D>(d: D) -> Result<Self, D::Error>
where where
D: de::Deserializer<'de>, D: de::Deserializer<'de>,
{ {
let s = String::deserialize(d)?; let s = String::deserialize(d)?;
Ok(EncodableSourceId::new(s).map_err(de::Error::custom)?) Ok(TomlLockfileSourceId::new(s).map_err(de::Error::custom)?)
} }
} }
impl std::hash::Hash for EncodableSourceId { impl std::hash::Hash for TomlLockfileSourceId {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.kind.hash(state); self.kind.hash(state);
self.url.hash(state); self.url.hash(state);
} }
} }
impl std::cmp::PartialEq for EncodableSourceId { impl std::cmp::PartialEq for TomlLockfileSourceId {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.kind == other.kind && self.url == other.url self.kind == other.kind && self.url == other.url
} }
} }
impl std::cmp::Eq for EncodableSourceId {} impl std::cmp::Eq for TomlLockfileSourceId {}
impl PartialOrd for EncodableSourceId { impl PartialOrd for TomlLockfileSourceId {
fn partial_cmp(&self, other: &EncodableSourceId) -> Option<Ordering> { fn partial_cmp(&self, other: &TomlLockfileSourceId) -> Option<Ordering> {
Some(self.cmp(other)) Some(self.cmp(other))
} }
} }
impl Ord for EncodableSourceId { impl Ord for TomlLockfileSourceId {
fn cmp(&self, other: &EncodableSourceId) -> Ordering { fn cmp(&self, other: &TomlLockfileSourceId) -> Ordering {
self.kind self.kind
.cmp(&other.kind) .cmp(&other.kind)
.then_with(|| self.url.cmp(&other.url)) .then_with(|| self.url.cmp(&other.url))
@ -159,13 +159,13 @@ impl Ord for EncodableSourceId {
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)] #[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))] #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct EncodablePackageId { pub struct TomlLockfilePackageId {
pub name: String, pub name: String,
pub version: Option<String>, pub version: Option<String>,
pub source: Option<EncodableSourceId>, pub source: Option<TomlLockfileSourceId>,
} }
impl fmt::Display for EncodablePackageId { impl fmt::Display for TomlLockfilePackageId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name)?; write!(f, "{}", self.name)?;
if let Some(s) = &self.version { if let Some(s) = &self.version {
@ -178,17 +178,17 @@ impl fmt::Display for EncodablePackageId {
} }
} }
impl FromStr for EncodablePackageId { impl FromStr for TomlLockfilePackageId {
type Err = EncodablePackageIdError; type Err = EncodablePackageIdError;
fn from_str(s: &str) -> Result<EncodablePackageId, Self::Err> { fn from_str(s: &str) -> Result<TomlLockfilePackageId, Self::Err> {
let mut s = s.splitn(3, ' '); let mut s = s.splitn(3, ' ');
let name = s.next().unwrap(); let name = s.next().unwrap();
let version = s.next(); let version = s.next();
let source_id = match s.next() { let source_id = match s.next() {
Some(s) => { Some(s) => {
if let Some(s) = s.strip_prefix('(').and_then(|s| s.strip_suffix(')')) { if let Some(s) = s.strip_prefix('(').and_then(|s| s.strip_suffix(')')) {
Some(EncodableSourceId::new(s.to_string())?) Some(TomlLockfileSourceId::new(s.to_string())?)
} else { } else {
return Err(EncodablePackageIdErrorKind::InvalidSerializedPackageId.into()); return Err(EncodablePackageIdErrorKind::InvalidSerializedPackageId.into());
} }
@ -196,7 +196,7 @@ impl FromStr for EncodablePackageId {
None => None, None => None,
}; };
Ok(EncodablePackageId { Ok(TomlLockfilePackageId {
name: name.to_string(), name: name.to_string(),
version: version.map(|v| v.to_string()), version: version.map(|v| v.to_string()),
source: source_id, source: source_id,
@ -204,7 +204,7 @@ impl FromStr for EncodablePackageId {
} }
} }
impl ser::Serialize for EncodablePackageId { impl ser::Serialize for TomlLockfilePackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where where
S: ser::Serializer, S: ser::Serializer,
@ -213,14 +213,14 @@ impl ser::Serialize for EncodablePackageId {
} }
} }
impl<'de> de::Deserialize<'de> for EncodablePackageId { impl<'de> de::Deserialize<'de> for TomlLockfilePackageId {
fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error> fn deserialize<D>(d: D) -> Result<TomlLockfilePackageId, D::Error>
where where
D: de::Deserializer<'de>, D: de::Deserializer<'de>,
{ {
String::deserialize(d).and_then(|string| { String::deserialize(d).and_then(|string| {
string string
.parse::<EncodablePackageId>() .parse::<TomlLockfilePackageId>()
.map_err(de::Error::custom) .map_err(de::Error::custom)
}) })
} }
@ -266,7 +266,7 @@ enum EncodablePackageIdErrorKind {
#[cfg(feature = "unstable-schema")] #[cfg(feature = "unstable-schema")]
#[test] #[test]
fn dump_lockfile_schema() { fn dump_lockfile_schema() {
let schema = schemars::schema_for!(crate::lockfile::EncodableResolve); let schema = schemars::schema_for!(crate::lockfile::TomlLockfile);
let dump = serde_json::to_string_pretty(&schema).unwrap(); let dump = serde_json::to_string_pretty(&schema).unwrap();
snapbox::assert_data_eq!(dump, snapbox::file!("../lockfile.schema.json").raw()); snapbox::assert_data_eq!(dump, snapbox::file!("../lockfile.schema.json").raw());
} }

View File

@ -118,7 +118,8 @@ use crate::util::interning::InternedString;
use crate::util::{Graph, internal}; use crate::util::{Graph, internal};
use anyhow::{Context as _, bail}; use anyhow::{Context as _, bail};
use cargo_util_schemas::lockfile::{ use cargo_util_schemas::lockfile::{
EncodableDependency, EncodablePackageId, EncodableResolve, EncodableSourceId, Patch, TomlLockfile, TomlLockfileDependency, TomlLockfilePackageId, TomlLockfilePatch,
TomlLockfileSourceId,
}; };
use serde::ser; use serde::ser;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
@ -133,7 +134,7 @@ use tracing::debug;
/// primary uses is to be used with `resolve_with_previous` to guide the /// primary uses is to be used with `resolve_with_previous` to guide the
/// resolver to create a complete Resolve. /// resolver to create a complete Resolve.
pub fn into_resolve( pub fn into_resolve(
resolve: EncodableResolve, resolve: TomlLockfile,
original: &str, original: &str,
ws: &Workspace<'_>, ws: &Workspace<'_>,
) -> CargoResult<Resolve> { ) -> CargoResult<Resolve> {
@ -176,7 +177,7 @@ pub fn into_resolve(
let mut live_pkgs = HashMap::new(); let mut live_pkgs = HashMap::new();
let mut all_pkgs = HashSet::new(); let mut all_pkgs = HashSet::new();
for pkg in packages.iter() { for pkg in packages.iter() {
let enc_id = EncodablePackageId { let enc_id = TomlLockfilePackageId {
name: pkg.name.clone(), name: pkg.name.clone(),
version: Some(pkg.version.clone()), version: Some(pkg.version.clone()),
source: pkg.source.clone(), source: pkg.source.clone(),
@ -228,7 +229,7 @@ pub fn into_resolve(
.insert(id.source_id(), *id); .insert(id.source_id(), *id);
} }
let mut lookup_id = |enc_id: &EncodablePackageId| -> Option<PackageId> { let mut lookup_id = |enc_id: &TomlLockfilePackageId| -> Option<PackageId> {
// The name of this package should always be in the larger list of // The name of this package should always be in the larger list of
// all packages. // all packages.
let by_version = map.get(enc_id.name.as_str())?; let by_version = map.get(enc_id.name.as_str())?;
@ -329,7 +330,7 @@ pub fn into_resolve(
for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
to_remove.push(k.to_string()); to_remove.push(k.to_string());
let k = k.strip_prefix(prefix).unwrap(); let k = k.strip_prefix(prefix).unwrap();
let enc_id: EncodablePackageId = k let enc_id: TomlLockfilePackageId = k
.parse() .parse()
.with_context(|| internal("invalid encoding of checksum in lockfile"))?; .with_context(|| internal("invalid encoding of checksum in lockfile"))?;
let Some(id) = lookup_id(&enc_id) else { let Some(id) = lookup_id(&enc_id) else {
@ -405,7 +406,7 @@ pub fn into_resolve(
fn get_source_id<'a>( fn get_source_id<'a>(
path_deps: &'a HashMap<String, HashMap<semver::Version, SourceId>>, path_deps: &'a HashMap<String, HashMap<semver::Version, SourceId>>,
pkg: &'a EncodableDependency, pkg: &'a TomlLockfileDependency,
) -> Option<&'a SourceId> { ) -> Option<&'a SourceId> {
path_deps.iter().find_map(|(name, version_source)| { path_deps.iter().find_map(|(name, version_source)| {
if name != &pkg.name || version_source.len() == 0 { if name != &pkg.name || version_source.len() == 0 {
@ -535,11 +536,11 @@ impl ser::Serialize for Resolve {
Some(metadata) Some(metadata)
}; };
let patch = Patch { let patch = TomlLockfilePatch {
unused: self unused: self
.unused_patches() .unused_patches()
.iter() .iter()
.map(|id| EncodableDependency { .map(|id| TomlLockfileDependency {
name: id.name().to_string(), name: id.name().to_string(),
version: id.version().to_string(), version: id.version().to_string(),
source: encodable_source_id(id.source_id(), self.version()), source: encodable_source_id(id.source_id(), self.version()),
@ -553,7 +554,7 @@ impl ser::Serialize for Resolve {
}) })
.collect(), .collect(),
}; };
EncodableResolve { TomlLockfile {
package: Some(encodable), package: Some(encodable),
root: None, root: None,
metadata, metadata,
@ -597,7 +598,7 @@ fn encodable_resolve_node(
id: PackageId, id: PackageId,
resolve: &Resolve, resolve: &Resolve,
state: &EncodeState<'_>, state: &EncodeState<'_>,
) -> EncodableDependency { ) -> TomlLockfileDependency {
let (replace, deps) = match resolve.replacement(id) { let (replace, deps) = match resolve.replacement(id) {
Some(id) => ( Some(id) => (
Some(encodable_package_id(id, state, resolve.version())), Some(encodable_package_id(id, state, resolve.version())),
@ -613,7 +614,7 @@ fn encodable_resolve_node(
} }
}; };
EncodableDependency { TomlLockfileDependency {
name: id.name().to_string(), name: id.name().to_string(),
version: id.version().to_string(), version: id.version().to_string(),
source: encodable_source_id(id.source_id(), resolve.version()), source: encodable_source_id(id.source_id(), resolve.version()),
@ -631,7 +632,7 @@ pub fn encodable_package_id(
id: PackageId, id: PackageId,
state: &EncodeState<'_>, state: &EncodeState<'_>,
resolve_version: ResolveVersion, resolve_version: ResolveVersion,
) -> EncodablePackageId { ) -> TomlLockfilePackageId {
let mut version = Some(id.version().to_string()); let mut version = Some(id.version().to_string());
let mut id_to_encode = id.source_id(); let mut id_to_encode = id.source_id();
if resolve_version <= ResolveVersion::V2 { if resolve_version <= ResolveVersion::V2 {
@ -652,22 +653,22 @@ pub fn encodable_package_id(
} }
} }
} }
EncodablePackageId { TomlLockfilePackageId {
name: id.name().to_string(), name: id.name().to_string(),
version, version,
source, source,
} }
} }
fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<EncodableSourceId> { fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<TomlLockfileSourceId> {
if id.is_path() { if id.is_path() {
None None
} else { } else {
Some( Some(
if version >= ResolveVersion::V4 { if version >= ResolveVersion::V4 {
EncodableSourceId::new(id.as_encoded_url().to_string()) TomlLockfileSourceId::new(id.as_encoded_url().to_string())
} else { } else {
EncodableSourceId::new(id.as_url().to_string()) TomlLockfileSourceId::new(id.as_url().to_string())
} }
.expect("source ID should have valid URLs"), .expect("source ID should have valid URLs"),
) )

View File

@ -6,7 +6,7 @@ use crate::core::{Dependency, PackageId, PackageIdSpec, PackageIdSpecQuery, Summ
use crate::util::Graph; use crate::util::Graph;
use crate::util::errors::CargoResult; use crate::util::errors::CargoResult;
use crate::util::interning::InternedString; use crate::util::interning::InternedString;
use cargo_util_schemas::lockfile::Metadata; use cargo_util_schemas::lockfile::TomlLockfileMetadata;
use std::borrow::Borrow; use std::borrow::Borrow;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::fmt; use std::fmt;
@ -34,7 +34,7 @@ pub struct Resolve {
/// "Unknown" metadata. This is a collection of extra, unrecognized data /// "Unknown" metadata. This is a collection of extra, unrecognized data
/// found in the `[metadata]` section of `Cargo.lock`, preserved for /// found in the `[metadata]` section of `Cargo.lock`, preserved for
/// forwards compatibility. /// forwards compatibility.
metadata: Metadata, metadata: TomlLockfileMetadata,
/// `[patch]` entries that did not match anything, preserved in /// `[patch]` entries that did not match anything, preserved in
/// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused
/// patches helps prevent Cargo from being forced to re-update the /// patches helps prevent Cargo from being forced to re-update the
@ -156,7 +156,7 @@ impl Resolve {
replacements: HashMap<PackageId, PackageId>, replacements: HashMap<PackageId, PackageId>,
features: HashMap<PackageId, Vec<InternedString>>, features: HashMap<PackageId, Vec<InternedString>>,
checksums: HashMap<PackageId, Option<String>>, checksums: HashMap<PackageId, Option<String>>,
metadata: Metadata, metadata: TomlLockfileMetadata,
unused_patches: Vec<PackageId>, unused_patches: Vec<PackageId>,
version: ResolveVersion, version: ResolveVersion,
summaries: HashMap<PackageId, Summary>, summaries: HashMap<PackageId, Summary>,
@ -394,7 +394,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated
self.checksums.insert(pkg_id, Some(checksum)); self.checksums.insert(pkg_id, Some(checksum));
} }
pub fn metadata(&self) -> &Metadata { pub fn metadata(&self) -> &TomlLockfileMetadata {
&self.metadata &self.metadata
} }

View File

@ -6,7 +6,7 @@ use crate::util::Filesystem;
use crate::util::errors::CargoResult; use crate::util::errors::CargoResult;
use anyhow::Context as _; use anyhow::Context as _;
use cargo_util_schemas::lockfile::EncodableResolve; use cargo_util_schemas::lockfile::TomlLockfile;
pub const LOCKFILE_NAME: &str = "Cargo.lock"; pub const LOCKFILE_NAME: &str = "Cargo.lock";
@ -24,7 +24,7 @@ pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult<Option<Resolve>> {
.with_context(|| format!("failed to read file: {}", f.path().display()))?; .with_context(|| format!("failed to read file: {}", f.path().display()))?;
let resolve = (|| -> CargoResult<Option<Resolve>> { let resolve = (|| -> CargoResult<Option<Resolve>> {
let v: EncodableResolve = toml::from_str(&s)?; let v: TomlLockfile = toml::from_str(&s)?;
Ok(Some(into_resolve(v, &s, ws)?)) Ok(Some(into_resolve(v, &s, ws)?))
})() })()
.with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?; .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?;
@ -208,8 +208,8 @@ fn are_equal_lockfiles(orig: &str, current: &str, ws: &Workspace<'_>) -> bool {
// common case where we can update lock files. // common case where we can update lock files.
if !ws.gctx().lock_update_allowed() { if !ws.gctx().lock_update_allowed() {
let res: CargoResult<bool> = (|| { let res: CargoResult<bool> = (|| {
let old: EncodableResolve = toml::from_str(orig)?; let old: TomlLockfile = toml::from_str(orig)?;
let new: EncodableResolve = toml::from_str(current)?; let new: TomlLockfile = toml::from_str(current)?;
Ok(into_resolve(old, orig, ws)? == into_resolve(new, current, ws)?) Ok(into_resolve(old, orig, ws)? == into_resolve(new, current, ws)?)
})(); })();
if let Ok(true) = res { if let Ok(true) = res {