mirror of
https://github.com/rust-lang/cargo.git
synced 2025-09-25 11:14:46 +00:00
feat(cargo-util-schemas): Move lockfile schemas (#15980)
### What does this PR try to resolve? As part of the cargo plumbing commands effort, we're trying to make lockfiles more accessible to third-party uses. This change moves the lockfile schemas to `cargo-util-schemas`, whereas they previously lived under `cargo` and were relatively hidden. See also: https://github.com/crate-ci/cargo-plumbing/issues/82 ### How to test and review this PR? Review commit by commit to see the changes made. My main concern is performance, as the implementation repeatedly calls `SourceId::from_url` and I'm not sure whether that cost is negligible. r? @epage
This commit is contained in:
commit
24ef070a95
6
Cargo.lock
generated
6
Cargo.lock
generated
@ -334,7 +334,7 @@ dependencies = [
|
||||
"cargo-platform 0.3.1",
|
||||
"cargo-test-support",
|
||||
"cargo-util",
|
||||
"cargo-util-schemas 0.10.1",
|
||||
"cargo-util-schemas 0.10.2",
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"color-print",
|
||||
@ -537,7 +537,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cargo-util-schemas"
|
||||
version = "0.10.1"
|
||||
version = "0.10.2"
|
||||
dependencies = [
|
||||
"schemars",
|
||||
"semver",
|
||||
@ -3712,7 +3712,7 @@ dependencies = [
|
||||
"cargo",
|
||||
"cargo-platform 0.3.1",
|
||||
"cargo-util",
|
||||
"cargo-util-schemas 0.10.1",
|
||||
"cargo-util-schemas 0.10.2",
|
||||
"proptest",
|
||||
"varisat",
|
||||
]
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "cargo-util-schemas"
|
||||
version = "0.10.1"
|
||||
version = "0.10.2"
|
||||
rust-version = "1.89" # MSRV:1
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
133
crates/cargo-util-schemas/lockfile.schema.json
Normal file
133
crates/cargo-util-schemas/lockfile.schema.json
Normal file
@ -0,0 +1,133 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "TomlLockfile",
|
||||
"description": "The `Cargo.lock` structure.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"version": {
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
],
|
||||
"format": "uint32",
|
||||
"minimum": 0
|
||||
},
|
||||
"package": {
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/$defs/TomlLockfileDependency"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"description": "`root` is optional to allow backward compatibility.",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/$defs/TomlLockfileDependency"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"metadata": {
|
||||
"type": [
|
||||
"object",
|
||||
"null"
|
||||
],
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"patch": {
|
||||
"$ref": "#/$defs/TomlLockfilePatch"
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"TomlLockfileDependency": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"version": {
|
||||
"type": "string"
|
||||
},
|
||||
"source": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"checksum": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/$defs/TomlLockfilePackageId"
|
||||
}
|
||||
},
|
||||
"replace": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/$defs/TomlLockfilePackageId"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"version"
|
||||
]
|
||||
},
|
||||
"TomlLockfilePackageId": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"version": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"source": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name"
|
||||
]
|
||||
},
|
||||
"TomlLockfilePatch": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"unused": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/$defs/TomlLockfileDependency"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"unused"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
@ -10,6 +10,7 @@
|
||||
|
||||
pub mod core;
|
||||
pub mod index;
|
||||
pub mod lockfile;
|
||||
pub mod manifest;
|
||||
pub mod messages;
|
||||
#[cfg(feature = "unstable-schema")]
|
||||
|
272
crates/cargo-util-schemas/src/lockfile.rs
Normal file
272
crates/cargo-util-schemas/src/lockfile.rs
Normal file
@ -0,0 +1,272 @@
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt;
|
||||
use std::{cmp::Ordering, str::FromStr};
|
||||
|
||||
use serde::{Deserialize, Serialize, de, ser};
|
||||
use url::Url;
|
||||
|
||||
use crate::core::{GitReference, SourceKind};
|
||||
|
||||
/// The `Cargo.lock` structure.
///
/// Top-level serde model for a lockfile; field optionality mirrors what
/// older lockfile formats omitted, so historical files keep deserializing.
#[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct TomlLockfile {
    /// Lockfile format version; `None` for early formats that carried no
    /// explicit version marker.
    pub version: Option<u32>,
    /// All locked packages (`[[package]]` tables).
    pub package: Option<Vec<TomlLockfileDependency>>,
    /// `root` is optional to allow backward compatibility.
    pub root: Option<TomlLockfileDependency>,
    /// Free-form `[metadata]` table (e.g. `checksum ...` keys in the V1
    /// format).
    pub metadata: Option<TomlLockfileMetadata>,
    /// Unused `[patch]` entries; skipped on output when empty.
    #[serde(default, skip_serializing_if = "TomlLockfilePatch::is_empty")]
    pub patch: TomlLockfilePatch,
}
|
||||
|
||||
/// The `[patch]` section of the lockfile, listing declared patches that
/// were not used by the resolve.
#[derive(Serialize, Deserialize, Debug, Default)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct TomlLockfilePatch {
    /// Patched dependencies that ended up unused.
    pub unused: Vec<TomlLockfileDependency>,
}
|
||||
|
||||
/// Free-form `[metadata]` table: string keys mapped to string values.
pub type TomlLockfileMetadata = BTreeMap<String, String>;
|
||||
|
||||
impl TomlLockfilePatch {
    /// True when there are no unused patches; used by serde
    /// (`skip_serializing_if`) to omit an empty `[patch]` section.
    fn is_empty(&self) -> bool {
        self.unused.is_empty()
    }
}
|
||||
|
||||
/// A single `[[package]]` entry in the lockfile.
#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct TomlLockfileDependency {
    pub name: String,
    pub version: String,
    /// Source the package came from; absent for workspace/path packages.
    pub source: Option<TomlLockfileSourceId>,
    /// Content checksum; absent in the V1 format, where checksums lived
    /// under `[metadata]` instead.
    pub checksum: Option<String>,
    /// Direct dependencies of this package.
    pub dependencies: Option<Vec<TomlLockfilePackageId>>,
    /// Replacement for this package; when present, `dependencies` is
    /// expected to be `None`.
    pub replace: Option<TomlLockfilePackageId>,
}
|
||||
|
||||
/// A package source (e.g. `registry+https://...`) as written in a lockfile.
///
/// Equality, ordering, and hashing use only `kind` and `url`; the raw
/// `source_str` is retained so the original text can be round-tripped.
#[derive(Debug, Clone)]
#[cfg_attr(
    feature = "unstable-schema",
    derive(schemars::JsonSchema),
    schemars(with = "String")
)]
pub struct TomlLockfileSourceId {
    /// Full string of the source
    source_str: String,
    /// Used for sources ordering
    kind: SourceKind,
    /// Used for sources ordering
    url: Url,
}
|
||||
|
||||
impl TomlLockfileSourceId {
|
||||
pub fn new(source: String) -> Result<Self, EncodableSourceIdError> {
|
||||
let source_str = source.clone();
|
||||
let (kind, url) = source.split_once('+').ok_or_else(|| {
|
||||
EncodableSourceIdError(EncodableSourceIdErrorKind::InvalidSource(source.clone()).into())
|
||||
})?;
|
||||
|
||||
let url = Url::parse(url).map_err(|msg| EncodableSourceIdErrorKind::InvalidUrl {
|
||||
url: url.to_string(),
|
||||
msg: msg.to_string(),
|
||||
})?;
|
||||
|
||||
let kind = match kind {
|
||||
"git" => {
|
||||
let reference = GitReference::from_query(url.query_pairs());
|
||||
SourceKind::Git(reference)
|
||||
}
|
||||
"registry" => SourceKind::Registry,
|
||||
"sparse" => SourceKind::SparseRegistry,
|
||||
"path" => SourceKind::Path,
|
||||
kind => {
|
||||
return Err(EncodableSourceIdErrorKind::UnsupportedSource(kind.to_string()).into());
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
source_str,
|
||||
kind,
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> &SourceKind {
|
||||
&self.kind
|
||||
}
|
||||
|
||||
pub fn url(&self) -> &Url {
|
||||
&self.url
|
||||
}
|
||||
|
||||
pub fn source_str(&self) -> &String {
|
||||
&self.source_str
|
||||
}
|
||||
|
||||
pub fn as_url(&self) -> impl fmt::Display + '_ {
|
||||
self.source_str.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl ser::Serialize for TomlLockfileSourceId {
    /// Serializes as the full `<kind>+<url>` source string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        serializer.collect_str(&self.as_url())
    }
}
|
||||
|
||||
impl<'de> de::Deserialize<'de> for TomlLockfileSourceId {
|
||||
fn deserialize<D>(d: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
let s = String::deserialize(d)?;
|
||||
Ok(TomlLockfileSourceId::new(s).map_err(de::Error::custom)?)
|
||||
}
|
||||
}
|
||||
|
||||
/// Hashes only `kind` and `url`, matching the manual `PartialEq` impl
/// (the raw `source_str` is deliberately excluded from both).
impl std::hash::Hash for TomlLockfileSourceId {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.kind.hash(state);
        self.url.hash(state);
    }
}
|
||||
|
||||
impl std::cmp::PartialEq for TomlLockfileSourceId {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.kind == other.kind && self.url == other.url
|
||||
}
|
||||
}
|
||||
|
||||
impl std::cmp::Eq for TomlLockfileSourceId {}
|
||||
|
||||
impl PartialOrd for TomlLockfileSourceId {
    // Delegates to `Ord`, the canonical pattern for totally ordered types.
    fn partial_cmp(&self, other: &TomlLockfileSourceId) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
|
||||
|
||||
impl Ord for TomlLockfileSourceId {
|
||||
fn cmp(&self, other: &TomlLockfileSourceId) -> Ordering {
|
||||
self.kind
|
||||
.cmp(&other.kind)
|
||||
.then_with(|| self.url.cmp(&other.url))
|
||||
}
|
||||
}
|
||||
|
||||
/// A package reference inside a dependency list: a name plus optional
/// version and source (newer lockfile formats omit them when the
/// reference is unambiguous).
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
pub struct TomlLockfilePackageId {
    pub name: String,
    pub version: Option<String>,
    pub source: Option<TomlLockfileSourceId>,
}
|
||||
|
||||
impl fmt::Display for TomlLockfilePackageId {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.name)?;
|
||||
if let Some(s) = &self.version {
|
||||
write!(f, " {}", s)?;
|
||||
}
|
||||
if let Some(s) = &self.source {
|
||||
write!(f, " ({})", s.as_url())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for TomlLockfilePackageId {
|
||||
type Err = EncodablePackageIdError;
|
||||
|
||||
fn from_str(s: &str) -> Result<TomlLockfilePackageId, Self::Err> {
|
||||
let mut s = s.splitn(3, ' ');
|
||||
let name = s.next().unwrap();
|
||||
let version = s.next();
|
||||
let source_id = match s.next() {
|
||||
Some(s) => {
|
||||
if let Some(s) = s.strip_prefix('(').and_then(|s| s.strip_suffix(')')) {
|
||||
Some(TomlLockfileSourceId::new(s.to_string())?)
|
||||
} else {
|
||||
return Err(EncodablePackageIdErrorKind::InvalidSerializedPackageId.into());
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok(TomlLockfilePackageId {
|
||||
name: name.to_string(),
|
||||
version: version.map(|v| v.to_string()),
|
||||
source: source_id,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ser::Serialize for TomlLockfilePackageId {
    /// Serializes via the `Display` form: `name[ version[ (source)]]`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        serializer.collect_str(self)
    }
}
|
||||
|
||||
impl<'de> de::Deserialize<'de> for TomlLockfilePackageId {
|
||||
fn deserialize<D>(d: D) -> Result<TomlLockfilePackageId, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
String::deserialize(d).and_then(|string| {
|
||||
string
|
||||
.parse::<TomlLockfilePackageId>()
|
||||
.map_err(de::Error::custom)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Error from parsing a [`TomlLockfileSourceId`] out of a lockfile
/// source string; the concrete cause lives in the private kind enum.
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct EncodableSourceIdError(#[from] EncodableSourceIdErrorKind);
|
||||
|
||||
/// Reasons a source string fails to parse; private and `#[non_exhaustive]`
/// so variants can be added without breaking the public error type.
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
enum EncodableSourceIdErrorKind {
    /// The string had no `<kind>+<url>` separator.
    #[error("invalid source `{0}`")]
    InvalidSource(String),

    /// The URL portion failed to parse.
    #[error("invalid url `{url}`: {msg}")]
    InvalidUrl { url: String, msg: String },

    /// The kind prefix was not `git`, `registry`, `sparse`, or `path`.
    #[error("unsupported source protocol: {0}")]
    UnsupportedSource(String),
}
|
||||
|
||||
/// Error from parsing a [`TomlLockfilePackageId`] from its string form;
/// the concrete cause lives in the private kind enum.
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct EncodablePackageIdError(#[from] EncodablePackageIdErrorKind);
|
||||
|
||||
impl From<EncodableSourceIdError> for EncodablePackageIdError {
|
||||
fn from(value: EncodableSourceIdError) -> Self {
|
||||
EncodablePackageIdErrorKind::Source(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
#[non_exhaustive]
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
enum EncodablePackageIdErrorKind {
|
||||
#[error("invalid serialied PackageId")]
|
||||
InvalidSerializedPackageId,
|
||||
|
||||
#[error(transparent)]
|
||||
Source(#[from] EncodableSourceIdError),
|
||||
}
|
||||
|
||||
/// Snapshot test: regenerates the JSON schema for [`TomlLockfile`] and
/// compares it against the checked-in `lockfile.schema.json`.
#[cfg(feature = "unstable-schema")]
#[test]
fn dump_lockfile_schema() {
    let schema = schemars::schema_for!(crate::lockfile::TomlLockfile);
    let dump = serde_json::to_string_pretty(&schema).unwrap();
    snapbox::assert_data_eq!(dump, snapbox::file!("../lockfile.schema.json").raw());
}
|
@ -117,324 +117,313 @@ use crate::util::errors::CargoResult;
|
||||
use crate::util::interning::InternedString;
|
||||
use crate::util::{Graph, internal};
|
||||
use anyhow::{Context as _, bail};
|
||||
use serde::de;
|
||||
use cargo_util_schemas::lockfile::{
|
||||
TomlLockfile, TomlLockfileDependency, TomlLockfilePackageId, TomlLockfilePatch,
|
||||
TomlLockfileSourceId,
|
||||
};
|
||||
use serde::ser;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::{BTreeMap, HashMap, HashSet};
|
||||
use std::fmt;
|
||||
use std::str::FromStr;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use tracing::debug;
|
||||
|
||||
/// The `Cargo.lock` structure.
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct EncodableResolve {
|
||||
version: Option<u32>,
|
||||
package: Option<Vec<EncodableDependency>>,
|
||||
/// `root` is optional to allow backward compatibility.
|
||||
root: Option<EncodableDependency>,
|
||||
metadata: Option<Metadata>,
|
||||
#[serde(default, skip_serializing_if = "Patch::is_empty")]
|
||||
patch: Patch,
|
||||
}
|
||||
/// Convert a `Cargo.lock` to a Resolve.
|
||||
///
|
||||
/// Note that this `Resolve` is not "complete". For example, the
|
||||
/// dependencies do not know the difference between regular/dev/build
|
||||
/// dependencies, so they are not filled in. It also does not include
|
||||
/// `features`. Care should be taken when using this Resolve. One of the
|
||||
/// primary uses is to be used with `resolve_with_previous` to guide the
|
||||
/// resolver to create a complete Resolve.
|
||||
pub fn into_resolve(
|
||||
resolve: TomlLockfile,
|
||||
original: &str,
|
||||
ws: &Workspace<'_>,
|
||||
) -> CargoResult<Resolve> {
|
||||
let path_deps: HashMap<String, HashMap<semver::Version, SourceId>> = build_path_deps(ws)?;
|
||||
let mut checksums = HashMap::new();
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Default)]
|
||||
struct Patch {
|
||||
unused: Vec<EncodableDependency>,
|
||||
}
|
||||
|
||||
pub type Metadata = BTreeMap<String, String>;
|
||||
|
||||
impl EncodableResolve {
|
||||
/// Convert a `Cargo.lock` to a Resolve.
|
||||
///
|
||||
/// Note that this `Resolve` is not "complete". For example, the
|
||||
/// dependencies do not know the difference between regular/dev/build
|
||||
/// dependencies, so they are not filled in. It also does not include
|
||||
/// `features`. Care should be taken when using this Resolve. One of the
|
||||
/// primary uses is to be used with `resolve_with_previous` to guide the
|
||||
/// resolver to create a complete Resolve.
|
||||
pub fn into_resolve(self, original: &str, ws: &Workspace<'_>) -> CargoResult<Resolve> {
|
||||
let path_deps: HashMap<String, HashMap<semver::Version, SourceId>> = build_path_deps(ws)?;
|
||||
let mut checksums = HashMap::new();
|
||||
|
||||
let mut version = match self.version {
|
||||
Some(n @ 5) if ws.gctx().nightly_features_allowed => {
|
||||
if ws.gctx().cli_unstable().next_lockfile_bump {
|
||||
ResolveVersion::V5
|
||||
} else {
|
||||
anyhow::bail!("lock file version `{n}` requires `-Znext-lockfile-bump`");
|
||||
}
|
||||
}
|
||||
Some(4) => ResolveVersion::V4,
|
||||
Some(3) => ResolveVersion::V3,
|
||||
Some(n) => bail!(
|
||||
"lock file version `{}` was found, but this version of Cargo \
|
||||
does not understand this lock file, perhaps Cargo needs \
|
||||
to be updated?",
|
||||
n,
|
||||
),
|
||||
// Historically Cargo did not have a version indicator in lock
|
||||
// files, so this could either be the V1 or V2 encoding. We assume
|
||||
// an older format is being parsed until we see so otherwise.
|
||||
None => ResolveVersion::V1,
|
||||
};
|
||||
|
||||
let packages = {
|
||||
let mut packages = self.package.unwrap_or_default();
|
||||
if let Some(root) = self.root {
|
||||
packages.insert(0, root);
|
||||
}
|
||||
packages
|
||||
};
|
||||
|
||||
// `PackageId`s in the lock file don't include the `source` part
|
||||
// for workspace members, so we reconstruct proper IDs.
|
||||
let live_pkgs = {
|
||||
let mut live_pkgs = HashMap::new();
|
||||
let mut all_pkgs = HashSet::new();
|
||||
for pkg in packages.iter() {
|
||||
let enc_id = EncodablePackageId {
|
||||
name: pkg.name.clone(),
|
||||
version: Some(pkg.version.clone()),
|
||||
source: pkg.source.clone(),
|
||||
};
|
||||
|
||||
if !all_pkgs.insert(enc_id.clone()) {
|
||||
anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name);
|
||||
}
|
||||
let id = match pkg
|
||||
.source
|
||||
.as_deref()
|
||||
.or_else(|| get_source_id(&path_deps, pkg))
|
||||
{
|
||||
// We failed to find a local package in the workspace.
|
||||
// It must have been removed and should be ignored.
|
||||
None => {
|
||||
debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
|
||||
continue;
|
||||
}
|
||||
Some(&source) => PackageId::try_new(&pkg.name, &pkg.version, source)?,
|
||||
};
|
||||
|
||||
// If a package has a checksum listed directly on it then record
|
||||
// that here, and we also bump our version up to 2 since V1
|
||||
// didn't ever encode this field.
|
||||
if let Some(cksum) = &pkg.checksum {
|
||||
version = version.max(ResolveVersion::V2);
|
||||
checksums.insert(id, Some(cksum.clone()));
|
||||
}
|
||||
|
||||
assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
|
||||
}
|
||||
live_pkgs
|
||||
};
|
||||
|
||||
// When decoding a V2 version the edges in `dependencies` aren't
|
||||
// guaranteed to have either version or source information. This `map`
|
||||
// is used to find package ids even if dependencies have missing
|
||||
// information. This map is from name to version to source to actual
|
||||
// package ID. (various levels to drill down step by step)
|
||||
let mut map = HashMap::new();
|
||||
for (id, _) in live_pkgs.values() {
|
||||
map.entry(id.name().as_str())
|
||||
.or_insert_with(HashMap::new)
|
||||
.entry(id.version().to_string())
|
||||
.or_insert_with(HashMap::new)
|
||||
.insert(id.source_id(), *id);
|
||||
}
|
||||
|
||||
let mut lookup_id = |enc_id: &EncodablePackageId| -> Option<PackageId> {
|
||||
// The name of this package should always be in the larger list of
|
||||
// all packages.
|
||||
let by_version = map.get(enc_id.name.as_str())?;
|
||||
|
||||
// If the version is provided, look that up. Otherwise if the
|
||||
// version isn't provided this is a V2 manifest and we should only
|
||||
// have one version for this name. If we have more than one version
|
||||
// for the name then it's ambiguous which one we'd use. That
|
||||
// shouldn't ever actually happen but in theory bad git merges could
|
||||
// produce invalid lock files, so silently ignore these cases.
|
||||
let by_source = match &enc_id.version {
|
||||
Some(version) => by_version.get(version)?,
|
||||
None => {
|
||||
version = version.max(ResolveVersion::V2);
|
||||
if by_version.len() == 1 {
|
||||
by_version.values().next().unwrap()
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// This is basically the same as above. Note though that `source` is
|
||||
// always missing for path dependencies regardless of serialization
|
||||
// format. That means we have to handle the `None` case a bit more
|
||||
// carefully.
|
||||
match &enc_id.source {
|
||||
Some(source) => by_source.get(source).cloned(),
|
||||
None => {
|
||||
// Look through all possible packages ids for this
|
||||
// name/version. If there's only one `path` dependency then
|
||||
// we are hardcoded to use that since `path` dependencies
|
||||
// can't have a source listed.
|
||||
let mut path_packages = by_source.values().filter(|p| p.source_id().is_path());
|
||||
if let Some(path) = path_packages.next() {
|
||||
if path_packages.next().is_some() {
|
||||
return None;
|
||||
}
|
||||
Some(*path)
|
||||
|
||||
// ... otherwise if there's only one then we must be
|
||||
// implicitly using that one due to a V2 serialization of
|
||||
// the lock file
|
||||
} else if by_source.len() == 1 {
|
||||
let id = by_source.values().next().unwrap();
|
||||
version = version.max(ResolveVersion::V2);
|
||||
Some(*id)
|
||||
|
||||
// ... and failing that we probably had a bad git merge of
|
||||
// `Cargo.lock` or something like that, so just ignore this.
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let mut g = Graph::new();
|
||||
|
||||
for (id, _) in live_pkgs.values() {
|
||||
g.add(*id);
|
||||
}
|
||||
|
||||
for &(ref id, pkg) in live_pkgs.values() {
|
||||
let Some(ref deps) = pkg.dependencies else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for edge in deps.iter() {
|
||||
if let Some(to_depend_on) = lookup_id(edge) {
|
||||
g.link(*id, to_depend_on);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let replacements = {
|
||||
let mut replacements = HashMap::new();
|
||||
for &(ref id, pkg) in live_pkgs.values() {
|
||||
if let Some(ref replace) = pkg.replace {
|
||||
assert!(pkg.dependencies.is_none());
|
||||
if let Some(replace_id) = lookup_id(replace) {
|
||||
replacements.insert(*id, replace_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
replacements
|
||||
};
|
||||
|
||||
let mut metadata = self.metadata.unwrap_or_default();
|
||||
|
||||
// In the V1 serialization formats all checksums were listed in the lock
|
||||
// file in the `[metadata]` section, so if we're still V1 then look for
|
||||
// that here.
|
||||
let prefix = "checksum ";
|
||||
let mut to_remove = Vec::new();
|
||||
for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
|
||||
to_remove.push(k.to_string());
|
||||
let k = k.strip_prefix(prefix).unwrap();
|
||||
let enc_id: EncodablePackageId = k
|
||||
.parse()
|
||||
.with_context(|| internal("invalid encoding of checksum in lockfile"))?;
|
||||
let Some(id) = lookup_id(&enc_id) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let v = if v == "<none>" {
|
||||
None
|
||||
let mut version = match resolve.version {
|
||||
Some(n @ 5) if ws.gctx().nightly_features_allowed => {
|
||||
if ws.gctx().cli_unstable().next_lockfile_bump {
|
||||
ResolveVersion::V5
|
||||
} else {
|
||||
Some(v.to_string())
|
||||
};
|
||||
checksums.insert(id, v);
|
||||
}
|
||||
// If `checksum` was listed in `[metadata]` but we were previously
|
||||
// listed as `V2` then assume some sort of bad git merge happened, so
|
||||
// discard all checksums and let's regenerate them later.
|
||||
if !to_remove.is_empty() && version >= ResolveVersion::V2 {
|
||||
checksums.drain();
|
||||
}
|
||||
for k in to_remove {
|
||||
metadata.remove(&k);
|
||||
anyhow::bail!("lock file version `{n}` requires `-Znext-lockfile-bump`");
|
||||
}
|
||||
}
|
||||
Some(4) => ResolveVersion::V4,
|
||||
Some(3) => ResolveVersion::V3,
|
||||
Some(n) => bail!(
|
||||
"lock file version `{}` was found, but this version of Cargo \
|
||||
does not understand this lock file, perhaps Cargo needs \
|
||||
to be updated?",
|
||||
n,
|
||||
),
|
||||
// Historically Cargo did not have a version indicator in lock
|
||||
// files, so this could either be the V1 or V2 encoding. We assume
|
||||
// an older format is being parsed until we see so otherwise.
|
||||
None => ResolveVersion::V1,
|
||||
};
|
||||
|
||||
let mut unused_patches = Vec::new();
|
||||
for pkg in self.patch.unused {
|
||||
let packages = {
|
||||
let mut packages = resolve.package.unwrap_or_default();
|
||||
if let Some(root) = resolve.root {
|
||||
packages.insert(0, root);
|
||||
}
|
||||
packages
|
||||
};
|
||||
|
||||
// `PackageId`s in the lock file don't include the `source` part
|
||||
// for workspace members, so we reconstruct proper IDs.
|
||||
let live_pkgs = {
|
||||
let mut live_pkgs = HashMap::new();
|
||||
let mut all_pkgs = HashSet::new();
|
||||
for pkg in packages.iter() {
|
||||
let enc_id = TomlLockfilePackageId {
|
||||
name: pkg.name.clone(),
|
||||
version: Some(pkg.version.clone()),
|
||||
source: pkg.source.clone(),
|
||||
};
|
||||
|
||||
if !all_pkgs.insert(enc_id.clone()) {
|
||||
anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name);
|
||||
}
|
||||
let id = match pkg
|
||||
.source
|
||||
.as_deref()
|
||||
.or_else(|| get_source_id(&path_deps, &pkg))
|
||||
.as_ref()
|
||||
.map(|source| SourceId::from_url(&source.source_str()))
|
||||
.transpose()?
|
||||
.or_else(|| get_source_id(&path_deps, &pkg).copied())
|
||||
{
|
||||
Some(&src) => PackageId::try_new(&pkg.name, &pkg.version, src)?,
|
||||
None => continue,
|
||||
// We failed to find a local package in the workspace.
|
||||
// It must have been removed and should be ignored.
|
||||
None => {
|
||||
debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
|
||||
continue;
|
||||
}
|
||||
Some(source) => PackageId::try_new(&pkg.name, &pkg.version, source)?,
|
||||
};
|
||||
unused_patches.push(id);
|
||||
|
||||
// If a package has a checksum listed directly on it then record
|
||||
// that here, and we also bump our version up to 2 since V1
|
||||
// didn't ever encode this field.
|
||||
if let Some(cksum) = &pkg.checksum {
|
||||
version = version.max(ResolveVersion::V2);
|
||||
checksums.insert(id, Some(cksum.clone()));
|
||||
}
|
||||
|
||||
assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
|
||||
}
|
||||
live_pkgs
|
||||
};
|
||||
|
||||
// We have a curious issue where in the "v1 format" we buggily had a
|
||||
// trailing blank line at the end of lock files under some specific
|
||||
// conditions.
|
||||
//
|
||||
// Cargo is trying to write new lockfiles in the "v2 format" but if you
|
||||
// have no dependencies, for example, then the lockfile encoded won't
|
||||
// really have any indicator that it's in the new format (no
|
||||
// dependencies or checksums listed). This means that if you type `cargo
|
||||
// new` followed by `cargo build` it will generate a "v2 format" lock
|
||||
// file since none previously existed. When reading this on the next
|
||||
// `cargo build`, however, it generates a new lock file because when
|
||||
// reading in that lockfile we think it's the v1 format.
|
||||
//
|
||||
// To help fix this issue we special case here. If our lockfile only has
|
||||
// one trailing newline, not two, *and* it only has one package, then
|
||||
// this is actually the v2 format.
|
||||
if original.ends_with('\n')
|
||||
&& !original.ends_with("\n\n")
|
||||
&& version == ResolveVersion::V1
|
||||
&& g.iter().count() == 1
|
||||
{
|
||||
version = ResolveVersion::V2;
|
||||
}
|
||||
// When decoding a V2 version the edges in `dependencies` aren't
|
||||
// guaranteed to have either version or source information. This `map`
|
||||
// is used to find package ids even if dependencies have missing
|
||||
// information. This map is from name to version to source to actual
|
||||
// package ID. (various levels to drill down step by step)
|
||||
let mut map = HashMap::new();
|
||||
for (id, _) in live_pkgs.values() {
|
||||
map.entry(id.name().as_str())
|
||||
.or_insert_with(HashMap::new)
|
||||
.entry(id.version().to_string())
|
||||
.or_insert_with(HashMap::new)
|
||||
.insert(id.source_id(), *id);
|
||||
}
|
||||
|
||||
return Ok(Resolve::new(
|
||||
g,
|
||||
replacements,
|
||||
HashMap::new(),
|
||||
checksums,
|
||||
metadata,
|
||||
unused_patches,
|
||||
version,
|
||||
HashMap::new(),
|
||||
));
|
||||
let mut lookup_id = |enc_id: &TomlLockfilePackageId| -> Option<PackageId> {
|
||||
// The name of this package should always be in the larger list of
|
||||
// all packages.
|
||||
let by_version = map.get(enc_id.name.as_str())?;
|
||||
|
||||
fn get_source_id<'a>(
|
||||
path_deps: &'a HashMap<String, HashMap<semver::Version, SourceId>>,
|
||||
pkg: &'a EncodableDependency,
|
||||
) -> Option<&'a SourceId> {
|
||||
path_deps.iter().find_map(|(name, version_source)| {
|
||||
if name != &pkg.name || version_source.len() == 0 {
|
||||
// If the version is provided, look that up. Otherwise if the
|
||||
// version isn't provided this is a V2 manifest and we should only
|
||||
// have one version for this name. If we have more than one version
|
||||
// for the name then it's ambiguous which one we'd use. That
|
||||
// shouldn't ever actually happen but in theory bad git merges could
|
||||
// produce invalid lock files, so silently ignore these cases.
|
||||
let by_source = match &enc_id.version {
|
||||
Some(version) => by_version.get(version)?,
|
||||
None => {
|
||||
version = version.max(ResolveVersion::V2);
|
||||
if by_version.len() == 1 {
|
||||
by_version.values().next().unwrap()
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
if version_source.len() == 1 {
|
||||
return Some(version_source.values().next().unwrap());
|
||||
}
|
||||
// If there are multiple candidates for the same name, it needs to be determined by combining versions (See #13405).
|
||||
if let Ok(pkg_version) = pkg.version.parse::<semver::Version>() {
|
||||
if let Some(source_id) = version_source.get(&pkg_version) {
|
||||
return Some(source_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
None
|
||||
})
|
||||
// This is basically the same as above. Note though that `source` is
|
||||
// always missing for path dependencies regardless of serialization
|
||||
// format. That means we have to handle the `None` case a bit more
|
||||
// carefully.
|
||||
match &enc_id.source {
|
||||
Some(source) => by_source
|
||||
.get(&SourceId::from_url(&source.source_str()).unwrap())
|
||||
.cloned(),
|
||||
None => {
|
||||
// Look through all possible packages ids for this
|
||||
// name/version. If there's only one `path` dependency then
|
||||
// we are hardcoded to use that since `path` dependencies
|
||||
// can't have a source listed.
|
||||
let mut path_packages = by_source.values().filter(|p| p.source_id().is_path());
|
||||
if let Some(path) = path_packages.next() {
|
||||
if path_packages.next().is_some() {
|
||||
return None;
|
||||
}
|
||||
Some(*path)
|
||||
|
||||
// ... otherwise if there's only one then we must be
|
||||
// implicitly using that one due to a V2 serialization of
|
||||
// the lock file
|
||||
} else if by_source.len() == 1 {
|
||||
let id = by_source.values().next().unwrap();
|
||||
version = version.max(ResolveVersion::V2);
|
||||
Some(*id)
|
||||
|
||||
// ... and failing that we probably had a bad git merge of
|
||||
// `Cargo.lock` or something like that, so just ignore this.
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let mut g = Graph::new();
|
||||
|
||||
for (id, _) in live_pkgs.values() {
|
||||
g.add(*id);
|
||||
}
|
||||
|
||||
for &(ref id, pkg) in live_pkgs.values() {
|
||||
let Some(ref deps) = pkg.dependencies else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for edge in deps.iter() {
|
||||
if let Some(to_depend_on) = lookup_id(edge) {
|
||||
g.link(*id, to_depend_on);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let replacements = {
|
||||
let mut replacements = HashMap::new();
|
||||
for &(ref id, pkg) in live_pkgs.values() {
|
||||
if let Some(ref replace) = pkg.replace {
|
||||
assert!(pkg.dependencies.is_none());
|
||||
if let Some(replace_id) = lookup_id(replace) {
|
||||
replacements.insert(*id, replace_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
replacements
|
||||
};
|
||||
|
||||
let mut metadata = resolve.metadata.unwrap_or_default();
|
||||
|
||||
// In the V1 serialization formats all checksums were listed in the lock
|
||||
// file in the `[metadata]` section, so if we're still V1 then look for
|
||||
// that here.
|
||||
let prefix = "checksum ";
|
||||
let mut to_remove = Vec::new();
|
||||
for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
|
||||
to_remove.push(k.to_string());
|
||||
let k = k.strip_prefix(prefix).unwrap();
|
||||
let enc_id: TomlLockfilePackageId = k
|
||||
.parse()
|
||||
.with_context(|| internal("invalid encoding of checksum in lockfile"))?;
|
||||
let Some(id) = lookup_id(&enc_id) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let v = if v == "<none>" {
|
||||
None
|
||||
} else {
|
||||
Some(v.to_string())
|
||||
};
|
||||
checksums.insert(id, v);
|
||||
}
|
||||
// If `checksum` was listed in `[metadata]` but we were previously
|
||||
// listed as `V2` then assume some sort of bad git merge happened, so
|
||||
// discard all checksums and let's regenerate them later.
|
||||
if !to_remove.is_empty() && version >= ResolveVersion::V2 {
|
||||
checksums.drain();
|
||||
}
|
||||
for k in to_remove {
|
||||
metadata.remove(&k);
|
||||
}
|
||||
|
||||
let mut unused_patches = Vec::new();
|
||||
for pkg in resolve.patch.unused {
|
||||
let id = match pkg
|
||||
.source
|
||||
.as_ref()
|
||||
.map(|source| SourceId::from_url(&source.source_str()))
|
||||
.transpose()?
|
||||
.or_else(|| get_source_id(&path_deps, &pkg).copied())
|
||||
{
|
||||
Some(src) => PackageId::try_new(&pkg.name, &pkg.version, src)?,
|
||||
None => continue,
|
||||
};
|
||||
unused_patches.push(id);
|
||||
}
|
||||
|
||||
// We have a curious issue where in the "v1 format" we buggily had a
|
||||
// trailing blank line at the end of lock files under some specific
|
||||
// conditions.
|
||||
//
|
||||
// Cargo is trying to write new lockfies in the "v2 format" but if you
|
||||
// have no dependencies, for example, then the lockfile encoded won't
|
||||
// really have any indicator that it's in the new format (no
|
||||
// dependencies or checksums listed). This means that if you type `cargo
|
||||
// new` followed by `cargo build` it will generate a "v2 format" lock
|
||||
// file since none previously existed. When reading this on the next
|
||||
// `cargo build`, however, it generates a new lock file because when
|
||||
// reading in that lockfile we think it's the v1 format.
|
||||
//
|
||||
// To help fix this issue we special case here. If our lockfile only has
|
||||
// one trailing newline, not two, *and* it only has one package, then
|
||||
// this is actually the v2 format.
|
||||
if original.ends_with('\n')
|
||||
&& !original.ends_with("\n\n")
|
||||
&& version == ResolveVersion::V1
|
||||
&& g.iter().count() == 1
|
||||
{
|
||||
version = ResolveVersion::V2;
|
||||
}
|
||||
|
||||
return Ok(Resolve::new(
|
||||
g,
|
||||
replacements,
|
||||
HashMap::new(),
|
||||
checksums,
|
||||
metadata,
|
||||
unused_patches,
|
||||
version,
|
||||
HashMap::new(),
|
||||
));
|
||||
|
||||
fn get_source_id<'a>(
|
||||
path_deps: &'a HashMap<String, HashMap<semver::Version, SourceId>>,
|
||||
pkg: &'a TomlLockfileDependency,
|
||||
) -> Option<&'a SourceId> {
|
||||
path_deps.iter().find_map(|(name, version_source)| {
|
||||
if name != &pkg.name || version_source.len() == 0 {
|
||||
return None;
|
||||
}
|
||||
if version_source.len() == 1 {
|
||||
return Some(version_source.values().next().unwrap());
|
||||
}
|
||||
// If there are multiple candidates for the same name, it needs to be determined by combining versions (See #13405).
|
||||
if let Ok(pkg_version) = pkg.version.parse::<semver::Version>() {
|
||||
if let Some(source_id) = version_source.get(&pkg_version) {
|
||||
return Some(source_id);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -512,167 +501,6 @@ fn build_path_deps(
|
||||
}
|
||||
}
|
||||
|
||||
impl Patch {
    /// Returns `true` when no unused `[patch]` entries were recorded —
    /// presumably used to skip serializing an empty `[patch]` table; verify
    /// against the `Serialize` impl for `Resolve`.
    fn is_empty(&self) -> bool {
        self.unused.is_empty()
    }
}
|
||||
|
||||
/// A single serialized package entry in `Cargo.lock` (a `[[package]]` table,
/// also reused for `[[patch.unused]]` entries).
#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct EncodableDependency {
    name: String,
    version: String,
    // Always `None` for path dependencies, regardless of serialization format.
    source: Option<EncodableSourceId>,
    // `None` when checksums live in `[metadata]` (V1 lockfiles) or are unknown.
    checksum: Option<String>,
    dependencies: Option<Vec<EncodablePackageId>>,
    // NOTE(review): `replace` appears mutually exclusive with `dependencies`
    // (see the `assert!` when decoding replacements) — confirm before relying on it.
    replace: Option<EncodablePackageId>,
}
|
||||
|
||||
/// Pretty much equivalent to [`SourceId`] with a different serialization method.
///
/// The serialization for `SourceId` doesn't do URL encode for parameters.
/// In contrast, this type is aware of that whenever [`ResolveVersion`] allows
/// us to do so (v4 or later).
#[derive(Deserialize, Debug, PartialOrd, Ord, Clone)]
#[serde(transparent)]
pub struct EncodableSourceId {
    inner: SourceId,
    /// We don't care about the deserialization of this, as the `url` crate
    /// will always decode as the URL was encoded. Only when a [`Resolve`]
    /// turns into a [`EncodableResolve`] will it set the value accordingly
    /// via [`encodable_source_id`].
    #[serde(skip)]
    encoded: bool,
}
|
||||
|
||||
impl EncodableSourceId {
    /// Creates an `EncodableSourceId` that always encodes URL params.
    fn new(inner: SourceId) -> Self {
        Self {
            inner,
            encoded: true,
        }
    }

    /// Creates an `EncodableSourceId` that doesn't encode URL params. This is
    /// for backward compatibility with older lockfile versions (pre-V4).
    fn without_url_encoded(inner: SourceId) -> Self {
        Self {
            inner,
            encoded: false,
        }
    }

    /// Encodes the inner [`SourceId`] as a URL, honoring the `encoded` flag
    /// chosen at construction time.
    fn as_url(&self) -> impl fmt::Display + '_ {
        if self.encoded {
            self.inner.as_encoded_url()
        } else {
            self.inner.as_url()
        }
    }
}
|
||||
|
||||
// Allow an `EncodableSourceId` to be used anywhere a `&SourceId` is expected.
impl std::ops::Deref for EncodableSourceId {
    type Target = SourceId;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
|
||||
|
||||
impl ser::Serialize for EncodableSourceId {
    /// Serializes as the URL string form produced by
    /// [`EncodableSourceId::as_url`] (encoded or not per the `encoded` flag).
    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        s.collect_str(&self.as_url())
    }
}
|
||||
|
||||
// Hash only the inner `SourceId`; the `encoded` flag is a serialization
// detail and must not affect identity (kept consistent with `PartialEq`).
impl std::hash::Hash for EncodableSourceId {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.inner.hash(state)
    }
}
|
||||
|
||||
// Equality ignores the `encoded` flag, matching `Hash`: two source IDs that
// differ only in URL-encoding preference compare equal.
impl std::cmp::PartialEq for EncodableSourceId {
    fn eq(&self, other: &Self) -> bool {
        self.inner == other.inner
    }
}

impl std::cmp::Eq for EncodableSourceId {}
|
||||
|
||||
/// A reference to a package entry (e.g. in a `dependencies` list or a
/// `checksum` metadata key), serialized as `"name[ version[ (source-url)]]"`
/// — see the `Display` and `FromStr` impls below.
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
pub struct EncodablePackageId {
    name: String,
    // NOTE(review): appears to be omitted when the name alone is unambiguous
    // in newer lockfile formats — confirm against `encodable_package_id`.
    version: Option<String>,
    // `None` for path dependencies, which never list a source.
    source: Option<EncodableSourceId>,
}
|
||||
|
||||
impl fmt::Display for EncodablePackageId {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.name)?;
|
||||
if let Some(s) = &self.version {
|
||||
write!(f, " {}", s)?;
|
||||
}
|
||||
if let Some(s) = &self.source {
|
||||
write!(f, " ({})", s.as_url())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for EncodablePackageId {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
|
||||
let mut s = s.splitn(3, ' ');
|
||||
let name = s.next().unwrap();
|
||||
let version = s.next();
|
||||
let source_id = match s.next() {
|
||||
Some(s) => {
|
||||
if let Some(s) = s.strip_prefix('(').and_then(|s| s.strip_suffix(')')) {
|
||||
Some(SourceId::from_url(s)?)
|
||||
} else {
|
||||
anyhow::bail!("invalid serialized PackageId")
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok(EncodablePackageId {
|
||||
name: name.to_string(),
|
||||
version: version.map(|v| v.to_string()),
|
||||
// Default to url encoded.
|
||||
source: source_id.map(EncodableSourceId::new),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ser::Serialize for EncodablePackageId {
    /// Serializes via the `Display` impl, i.e. as the string
    /// `"name[ version[ (source-url)]]"`.
    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        s.collect_str(self)
    }
}
|
||||
|
||||
impl<'de> de::Deserialize<'de> for EncodablePackageId {
|
||||
fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
String::deserialize(d).and_then(|string| {
|
||||
string
|
||||
.parse::<EncodablePackageId>()
|
||||
.map_err(de::Error::custom)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ser::Serialize for Resolve {
|
||||
#[tracing::instrument(skip_all)]
|
||||
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
|
||||
@ -708,11 +536,11 @@ impl ser::Serialize for Resolve {
|
||||
Some(metadata)
|
||||
};
|
||||
|
||||
let patch = Patch {
|
||||
let patch = TomlLockfilePatch {
|
||||
unused: self
|
||||
.unused_patches()
|
||||
.iter()
|
||||
.map(|id| EncodableDependency {
|
||||
.map(|id| TomlLockfileDependency {
|
||||
name: id.name().to_string(),
|
||||
version: id.version().to_string(),
|
||||
source: encodable_source_id(id.source_id(), self.version()),
|
||||
@ -726,7 +554,7 @@ impl ser::Serialize for Resolve {
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
EncodableResolve {
|
||||
TomlLockfile {
|
||||
package: Some(encodable),
|
||||
root: None,
|
||||
metadata,
|
||||
@ -770,7 +598,7 @@ fn encodable_resolve_node(
|
||||
id: PackageId,
|
||||
resolve: &Resolve,
|
||||
state: &EncodeState<'_>,
|
||||
) -> EncodableDependency {
|
||||
) -> TomlLockfileDependency {
|
||||
let (replace, deps) = match resolve.replacement(id) {
|
||||
Some(id) => (
|
||||
Some(encodable_package_id(id, state, resolve.version())),
|
||||
@ -786,7 +614,7 @@ fn encodable_resolve_node(
|
||||
}
|
||||
};
|
||||
|
||||
EncodableDependency {
|
||||
TomlLockfileDependency {
|
||||
name: id.name().to_string(),
|
||||
version: id.version().to_string(),
|
||||
source: encodable_source_id(id.source_id(), resolve.version()),
|
||||
@ -804,7 +632,7 @@ pub fn encodable_package_id(
|
||||
id: PackageId,
|
||||
state: &EncodeState<'_>,
|
||||
resolve_version: ResolveVersion,
|
||||
) -> EncodablePackageId {
|
||||
) -> TomlLockfilePackageId {
|
||||
let mut version = Some(id.version().to_string());
|
||||
let mut id_to_encode = id.source_id();
|
||||
if resolve_version <= ResolveVersion::V2 {
|
||||
@ -825,21 +653,24 @@ pub fn encodable_package_id(
|
||||
}
|
||||
}
|
||||
}
|
||||
EncodablePackageId {
|
||||
TomlLockfilePackageId {
|
||||
name: id.name().to_string(),
|
||||
version,
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<EncodableSourceId> {
|
||||
fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<TomlLockfileSourceId> {
|
||||
if id.is_path() {
|
||||
None
|
||||
} else {
|
||||
Some(if version >= ResolveVersion::V4 {
|
||||
EncodableSourceId::new(id)
|
||||
} else {
|
||||
EncodableSourceId::without_url_encoded(id)
|
||||
})
|
||||
Some(
|
||||
if version >= ResolveVersion::V4 {
|
||||
TomlLockfileSourceId::new(id.as_encoded_url().to_string())
|
||||
} else {
|
||||
TomlLockfileSourceId::new(id.as_url().to_string())
|
||||
}
|
||||
.expect("source ID should have valid URLs"),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -76,8 +76,6 @@ use self::features::RequestedFeatures;
|
||||
use self::types::{ConflictMap, ConflictReason, DepsFrame};
|
||||
use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress};
|
||||
|
||||
pub use self::encode::Metadata;
|
||||
pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
|
||||
pub use self::errors::{ActivateError, ActivateResult, ResolveError};
|
||||
pub use self::features::{CliFeatures, ForceAllTargets, HasDevUnits};
|
||||
pub use self::resolve::{Resolve, ResolveVersion};
|
||||
|
@ -1,12 +1,12 @@
|
||||
use cargo_util_schemas::core::PartialVersion;
|
||||
use cargo_util_schemas::manifest::RustVersion;
|
||||
|
||||
use super::encode::Metadata;
|
||||
use crate::core::dependency::DepKind;
|
||||
use crate::core::{Dependency, PackageId, PackageIdSpec, PackageIdSpecQuery, Summary, Target};
|
||||
use crate::util::Graph;
|
||||
use crate::util::errors::CargoResult;
|
||||
use crate::util::interning::InternedString;
|
||||
use cargo_util_schemas::lockfile::TomlLockfileMetadata;
|
||||
use std::borrow::Borrow;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fmt;
|
||||
@ -34,7 +34,7 @@ pub struct Resolve {
|
||||
/// "Unknown" metadata. This is a collection of extra, unrecognized data
|
||||
/// found in the `[metadata]` section of `Cargo.lock`, preserved for
|
||||
/// forwards compatibility.
|
||||
metadata: Metadata,
|
||||
metadata: TomlLockfileMetadata,
|
||||
/// `[patch]` entries that did not match anything, preserved in
|
||||
/// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused
|
||||
/// patches helps prevent Cargo from being forced to re-update the
|
||||
@ -156,7 +156,7 @@ impl Resolve {
|
||||
replacements: HashMap<PackageId, PackageId>,
|
||||
features: HashMap<PackageId, Vec<InternedString>>,
|
||||
checksums: HashMap<PackageId, Option<String>>,
|
||||
metadata: Metadata,
|
||||
metadata: TomlLockfileMetadata,
|
||||
unused_patches: Vec<PackageId>,
|
||||
version: ResolveVersion,
|
||||
summaries: HashMap<PackageId, Summary>,
|
||||
@ -394,7 +394,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated
|
||||
self.checksums.insert(pkg_id, Some(checksum));
|
||||
}
|
||||
|
||||
pub fn metadata(&self) -> &Metadata {
|
||||
pub fn metadata(&self) -> &TomlLockfileMetadata {
|
||||
&self.metadata
|
||||
}
|
||||
|
||||
|
@ -1,10 +1,12 @@
|
||||
use std::io::prelude::*;
|
||||
|
||||
use crate::core::{Resolve, ResolveVersion, Workspace, resolver};
|
||||
use crate::core::resolver::encode::into_resolve;
|
||||
use crate::core::{Resolve, ResolveVersion, Workspace};
|
||||
use crate::util::Filesystem;
|
||||
use crate::util::errors::CargoResult;
|
||||
|
||||
use anyhow::Context as _;
|
||||
use cargo_util_schemas::lockfile::TomlLockfile;
|
||||
|
||||
pub const LOCKFILE_NAME: &str = "Cargo.lock";
|
||||
|
||||
@ -22,8 +24,8 @@ pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult<Option<Resolve>> {
|
||||
.with_context(|| format!("failed to read file: {}", f.path().display()))?;
|
||||
|
||||
let resolve = (|| -> CargoResult<Option<Resolve>> {
|
||||
let v: resolver::EncodableResolve = toml::from_str(&s)?;
|
||||
Ok(Some(v.into_resolve(&s, ws)?))
|
||||
let v: TomlLockfile = toml::from_str(&s)?;
|
||||
Ok(Some(into_resolve(v, &s, ws)?))
|
||||
})()
|
||||
.with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?;
|
||||
Ok(resolve)
|
||||
@ -206,9 +208,9 @@ fn are_equal_lockfiles(orig: &str, current: &str, ws: &Workspace<'_>) -> bool {
|
||||
// common case where we can update lock files.
|
||||
if !ws.gctx().lock_update_allowed() {
|
||||
let res: CargoResult<bool> = (|| {
|
||||
let old: resolver::EncodableResolve = toml::from_str(orig)?;
|
||||
let new: resolver::EncodableResolve = toml::from_str(current)?;
|
||||
Ok(old.into_resolve(orig, ws)? == new.into_resolve(current, ws)?)
|
||||
let old: TomlLockfile = toml::from_str(orig)?;
|
||||
let new: TomlLockfile = toml::from_str(current)?;
|
||||
Ok(into_resolve(old, orig, ws)? == into_resolve(new, current, ws)?)
|
||||
})();
|
||||
if let Ok(true) = res {
|
||||
return true;
|
||||
|
Loading…
x
Reference in New Issue
Block a user