Auto merge of #12248 - weihanglo:source-refactor, r=epage

refactor: registry data kinds cleanup

Commit 928b9561c1
@@ -10,10 +10,10 @@ use std::time::{Duration, Instant};
 use anyhow::Context;
 use bytesize::ByteSize;
-use curl::easy::{Easy, HttpVersion};
+use curl::easy::Easy;
 use curl::multi::{EasyHandle, Multi};
 use lazycell::LazyCell;
-use log::{debug, warn};
+use log::debug;
 use semver::Version;
 use serde::Serialize;

@@ -725,32 +725,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
             handle.http_headers(headers)?;
         }

-        // Enable HTTP/2 to be used as it'll allow true multiplexing which makes
-        // downloads much faster.
-        //
-        // Currently Cargo requests the `http2` feature of the `curl` crate
-        // which means it should always be built in. On OSX, however, we ship
-        // cargo still linked against the system libcurl. Building curl with
-        // ALPN support for HTTP/2 requires newer versions of OSX (the
-        // SecureTransport API) than we want to ship Cargo for. By linking Cargo
-        // against the system libcurl then older curl installations won't use
-        // HTTP/2 but newer ones will. All that to basically say we ignore
-        // errors here on OSX, but consider this a fatal error to not activate
-        // HTTP/2 on all other platforms.
-        if self.set.multiplexing {
-            crate::try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2");
-        } else {
-            handle.http_version(HttpVersion::V11)?;
-        }
-
-        // This is an option to `libcurl` which indicates that if there's a
-        // bunch of parallel requests to the same host they all wait until the
-        // pipelining status of the host is known. This means that we won't
-        // initiate dozens of connections to crates.io, but rather only one.
-        // Once the main one is opened we realized that pipelining is possible
-        // and multiplexing is possible with static.crates.io. All in all this
-        // reduces the number of connections down to a more manageable state.
-        crate::try_old_curl!(handle.pipewait(true), "pipewait");
+        // Enable HTTP/2 if possible.
+        crate::try_old_curl_http2_pipewait!(self.set.multiplexing, handle);

         handle.write_function(move |buf| {
             debug!("{} - {} bytes of data", token, buf.len());
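Note: the `try_old_curl_http2_pipewait!` macro is defined in the last hunk of this diff; it folds the HTTP/2 and pipewait setup removed here into a single call. Roughly, the call above expands to the following (an approximate sketch reconstructed from that macro body, not code present in this commit):

    // Approximate expansion of
    // `crate::try_old_curl_http2_pipewait!(self.set.multiplexing, handle)`.
    if self.set.multiplexing {
        crate::try_old_curl!(handle.http_version(curl::easy::HttpVersion::V2), "HTTP/2");
    } else {
        handle.http_version(curl::easy::HttpVersion::V11)?;
    }
    crate::try_old_curl!(handle.pipewait(true), "pipewait");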
@@ -196,6 +196,11 @@ impl PackageId {
     pub fn stable_hash(self, workspace: &Path) -> PackageIdStableHash<'_> {
         PackageIdStableHash(self, workspace)
     }
+
+    /// Filename of the `.crate` tarball, e.g., `once_cell-1.18.0.crate`.
+    pub fn tarball_name(&self) -> String {
+        format!("{}-{}.crate", self.name(), self.version())
+    }
 }

 pub struct PackageIdStableHash<'a>(PackageId, &'a Path);
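The new `PackageId::tarball_name` helper centralizes the `{name}-{version}.crate` naming convention that the call sites below previously re-derived with `format!`. A minimal standalone sketch of the behavior, using a stand-in struct rather than Cargo's real `PackageId`:

    // Stand-in type for illustration only; Cargo's PackageId is an interned,
    // Copy handle, but the naming rule is the same.
    struct Pkg {
        name: String,
        version: String,
    }

    impl Pkg {
        /// Mirrors PackageId::tarball_name from the hunk above.
        fn tarball_name(&self) -> String {
            format!("{}-{}.crate", self.name, self.version)
        }
    }

    fn main() {
        let pkg = Pkg { name: "once_cell".into(), version: "1.18.0".into() };
        assert_eq!(pkg.tarball_name(), "once_cell-1.18.0.crate");
    }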
@@ -127,7 +127,7 @@ pub fn package_one(
         super::check_dep_has_version(dep, false)?;
     }

-    let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+    let filename = pkg.package_id().tarball_name();
     let dir = ws.target_dir().join("package");
     let mut dst = {
         let tmp = format!(".{}", filename);
@@ -4,10 +4,10 @@
 //! [`RemoteRegistry`]: super::remote::RemoteRegistry

 use anyhow::Context;
+use cargo_util::registry::make_dep_path;
 use cargo_util::Sha256;

 use crate::core::PackageId;
-use crate::sources::registry::make_dep_prefix;
 use crate::sources::registry::MaybeLock;
 use crate::sources::registry::RegistryConfig;
 use crate::util::auth;
@@ -25,11 +25,6 @@ const PREFIX_TEMPLATE: &str = "{prefix}";
 const LOWER_PREFIX_TEMPLATE: &str = "{lowerprefix}";
 const CHECKSUM_TEMPLATE: &str = "{sha256-checksum}";

-/// Filename of the `.crate` tarball, e.g., `once_cell-1.18.0.crate`.
-pub(super) fn filename(pkg: PackageId) -> String {
-    format!("{}-{}.crate", pkg.name(), pkg.version())
-}
-
 /// Checks if `pkg` is downloaded and ready under the directory at `cache_path`.
 /// If not, returns a URL to download it from.
 ///
@@ -41,8 +36,7 @@ pub(super) fn download(
     checksum: &str,
     registry_config: RegistryConfig,
 ) -> CargoResult<MaybeLock> {
-    let filename = filename(pkg);
-    let path = cache_path.join(&filename);
+    let path = cache_path.join(&pkg.tarball_name());
     let path = config.assert_package_cache_locked(&path);

     // Attempt to open a read-only copy first to avoid an exclusive write
@@ -74,7 +68,7 @@ pub(super) fn download(
         )
         .unwrap();
     } else {
-        let prefix = make_dep_prefix(&*pkg.name());
+        let prefix = make_dep_path(&pkg.name(), true);
         url = url
             .replace(CRATE_TEMPLATE, &*pkg.name())
             .replace(VERSION_TEMPLATE, &pkg.version().to_string())
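`make_dep_prefix` was a thin wrapper around `cargo_util::registry::make_dep_path(name, true)` (its removal appears in a later hunk), so this is a behavior-preserving inlining. As a rough sketch of the prefix scheme — the exact rules below are my reading of that helper, not part of this diff:

    // Simplified mirror of cargo_util::registry::make_dep_path with
    // prefix_only = true: short names get bucketed, longer names use the
    // first two pairs of characters.
    fn dep_prefix(name: &str) -> String {
        match name.len() {
            1 => "1".to_string(),
            2 => "2".to_string(),
            3 => format!("3/{}", &name[..1]),
            _ => format!("{}/{}", &name[..2], &name[2..4]),
        }
    }

    fn main() {
        assert_eq!(dep_prefix("once_cell"), "on/ce");
        assert_eq!(dep_prefix("log"), "3/l");
    }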
@@ -113,9 +107,8 @@ pub(super) fn finish_download(
         anyhow::bail!("failed to verify the checksum of `{}`", pkg)
     }

-    let filename = filename(pkg);
     cache_path.create_dir()?;
-    let path = cache_path.join(&filename);
+    let path = cache_path.join(&pkg.tarball_name());
     let path = config.assert_package_cache_locked(&path);
     let mut dst = OpenOptions::new()
         .create(true)
@@ -142,7 +135,7 @@ pub(super) fn is_crate_downloaded(
     config: &Config,
     pkg: PackageId,
 ) -> bool {
-    let path = cache_path.join(filename(pkg));
+    let path = cache_path.join(pkg.tarball_name());
     let path = config.assert_package_cache_locked(&path);
     if let Ok(meta) = fs::metadata(path) {
         return meta.len() > 0;
@@ -1,7 +1,7 @@
 //! Access to a HTTP-based crate registry. See [`HttpRegistry`] for details.

 use crate::core::{PackageId, SourceId};
-use crate::ops::{self};
+use crate::ops;
 use crate::sources::registry::download;
 use crate::sources::registry::MaybeLock;
 use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData};
@@ -11,9 +11,9 @@ use crate::util::network::sleep::SleepTracker;
 use crate::util::{auth, Config, Filesystem, IntoUrl, Progress, ProgressStyle};
 use anyhow::Context;
 use cargo_util::paths;
-use curl::easy::{Easy, HttpVersion, List};
+use curl::easy::{Easy, List};
 use curl::multi::{EasyHandle, Multi};
-use log::{debug, trace, warn};
+use log::{debug, trace};
 use std::cell::RefCell;
 use std::collections::{HashMap, HashSet};
 use std::fs::{self, File};
@@ -383,7 +383,7 @@ impl<'cfg> HttpRegistry<'cfg> {
         }
         let config_json_path = self
             .assert_index_locked(&self.index_path)
-            .join("config.json");
+            .join(RegistryConfig::NAME);
         match fs::read(&config_json_path) {
             Ok(raw_data) => match serde_json::from_slice(&raw_data) {
                 Ok(json) => {
@@ -404,12 +404,12 @@ impl<'cfg> HttpRegistry<'cfg> {
     fn config(&mut self) -> Poll<CargoResult<&RegistryConfig>> {
         debug!("loading config");
         let index_path = self.assert_index_locked(&self.index_path);
-        let config_json_path = index_path.join("config.json");
-        if self.is_fresh(Path::new("config.json")) && self.config_cached()?.is_some() {
+        let config_json_path = index_path.join(RegistryConfig::NAME);
+        if self.is_fresh(Path::new(RegistryConfig::NAME)) && self.config_cached()?.is_some() {
             return Poll::Ready(Ok(self.registry_config.as_ref().unwrap()));
         }

-        match ready!(self.load(Path::new(""), Path::new("config.json"), None)?) {
+        match ready!(self.load(Path::new(""), Path::new(RegistryConfig::NAME), None)?) {
             LoadResponse::Data {
                 raw_data,
                 index_version: _,
@@ -543,7 +543,7 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
             }
             StatusCode::Unauthorized
                 if !self.auth_required
-                    && path == Path::new("config.json")
+                    && path == Path::new(RegistryConfig::NAME)
                     && self.config.cli_unstable().registry_auth =>
             {
                 debug!("re-attempting request for config.json with authorization included.");
@@ -593,7 +593,7 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
             }
         }

-        if path != Path::new("config.json") {
+        if path != Path::new(RegistryConfig::NAME) {
             self.auth_required = ready!(self.config()?).auth_required;
         } else if !self.auth_required {
             // Check if there's a cached config that says auth is required.
@@ -618,20 +618,7 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
         handle.follow_location(true)?;

         // Enable HTTP/2 if possible.
-        if self.multiplexing {
-            crate::try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2");
-        } else {
-            handle.http_version(HttpVersion::V11)?;
-        }
-
-        // This is an option to `libcurl` which indicates that if there's a
-        // bunch of parallel requests to the same host they all wait until the
-        // pipelining status of the host is known. This means that we won't
-        // initiate dozens of connections to crates.io, but rather only one.
-        // Once the main one is opened we realized that pipelining is possible
-        // and multiplexing is possible with static.crates.io. All in all this
-        // reduces the number of connections done to a more manageable state.
-        crate::try_old_curl!(handle.pipewait(true), "pipewait");
+        crate::try_old_curl_http2_pipewait!(self.multiplexing, handle);

         let mut headers = List::new();
         // Include a header to identify the protocol. This allows the server to
@@ -167,17 +167,15 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
     }

     fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {
-        let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
-
         // Note that the usage of `into_path_unlocked` here is because the local
         // crate files here never change in that we're not the one writing them,
         // so it's not our responsibility to synchronize access to them.
-        let path = self.root.join(&crate_file).into_path_unlocked();
+        let path = self.root.join(&pkg.tarball_name()).into_path_unlocked();
         let mut crate_file = paths::open(&path)?;

         // If we've already got an unpacked version of this crate, then skip the
         // checksum below as it is in theory already verified.
-        let dst = format!("{}-{}", pkg.name(), pkg.version());
+        let dst = path.file_stem().unwrap();
         if self.src_path.join(dst).into_path_unlocked().exists() {
             return Ok(MaybeLock::Ready(crate_file));
         }
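Here `path.file_stem()` recovers the unpack directory name from the tarball path, so the separate `format!("{}-{}", name, version)` is no longer needed: for `once_cell-1.18.0.crate` the stem is `once_cell-1.18.0`. A small sketch of that equivalence:

    use std::path::Path;

    fn main() {
        // The stem of "<name>-<version>.crate" is "<name>-<version>", which is
        // exactly what the removed format!("{}-{}", name, version) produced.
        let path = Path::new("/registry/src/once_cell-1.18.0.crate");
        assert_eq!("once_cell-1.18.0", path.file_stem().unwrap());
    }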
@@ -856,6 +856,11 @@ impl<'cfg> Source for RegistrySource<'cfg> {
     }
 }

+impl RegistryConfig {
+    /// File name of [`RegistryConfig`].
+    const NAME: &str = "config.json";
+}
+
 /// Get the maximum upack size that Cargo permits
 /// based on a given `size` of your compressed file.
 ///
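Replacing the repeated "config.json" literal with an associated constant keeps the file name in one place. A minimal sketch of the pattern in isolation, using a stand-in type rather than Cargo's actual `RegistryConfig`:

    use std::path::Path;

    // Illustrative stand-in for the RegistryConfig type in the hunk above.
    struct RegistryConfig;

    impl RegistryConfig {
        /// File name of the registry configuration, mirroring the new constant.
        const NAME: &'static str = "config.json";
    }

    fn main() {
        let index_path = Path::new("/registry/index");
        let config_json_path = index_path.join(RegistryConfig::NAME);
        assert!(config_json_path.ends_with("config.json"));
    }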
@@ -903,9 +908,3 @@ fn max_unpack_size(config: &Config, size: u64) -> u64 {

     u64::max(max_unpack_size, size * max_compression_ratio as u64)
 }
-
-/// Constructs a path to a dependency in the registry index on filesystem.
-/// See [`cargo_util::registry::make_dep_path`] for more.
-fn make_dep_prefix(name: &str) -> String {
-    cargo_util::registry::make_dep_path(name, true)
-}
@@ -103,14 +103,9 @@ impl<'cfg> RemoteRegistry<'cfg> {
     /// Creates intermediate dirs and initialize the repository.
     fn repo(&self) -> CargoResult<&git2::Repository> {
         self.repo.try_borrow_with(|| {
+            trace!("acquiring registry index lock");
             let path = self.config.assert_package_cache_locked(&self.index_path);

-            if let Ok(repo) = git2::Repository::open(&path) {
-                trace!("opened a repo without a lock");
-                return Ok(repo);
-            }
-
-            trace!("acquiring registry index lock");
             match git2::Repository::open(&path) {
                 Ok(repo) => Ok(repo),
                 Err(_) => {
@@ -210,8 +205,6 @@ impl<'cfg> RemoteRegistry<'cfg> {
     }
 }

-const LAST_UPDATED_FILE: &str = ".last-updated";
-
 impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
     fn prepare(&self) -> CargoResult<()> {
         self.repo()?;
@@ -311,7 +304,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         debug!("loading config");
         self.prepare()?;
         self.config.assert_package_cache_locked(&self.index_path);
-        match ready!(self.load(Path::new(""), Path::new("config.json"), None)?) {
+        match ready!(self.load(Path::new(""), Path::new(RegistryConfig::NAME), None)?) {
             LoadResponse::Data { raw_data, .. } => {
                 trace!("config loaded");
                 let mut cfg: RegistryConfig = serde_json::from_slice(&raw_data)?;
@@ -357,7 +350,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         self.head.set(None);
         *self.tree.borrow_mut() = None;
         self.current_sha.set(None);
-        let path = self.config.assert_package_cache_locked(&self.index_path);
+        let _path = self.config.assert_package_cache_locked(&self.index_path);
         if !self.quiet {
             self.config
                 .shell()
@@ -377,10 +370,6 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         )
         .with_context(|| format!("failed to fetch `{}`", url))?;

-        // Create a dummy file to record the mtime for when we updated the
-        // index.
-        paths::create(&path.join(LAST_UPDATED_FILE))?;
-
         Ok(())
     }

@@ -20,20 +20,51 @@ impl<T> PollExt<T> for Poll<T> {
     }
 }

-// When dynamically linked against libcurl, we want to ignore some failures
-// when using old versions that don't support certain features.
+/// When dynamically linked against libcurl, we want to ignore some failures
+/// when using old versions that don't support certain features.
 #[macro_export]
 macro_rules! try_old_curl {
     ($e:expr, $msg:expr) => {
         let result = $e;
         if cfg!(target_os = "macos") {
             if let Err(e) = result {
-                warn!("ignoring libcurl {} error: {}", $msg, e);
+                ::log::warn!("ignoring libcurl {} error: {}", $msg, e);
             }
         } else {
+            use ::anyhow::Context;
             result.with_context(|| {
-                anyhow::format_err!("failed to enable {}, is curl not built right?", $msg)
+                ::anyhow::format_err!("failed to enable {}, is curl not built right?", $msg)
             })?;
         }
     };
 }
+
+/// Enable HTTP/2 and pipewait to be used as it'll allow true multiplexing
+/// which makes downloads much faster.
+///
+/// Currently Cargo requests the `http2` feature of the `curl` crate which
+/// means it should always be built in. On OSX, however, we ship cargo still
+/// linked against the system libcurl. Building curl with ALPN support for
+/// HTTP/2 requires newer versions of OSX (the SecureTransport API) than we
+/// want to ship Cargo for. By linking Cargo against the system libcurl then
+/// older curl installations won't use HTTP/2 but newer ones will. All that to
+/// basically say we ignore errors here on OSX, but consider this a fatal error
+/// to not activate HTTP/2 on all other platforms.
+///
+/// `pipewait` is an option which indicates that if there's a bunch of parallel
+/// requests to the same host they all wait until the pipelining status of the
+/// host is known. This means that we won't initiate dozens of connections but
+/// rather only one. Once the main one is opened we realized that pipelining is
+/// possible and multiplexing is possible. All in all this reduces the number
+/// of connections down to a more manageable state.
+#[macro_export]
+macro_rules! try_old_curl_http2_pipewait {
+    ($multiplexing:expr, $handle:expr) => {
+        if $multiplexing {
+            $crate::try_old_curl!($handle.http_version(curl::easy::HttpVersion::V2), "HTTP/2");
+        } else {
+            $handle.http_version(curl::easy::HttpVersion::V11)?;
+        }
+        $crate::try_old_curl!($handle.pipewait(true), "pipewait");
+    };
+}
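For reference, a minimal sketch of a call site for the new macro, assuming code inside the cargo crate with a curl `Easy` handle and a `multiplexing` flag (this mirrors the call sites updated earlier in this diff; `configure_handle` itself is a hypothetical helper, not part of the change):

    // The macro uses `?`, so the enclosing function must return a Result.
    fn configure_handle(handle: &mut curl::easy::Easy, multiplexing: bool) -> anyhow::Result<()> {
        crate::try_old_curl_http2_pipewait!(multiplexing, handle);
        Ok(())
    }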