SourceId is copy, clippy thinks we don't need &SourceId or SourceId.clone()

This commit is contained in:
Eh2406 2018-11-25 12:31:27 -05:00
parent 1ff5975b96
commit e5a11190b3
35 changed files with 757 additions and 655 deletions

View File

@ -81,7 +81,7 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'"
pub fn get_version_string(is_verbose: bool) -> String {
let version = cargo::version();
let mut version_string = String::from(version.to_string());
let mut version_string = version.to_string();
version_string.push_str("\n");
if is_verbose {
version_string.push_str(&format!(
@ -218,7 +218,8 @@ See 'cargo help <command>' for more information on a specific command.\n",
opt(
"verbose",
"Use verbose output (-vv very verbose/build.rs output)",
).short("v")
)
.short("v")
.multiple(true)
.global(true),
)

View File

@ -28,7 +28,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let reference = GitReference::Branch(reference.to_string());
let source_id = SourceId::for_git(&url, reference)?;
let mut source = GitSource::new(&source_id, config)?;
let mut source = GitSource::new(source_id, config)?;
source.update()?;

View File

@ -82,7 +82,8 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
compile_opts.build_config.release = !args.is_present("debug");
let krates = args.values_of("crate")
let krates = args
.values_of("crate")
.unwrap_or_default()
.collect::<Vec<_>>();
@ -120,7 +121,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
ops::install(
root,
krates,
&source,
source,
from_cwd,
version,
&compile_opts,

View File

@ -3,9 +3,9 @@ use command_prelude::*;
use std::io::{self, BufRead};
use cargo::core::{Source, SourceId};
use cargo::ops;
use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt};
use cargo::ops;
pub fn cli() -> App {
subcommand("login")
@ -29,11 +29,12 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
return Err(format_err!(
"token must be provided when \
--registry is provided."
).into());
)
.into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
let mut src = RegistrySource::remote(src, config);
src.update()?;
let config = src.config()?.unwrap();
args.value_of("host")

View File

@ -2,14 +2,14 @@ use std::fmt;
use std::rc::Rc;
use std::str::FromStr;
use semver::VersionReq;
use semver::ReqParseError;
use semver::VersionReq;
use serde::ser;
use core::{PackageId, SourceId, Summary};
use core::interning::InternedString;
use util::{Cfg, CfgExpr, Config};
use core::{PackageId, SourceId, Summary};
use util::errors::{CargoError, CargoResult, CargoResultExt};
use util::{Cfg, CfgExpr, Config};
/// Information about a dependency requested by a Cargo manifest.
/// Cheap to copy.
@ -48,7 +48,7 @@ pub enum Platform {
#[derive(Serialize)]
struct SerializedDependency<'a> {
name: &'a str,
source: &'a SourceId,
source: SourceId,
req: String,
kind: Kind,
rename: Option<&'a str>,
@ -74,7 +74,8 @@ impl ser::Serialize for Dependency {
features: self.features(),
target: self.platform(),
rename: self.explicit_name_in_toml().map(|s| s.as_str()),
}.serialize(s)
}
.serialize(s)
}
}
@ -116,7 +117,7 @@ this warning.
config.shell().warn(&msg)?;
Ok(requirement)
},
}
Err(e) => {
let err: CargoResult<VersionReq> = Err(e.into());
let v: VersionReq = err.chain_err(|| {
@ -126,7 +127,7 @@ this warning.
)
})?;
Ok(v)
},
}
Ok(v) => Ok(v),
}
}
@ -140,7 +141,8 @@ impl ser::Serialize for Kind {
Kind::Normal => None,
Kind::Development => Some("dev"),
Kind::Build => Some("build"),
}.serialize(s)
}
.serialize(s)
}
}
@ -149,7 +151,7 @@ impl Dependency {
pub fn parse(
name: &str,
version: Option<&str>,
source_id: &SourceId,
source_id: SourceId,
inside: &PackageId,
config: &Config,
) -> CargoResult<Dependency> {
@ -173,7 +175,7 @@ impl Dependency {
pub fn parse_no_deprecated(
name: &str,
version: Option<&str>,
source_id: &SourceId,
source_id: SourceId,
) -> CargoResult<Dependency> {
let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(name, v, None)?),
@ -190,12 +192,12 @@ impl Dependency {
Ok(ret)
}
pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
pub fn new_override(name: &str, source_id: SourceId) -> Dependency {
assert!(!name.is_empty());
Dependency {
inner: Rc::new(Inner {
name: InternedString::new(name),
source_id: source_id.clone(),
source_id,
registry_id: None,
req: VersionReq::any(),
kind: Kind::Normal,
@ -260,16 +262,16 @@ impl Dependency {
self.inner.name
}
pub fn source_id(&self) -> &SourceId {
&self.inner.source_id
pub fn source_id(&self) -> SourceId {
self.inner.source_id
}
pub fn registry_id(&self) -> Option<&SourceId> {
self.inner.registry_id.as_ref()
pub fn registry_id(&self) -> Option<SourceId> {
self.inner.registry_id
}
pub fn set_registry_id(&mut self, registry_id: &SourceId) -> &mut Dependency {
Rc::make_mut(&mut self.inner).registry_id = Some(registry_id.clone());
pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency {
Rc::make_mut(&mut self.inner).registry_id = Some(registry_id);
self
}
@ -301,9 +303,14 @@ impl Dependency {
}
/// Sets the list of features requested for the package.
pub fn set_features(&mut self, features: impl IntoIterator<Item=impl AsRef<str>>) -> &mut Dependency {
Rc::make_mut(&mut self.inner).features =
features.into_iter().map(|s| InternedString::new(s.as_ref())).collect();
pub fn set_features(
&mut self,
features: impl IntoIterator<Item = impl AsRef<str>>,
) -> &mut Dependency {
Rc::make_mut(&mut self.inner).features = features
.into_iter()
.map(|s| InternedString::new(s.as_ref()))
.collect();
self
}
@ -343,7 +350,7 @@ impl Dependency {
/// Lock this dependency to depending on the specified package id
pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
assert_eq!(self.inner.source_id, *id.source_id());
assert_eq!(self.inner.source_id, id.source_id());
assert!(self.inner.req.matches(id.version()));
trace!(
"locking dep from `{}` with `{}` at {} to {}",
@ -353,7 +360,7 @@ impl Dependency {
id
);
self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone())
.set_source_id(id.source_id())
}
/// Returns whether this is a "locked" dependency, basically whether it has
@ -405,15 +412,14 @@ impl Dependency {
pub fn matches_id(&self, id: &PackageId) -> bool {
self.inner.name == id.name()
&& (self.inner.only_match_name
|| (self.inner.req.matches(id.version())
&& &self.inner.source_id == id.source_id()))
|| (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id()))
}
pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency {
pub fn map_source(mut self, to_replace: SourceId, replace_with: SourceId) -> Dependency {
if self.source_id() != to_replace {
self
} else {
self.set_source_id(replace_with.clone());
self.set_source_id(replace_with);
self
}
}
@ -446,7 +452,8 @@ impl FromStr for Platform {
fn from_str(s: &str) -> CargoResult<Platform> {
if s.starts_with("cfg(") && s.ends_with(')') {
let s = &s[4..s.len() - 1];
let p = s.parse()
let p = s
.parse()
.map(Platform::Cfg)
.chain_err(|| format_err!("failed to parse `{}` as a cfg expression", s))?;
Ok(p)

View File

@ -34,7 +34,7 @@ impl Eq for InternedString {}
impl InternedString {
pub fn new(str: &str) -> InternedString {
let mut cache = STRING_CACHE.lock().unwrap();
let s = cache.get(str).map(|&s| s).unwrap_or_else(|| {
let s = cache.get(str).cloned().unwrap_or_else(|| {
let s = leak(str.to_string());
cache.insert(s);
s

View File

@ -15,7 +15,7 @@ use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
use core::{Edition, Feature, Features, WorkspaceConfig};
use util::errors::*;
use util::toml::TomlManifest;
use util::{Config, Filesystem, short_hash};
use util::{short_hash, Config, Filesystem};
pub enum EitherManifest {
Real(Manifest),
@ -254,11 +254,7 @@ impl fmt::Debug for TargetSourcePath {
impl From<PathBuf> for TargetSourcePath {
fn from(path: PathBuf) -> Self {
assert!(
path.is_absolute(),
"`{}` is not absolute",
path.display()
);
assert!(path.is_absolute(), "`{}` is not absolute", path.display());
TargetSourcePath::Path(path)
}
}
@ -290,7 +286,8 @@ impl ser::Serialize for Target {
.required_features
.as_ref()
.map(|rf| rf.iter().map(|s| &**s).collect()),
}.serialize(s)
}
.serialize(s)
}
}
@ -468,7 +465,7 @@ impl Manifest {
self.summary = summary;
}
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest {
pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Manifest {
Manifest {
summary: self.summary.map_source(to_replace, replace_with),
..self
@ -490,11 +487,7 @@ impl Manifest {
if self.default_run.is_some() {
self.features
.require(Feature::default_run())
.chain_err(|| {
format_err!(
"the `default-run` manifest key is unstable"
)
})?;
.chain_err(|| format_err!("the `default-run` manifest key is unstable"))?;
}
Ok(())
@ -627,11 +620,7 @@ impl Target {
}
/// Builds a `Target` corresponding to the `build = "build.rs"` entry.
pub fn custom_build_target(
name: &str,
src_path: PathBuf,
edition: Edition,
) -> Target {
pub fn custom_build_target(name: &str, src_path: PathBuf, edition: Edition) -> Target {
Target {
kind: TargetKind::CustomBuild,
name: name.to_string(),
@ -740,7 +729,9 @@ impl Target {
pub fn for_host(&self) -> bool {
self.for_host
}
pub fn edition(&self) -> Edition { self.edition }
pub fn edition(&self) -> Edition {
self.edition
}
pub fn benched(&self) -> bool {
self.benched
}
@ -839,7 +830,8 @@ impl Target {
pub fn can_lto(&self) -> bool {
match self.kind {
TargetKind::Lib(ref v) => {
!v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib)
!v.contains(&LibKind::Rlib)
&& !v.contains(&LibKind::Dylib)
&& !v.contains(&LibKind::Lib)
}
_ => true,

View File

@ -1,16 +1,16 @@
use std::cell::{Ref, RefCell, Cell};
use std::cell::{Cell, Ref, RefCell};
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::hash;
use std::mem;
use std::path::{Path, PathBuf};
use std::time::{Instant, Duration};
use std::time::{Duration, Instant};
use bytesize::ByteSize;
use curl::easy::{Easy, HttpVersion};
use curl::multi::{Multi, EasyHandle};
use curl;
use curl::easy::{Easy, HttpVersion};
use curl::multi::{EasyHandle, Multi};
use curl_sys;
use failure::ResultExt;
use lazycell::LazyCell;
@ -18,14 +18,14 @@ use semver::Version;
use serde::ser;
use toml;
use core::interning::InternedString;
use core::source::MaybePackage;
use core::{Dependency, Manifest, PackageId, SourceId, Target};
use core::{FeatureMap, SourceMap, Summary};
use core::source::MaybePackage;
use core::interning::InternedString;
use ops;
use util::{self, internal, lev_distance, Config, Progress, ProgressStyle};
use util::errors::{CargoResult, CargoResultExt, HttpNot200};
use util::network::Retry;
use util::{self, internal, lev_distance, Config, Progress, ProgressStyle};
/// Information about a package that is available somewhere in the file system.
///
@ -60,7 +60,7 @@ struct SerializedPackage<'a> {
license: Option<&'a str>,
license_file: Option<&'a str>,
description: Option<&'a str>,
source: &'a SourceId,
source: SourceId,
dependencies: &'a [Dependency],
targets: Vec<&'a Target>,
features: &'a FeatureMap,
@ -122,7 +122,8 @@ impl ser::Serialize for Package {
repository,
edition: &self.manifest.edition().to_string(),
metabuild: self.manifest.metabuild(),
}.serialize(s)
}
.serialize(s)
}
}
@ -200,7 +201,7 @@ impl Package {
matches.min_by_key(|t| t.0).map(|t| t.1)
}
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package {
pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Package {
Package {
manifest: self.manifest.map_source(to_replace, replace_with),
manifest_path: self.manifest_path,
@ -340,9 +341,11 @@ impl<'cfg> PackageSet<'cfg> {
// that it's buggy, and we've empirically seen that it's buggy with HTTP
// proxies.
let mut multi = Multi::new();
let multiplexing = config.get::<Option<bool>>("http.multiplexing")?
let multiplexing = config
.get::<Option<bool>>("http.multiplexing")?
.unwrap_or(true);
multi.pipelining(false, multiplexing)
multi
.pipelining(false, multiplexing)
.chain_err(|| "failed to enable multiplexing/pipelining in curl")?;
// let's not flood crates.io with connections
@ -395,9 +398,10 @@ impl<'cfg> PackageSet<'cfg> {
Ok(self.get_many(Some(id))?.remove(0))
}
pub fn get_many<'a>(&self, ids: impl IntoIterator<Item = &'a PackageId>)
-> CargoResult<Vec<&Package>>
{
pub fn get_many<'a>(
&self,
ids: impl IntoIterator<Item = &'a PackageId>,
) -> CargoResult<Vec<&Package>> {
let mut pkgs = Vec::new();
let mut downloads = self.enable_download()?;
for id in ids {
@ -424,7 +428,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
pub fn start(&mut self, id: &PackageId) -> CargoResult<Option<&'a Package>> {
// First up see if we've already cached this package, in which case
// there's nothing to do.
let slot = self.set.packages
let slot = self
.set
.packages
.get(id)
.ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() {
@ -445,7 +451,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
MaybePackage::Ready(pkg) => {
debug!("{} doesn't need a download", id);
assert!(slot.fill(pkg).is_ok());
return Ok(Some(slot.borrow().unwrap()))
return Ok(Some(slot.borrow().unwrap()));
}
MaybePackage::Download { url, descriptor } => (url, descriptor),
};
@ -483,9 +489,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
warn!("ignoring HTTP/2 activation error: {}", e)
}
} else {
result.with_context(|_| {
"failed to enable HTTP2, is curl not built right?"
})?;
result.with_context(|_| "failed to enable HTTP2, is curl not built right?")?;
}
} else {
handle.http_version(HttpVersion::V11)?;
@ -504,7 +508,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
debug!("{} - {} bytes of data", token, buf.len());
tls::with(|downloads| {
if let Some(downloads) = downloads {
downloads.pending[&token].0.data
downloads.pending[&token]
.0
.data
.borrow_mut()
.extend_from_slice(buf);
}
@ -514,22 +520,23 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
handle.progress(true)?;
handle.progress_function(move |dl_total, dl_cur, _, _| {
tls::with(|downloads| {
match downloads {
tls::with(|downloads| match downloads {
Some(d) => d.progress(token, dl_total as u64, dl_cur as u64),
None => false,
}
})
})?;
// If the progress bar isn't enabled then it may be awhile before the
// first crate finishes downloading so we inform immediately that we're
// downloading crates here.
if self.downloads_finished == 0 &&
self.pending.len() == 0 &&
!self.progress.borrow().as_ref().unwrap().is_enabled()
if self.downloads_finished == 0
&& self.pending.len() == 0
&& !self.progress.borrow().as_ref().unwrap().is_enabled()
{
self.set.config.shell().status("Downloading", "crates ...")?;
self.set
.config
.shell()
.status("Downloading", "crates ...")?;
}
let dl = Download {
@ -569,7 +576,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
let (token, result) = self.wait_for_curl()?;
debug!("{} finished with {:?}", token, result);
let (mut dl, handle) = self.pending.remove(&token)
let (mut dl, handle) = self
.pending
.remove(&token)
.expect("got a token for a non-in-progress transfer");
let data = mem::replace(&mut *dl.data.borrow_mut(), Vec::new());
let mut handle = self.set.multi.remove(handle)?;
@ -581,7 +590,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
let ret = {
let timed_out = &dl.timed_out;
let url = &dl.url;
dl.retry.try(|| {
dl.retry
.try(|| {
if let Err(e) = result {
// If this error is "aborted by callback" then that's
// probably because our progress callback aborted due to
@ -591,7 +601,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
// it's flagged as spurious) and then attach our extra
// information to the error.
if !e.is_aborted_by_callback() {
return Err(e.into())
return Err(e.into());
}
return Err(match timed_out.replace(None) {
@ -602,7 +612,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
err
}
None => e,
}.into())
}
.into());
}
let code = handle.response_code()?;
@ -611,12 +622,12 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
return Err(HttpNot200 {
code,
url: url.to_string(),
}.into())
}
.into());
}
Ok(())
}).chain_err(|| {
format!("failed to download from `{}`", dl.url)
})?
})
.chain_err(|| format!("failed to download from `{}`", dl.url))?
};
match ret {
Some(()) => break (dl, data),
@ -631,7 +642,10 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
// semblance of progress of how we're downloading crates, and if the
// progress bar is enabled this provides a good log of what's happening.
self.progress.borrow_mut().as_mut().unwrap().clear();
self.set.config.shell().status("Downloaded", &dl.descriptor)?;
self.set
.config
.shell()
.status("Downloaded", &dl.descriptor)?;
self.downloads_finished += 1;
self.downloaded_bytes += dl.total.get();
@ -665,7 +679,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
// extracted tarball.
let finish_dur = start.elapsed();
self.updated_at.set(self.updated_at.get() + finish_dur);
self.next_speed_check.set(self.next_speed_check.get() + finish_dur);
self.next_speed_check
.set(self.next_speed_check.get() + finish_dur);
let slot = &self.set.packages[&dl.id];
assert!(slot.fill(pkg).is_ok());
@ -678,7 +693,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
handle.set_token(dl.token)?;
self.updated_at.set(now);
self.next_speed_check.set(now + self.timeout.dur);
self.next_speed_check_bytes_threshold.set(self.timeout.low_speed_limit as u64);
self.next_speed_check_bytes_threshold
.set(self.timeout.low_speed_limit as u64);
dl.timed_out.set(None);
dl.current.set(0);
dl.total.set(0);
@ -704,7 +720,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
// `wait` method on `multi`.
loop {
let n = tls::set(self, || {
self.set.multi.perform()
self.set
.multi
.perform()
.chain_err(|| "failed to perform http requests")
})?;
debug!("handles remaining: {}", n);
@ -721,12 +739,13 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
});
if let Some(pair) = results.pop() {
break Ok(pair)
break Ok(pair);
}
assert!(self.pending.len() > 0);
let timeout = self.set.multi.get_timeout()?
.unwrap_or(Duration::new(5, 0));
self.set.multi.wait(&mut [], timeout)
let timeout = self.set.multi.get_timeout()?.unwrap_or(Duration::new(5, 0));
self.set
.multi
.wait(&mut [], timeout)
.chain_err(|| "failed to wait on curl `Multi`")?;
}
}
@ -744,25 +763,26 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
if delta >= threshold {
self.next_speed_check.set(now + self.timeout.dur);
self.next_speed_check_bytes_threshold.set(
self.timeout.low_speed_limit as u64,
);
self.next_speed_check_bytes_threshold
.set(self.timeout.low_speed_limit as u64);
} else {
self.next_speed_check_bytes_threshold.set(threshold - delta);
}
}
if !self.tick(WhyTick::DownloadUpdate).is_ok() {
return false
return false;
}
// If we've spent too long not actually receiving any data we time out.
if now - self.updated_at.get() > self.timeout.dur {
self.updated_at.set(now);
let msg = format!("failed to download any data for `{}` within {}s",
let msg = format!(
"failed to download any data for `{}` within {}s",
dl.id,
self.timeout.dur.as_secs());
self.timeout.dur.as_secs()
);
dl.timed_out.set(Some(msg));
return false
return false;
}
// If we reached the point in time that we need to check our speed
@ -772,13 +792,15 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
if now >= self.next_speed_check.get() {
self.next_speed_check.set(now + self.timeout.dur);
assert!(self.next_speed_check_bytes_threshold.get() > 0);
let msg = format!("download of `{}` failed to transfer more \
let msg = format!(
"download of `{}` failed to transfer more \
than {} bytes in {}s",
dl.id,
self.timeout.low_speed_limit,
self.timeout.dur.as_secs());
self.timeout.dur.as_secs()
);
dl.timed_out.set(Some(msg));
return false
return false;
}
true
@ -790,7 +812,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
if let WhyTick::DownloadUpdate = why {
if !progress.update_allowed() {
return Ok(())
return Ok(());
}
}
let mut msg = format!("{} crates", self.pending.len());
@ -833,20 +855,22 @@ impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> {
// Don't print a download summary if we're not using a progress bar,
// we've already printed lots of `Downloading...` items.
if !progress.is_enabled() {
return
return;
}
// If we didn't download anything, no need for a summary
if self.downloads_finished == 0 {
return
return;
}
// If an error happened, let's not clutter up the output
if !self.success {
return
return;
}
let mut status = format!("{} crates ({}) in {}",
let mut status = format!(
"{} crates ({}) in {}",
self.downloads_finished,
ByteSize(self.downloaded_bytes),
util::elapsed(self.start.elapsed()));
util::elapsed(self.start.elapsed())
);
if self.largest.0 > ByteSize::mb(1).0 {
status.push_str(&format!(
" (largest was `{}` at {})",
@ -872,9 +896,7 @@ mod tls {
if ptr == 0 {
f(None)
} else {
unsafe {
f(Some(&*(ptr as *const Downloads)))
}
unsafe { f(Some(&*(ptr as *const Downloads))) }
}
}

View File

@ -1,7 +1,7 @@
use std::cmp::Ordering;
use std::fmt::{self, Formatter};
use std::hash::Hash;
use std::hash;
use std::hash::Hash;
use std::path::Path;
use std::sync::Arc;
@ -9,9 +9,9 @@ use semver;
use serde::de;
use serde::ser;
use util::{CargoResult, ToSemver};
use core::source::SourceId;
use core::interning::InternedString;
use core::source::SourceId;
use util::{CargoResult, ToSemver};
/// Identifier for a specific version of a package in a specific source.
#[derive(Clone)]
@ -100,13 +100,13 @@ impl Ord for PackageId {
}
impl PackageId {
pub fn new<T: ToSemver>(name: &str, version: T, sid: &SourceId) -> CargoResult<PackageId> {
pub fn new<T: ToSemver>(name: &str, version: T, sid: SourceId) -> CargoResult<PackageId> {
let v = version.to_semver()?;
Ok(PackageId {
inner: Arc::new(PackageIdInner {
name: InternedString::new(name),
version: v,
source_id: sid.clone(),
source_id: sid,
}),
})
}
@ -117,8 +117,8 @@ impl PackageId {
pub fn version(&self) -> &semver::Version {
&self.inner.version
}
pub fn source_id(&self) -> &SourceId {
&self.inner.source_id
pub fn source_id(&self) -> SourceId {
self.inner.source_id
}
pub fn with_precise(&self, precise: Option<String>) -> PackageId {
@ -131,12 +131,12 @@ impl PackageId {
}
}
pub fn with_source_id(&self, source: &SourceId) -> PackageId {
pub fn with_source_id(&self, source: SourceId) -> PackageId {
PackageId {
inner: Arc::new(PackageIdInner {
name: self.inner.name,
version: self.inner.version.clone(),
source_id: source.clone(),
source_id: source,
}),
}
}
@ -190,9 +190,9 @@ mod tests {
let loc = CRATES_IO_INDEX.to_url().unwrap();
let repo = SourceId::for_registry(&loc).unwrap();
assert!(PackageId::new("foo", "1.0", &repo).is_err());
assert!(PackageId::new("foo", "1", &repo).is_err());
assert!(PackageId::new("foo", "bar", &repo).is_err());
assert!(PackageId::new("foo", "", &repo).is_err());
assert!(PackageId::new("foo", "1.0", repo).is_err());
assert!(PackageId::new("foo", "1", repo).is_err());
assert!(PackageId::new("foo", "bar", repo).is_err());
assert!(PackageId::new("foo", "", repo).is_err());
}
}

View File

@ -6,8 +6,8 @@ use serde::{de, ser};
use url::Url;
use core::PackageId;
use util::{ToSemver, ToUrl};
use util::errors::{CargoResult, CargoResultExt};
use util::{ToSemver, ToUrl};
/// Some or all of the data required to identify a package:
///
@ -104,7 +104,8 @@ impl PackageIdSpec {
let frag = url.fragment().map(|s| s.to_owned());
url.set_fragment(None);
let (name, version) = {
let mut path = url.path_segments()
let mut path = url
.path_segments()
.ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?;
let path_name = path.next_back().ok_or_else(|| {
format_err!(
@ -275,10 +276,10 @@ impl<'de> de::Deserialize<'de> for PackageIdSpec {
#[cfg(test)]
mod tests {
use core::{PackageId, SourceId};
use super::PackageIdSpec;
use url::Url;
use core::{PackageId, SourceId};
use semver::Version;
use url::Url;
#[test]
fn good_parsing() {
@ -367,8 +368,8 @@ mod tests {
fn matching() {
let url = Url::parse("http://example.com").unwrap();
let sid = SourceId::for_registry(&url).unwrap();
let foo = PackageId::new("foo", "1.2.3", &sid).unwrap();
let bar = PackageId::new("bar", "1.2.3", &sid).unwrap();
let foo = PackageId::new("foo", "1.2.3", sid).unwrap();
let bar = PackageId::new("bar", "1.2.3", sid).unwrap();
assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar));

View File

@ -205,11 +205,13 @@ impl ProfileMaker {
.keys()
.filter_map(|key| match *key {
ProfilePackageSpec::All => None,
ProfilePackageSpec::Spec(ref spec) => if spec.matches(pkg_id) {
ProfilePackageSpec::Spec(ref spec) => {
if spec.matches(pkg_id) {
Some(spec)
} else {
None
},
}
}
})
.collect();
match matches.len() {
@ -313,11 +315,13 @@ fn merge_toml(
.iter()
.filter_map(|(key, spec_profile)| match *key {
ProfilePackageSpec::All => None,
ProfilePackageSpec::Spec(ref s) => if s.matches(pkg_id) {
ProfilePackageSpec::Spec(ref s) => {
if s.matches(pkg_id) {
Some(spec_profile)
} else {
None
},
}
}
});
if let Some(spec_profile) = matches.next() {
merge_profile(profile, spec_profile);
@ -586,7 +590,7 @@ impl UnitFor {
pub fn with_for_host(self, for_host: bool) -> UnitFor {
UnitFor {
custom_build: self.custom_build,
panic_ok: self.panic_ok && !for_host
panic_ok: self.panic_ok && !for_host,
}
}
@ -597,16 +601,25 @@ impl UnitFor {
}
/// Returns true if this unit is allowed to set the `panic` compiler flag.
pub fn is_panic_ok(&self) -> bool {
pub fn is_panic_ok(self) -> bool {
self.panic_ok
}
/// All possible values, used by `clean`.
pub fn all_values() -> &'static [UnitFor] {
static ALL: [UnitFor; 3] = [
UnitFor { custom_build: false, panic_ok: true },
UnitFor { custom_build: true, panic_ok: false },
UnitFor { custom_build: false, panic_ok: false },
UnitFor {
custom_build: false,
panic_ok: true,
},
UnitFor {
custom_build: true,
panic_ok: false,
},
UnitFor {
custom_build: false,
panic_ok: false,
},
];
&ALL
}

View File

@ -3,11 +3,11 @@ use std::collections::HashMap;
use semver::VersionReq;
use url::Url;
use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary};
use core::PackageSet;
use util::{profile, Config};
use util::errors::{CargoResult, CargoResultExt};
use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary};
use sources::config::SourceConfigMap;
use util::errors::{CargoResult, CargoResultExt};
use util::{profile, Config};
/// Source of information about a group of packages.
///
@ -22,8 +22,8 @@ pub trait Registry {
Ok(ret)
}
fn describe_source(&self, source: &SourceId) -> String;
fn is_replaced(&self, source: &SourceId) -> bool;
fn describe_source(&self, source: SourceId) -> String;
fn is_replaced(&self, source: SourceId) -> bool;
}
/// This structure represents a registry of known packages. It internally
@ -102,8 +102,8 @@ impl<'cfg> PackageRegistry<'cfg> {
PackageSet::new(package_ids, self.sources, self.config)
}
fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> {
match self.source_ids.get(namespace) {
fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> {
match self.source_ids.get(&namespace) {
// We've previously loaded this source, and we've already locked it,
// so we're not allowed to change it even if `namespace` has a
// slightly different precise version listed.
@ -138,8 +138,8 @@ impl<'cfg> PackageRegistry<'cfg> {
Ok(())
}
pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
for id in ids.iter() {
pub fn add_sources(&mut self, ids: impl IntoIterator<Item = SourceId>) -> CargoResult<()> {
for id in ids {
self.ensure_loaded(id, Kind::Locked)?;
}
Ok(())
@ -150,13 +150,13 @@ impl<'cfg> PackageRegistry<'cfg> {
}
fn add_source(&mut self, source: Box<Source + 'cfg>, kind: Kind) {
let id = source.source_id().clone();
let id = source.source_id();
self.sources.insert(source);
self.source_ids.insert(id.clone(), (id, kind));
self.source_ids.insert(id, (id, kind));
}
pub fn add_override(&mut self, source: Box<Source + 'cfg>) {
self.overrides.push(source.source_id().clone());
self.overrides.push(source.source_id());
self.add_source(source, Kind::Override);
}
@ -165,8 +165,9 @@ impl<'cfg> PackageRegistry<'cfg> {
for dep in deps.iter() {
trace!("\t-> {}", dep);
}
let sub_map = self.locked
.entry(id.source_id().clone())
let sub_map = self
.locked
.entry(id.source_id())
.or_insert_with(HashMap::new);
let sub_vec = sub_map
.entry(id.name().to_string())
@ -200,9 +201,14 @@ impl<'cfg> PackageRegistry<'cfg> {
// Remember that each dependency listed in `[patch]` has to resolve to
// precisely one package, so that's why we're just creating a flat list
// of summaries which should be the same length as `deps` above.
let unlocked_summaries = deps.iter()
let unlocked_summaries = deps
.iter()
.map(|dep| {
debug!("registring a patch for `{}` with `{}`", url, dep.package_name());
debug!(
"registring a patch for `{}` with `{}`",
url,
dep.package_name()
);
// Go straight to the source for resolving `dep`. Load it as we
// normally would and then ask it directly for the list of summaries
@ -216,7 +222,8 @@ impl<'cfg> PackageRegistry<'cfg> {
)
})?;
let mut summaries = self.sources
let mut summaries = self
.sources
.get_mut(dep.source_id())
.expect("loaded source not present")
.query_vec(dep)?
@ -289,14 +296,14 @@ impl<'cfg> PackageRegistry<'cfg> {
&self.patches
}
fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> {
fn load(&mut self, source_id: SourceId, kind: Kind) -> CargoResult<()> {
(|| {
debug!("loading source {}", source_id);
let source = self.source_config.load(source_id)?;
assert_eq!(source.source_id(), source_id);
if kind == Kind::Override {
self.overrides.push(source_id.clone());
self.overrides.push(source_id);
}
self.add_source(source, kind);
@ -309,7 +316,7 @@ impl<'cfg> PackageRegistry<'cfg> {
}
fn query_overrides(&mut self, dep: &Dependency) -> CargoResult<Option<Summary>> {
for s in self.overrides.iter() {
for &s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap();
let dep = Dependency::new_override(&*dep.package_name(), s);
let mut results = src.query_vec(&dep)?;
@ -532,14 +539,14 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
Ok(())
}
fn describe_source(&self, id: &SourceId) -> String {
fn describe_source(&self, id: SourceId) -> String {
match self.sources.get(id) {
Some(src) => src.describe(),
None => id.to_string(),
}
}
fn is_replaced(&self, id: &SourceId) -> bool {
fn is_replaced(&self, id: SourceId) -> bool {
match self.sources.get(id) {
Some(src) => src.is_replaced(),
None => false,
@ -549,7 +556,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Summary) -> Summary {
let pair = locked
.get(summary.source_id())
.get(&summary.source_id())
.and_then(|map| map.get(&*summary.name()))
.and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id()));
@ -561,7 +568,12 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
None => summary,
};
summary.map_dependencies(|dep| {
trace!("\t{}/{}/{}", dep.package_name(), dep.version_req(), dep.source_id());
trace!(
"\t{}/{}/{}",
dep.package_name(),
dep.version_req(),
dep.source_id()
);
// If we've got a known set of overrides for this summary, then
// one of a few cases can arise:
@ -596,7 +608,7 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
// all known locked packages to see if they match this dependency.
// If anything does then we lock it to that and move on.
let v = locked
.get(dep.source_id())
.get(&dep.source_id())
.and_then(|map| map.get(&*dep.package_name()))
.and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id)));
if let Some(&(ref id, _)) = v {
@ -610,16 +622,14 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
// this dependency.
let v = patches.get(dep.source_id().url()).map(|vec| {
let dep2 = dep.clone();
let mut iter = vec.iter().filter(move |p| {
dep2.matches_ignoring_source(p)
});
let mut iter = vec.iter().filter(move |p| dep2.matches_ignoring_source(p));
(iter.next(), iter)
});
if let Some((Some(patch_id), mut remaining)) = v {
assert!(remaining.next().is_none());
let patch_source = patch_id.source_id();
let patch_locked = locked
.get(patch_source)
.get(&patch_source)
.and_then(|m| m.get(&*patch_id.name()))
.map(|list| list.iter().any(|&(ref id, _)| id == patch_id))
.unwrap_or(false);

View File

@ -3,9 +3,9 @@ use std::rc::Rc;
use core::interning::InternedString;
use core::{Dependency, FeatureValue, PackageId, SourceId, Summary};
use im_rc;
use util::CargoResult;
use util::Graph;
use im_rc;
use super::errors::ActivateResult;
use super::types::{ConflictReason, DepInfo, GraphNode, Method, RcList, RegistryQueryer};
@ -55,7 +55,7 @@ impl Context {
let id = summary.package_id();
let prev = self
.activations
.entry((id.name(), id.source_id().clone()))
.entry((id.name(), id.source_id()))
.or_insert_with(|| Rc::new(Vec::new()));
if !prev.iter().any(|c| c == summary) {
self.resolve_graph.push(GraphNode::Add(id.clone()));
@ -126,14 +126,14 @@ impl Context {
pub fn prev_active(&self, dep: &Dependency) -> &[Summary] {
self.activations
.get(&(dep.package_name(), dep.source_id().clone()))
.get(&(dep.package_name(), dep.source_id()))
.map(|v| &v[..])
.unwrap_or(&[])
}
pub fn is_active(&self, id: &PackageId) -> bool {
self.activations
.get(&(id.name(), id.source_id().clone()))
.get(&(id.name(), id.source_id()))
.map(|v| v.iter().any(|s| s.package_id() == id))
.unwrap_or(false)
}

View File

@ -50,7 +50,7 @@ impl EncodableResolve {
let enc_id = EncodablePackageId {
name: pkg.name.clone(),
version: pkg.version.clone(),
source: pkg.source.clone(),
source: pkg.source,
};
if !all_pkgs.insert(enc_id.clone()) {
@ -63,7 +63,7 @@ impl EncodableResolve {
debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
continue;
}
Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?,
Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?,
};
assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
@ -156,7 +156,7 @@ impl EncodableResolve {
let mut unused_patches = Vec::new();
for pkg in self.patch.unused {
let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
Some(src) => PackageId::new(&pkg.name, &pkg.version, src)?,
Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?,
None => continue,
};
unused_patches.push(id);
@ -188,9 +188,9 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
for member in members.iter() {
ret.insert(
member.package_id().name().to_string(),
member.package_id().source_id().clone(),
member.package_id().source_id(),
);
visited.insert(member.package_id().source_id().clone());
visited.insert(member.package_id().source_id());
}
for member in members.iter() {
build_pkg(member, ws, &mut ret, &mut visited);
@ -224,7 +224,7 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
visited: &mut HashSet<SourceId>,
) {
let id = dep.source_id();
if visited.contains(id) || !id.is_path() {
if visited.contains(&id) || !id.is_path() {
return;
}
let path = match id.url().to_file_path() {
@ -235,8 +235,8 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
Ok(p) => p,
Err(_) => return,
};
ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone());
visited.insert(pkg.package_id().source_id().clone());
ret.insert(pkg.name().to_string(), pkg.package_id().source_id());
visited.insert(pkg.package_id().source_id());
build_pkg(&pkg, ws, ret, visited);
}
}
@ -412,10 +412,10 @@ pub fn encodable_package_id(id: &PackageId) -> EncodablePackageId {
}
}
fn encode_source(id: &SourceId) -> Option<SourceId> {
fn encode_source(id: SourceId) -> Option<SourceId> {
if id.is_path() {
None
} else {
Some(id.clone())
Some(id)
}
}

View File

@ -12,10 +12,10 @@ pub use self::source_id::{GitReference, SourceId};
/// versions.
pub trait Source {
/// Returns the `SourceId` corresponding to this source
fn source_id(&self) -> &SourceId;
fn source_id(&self) -> SourceId;
/// Returns the replaced `SourceId` corresponding to this source
fn replaced_source_id(&self) -> &SourceId {
fn replaced_source_id(&self) -> SourceId {
self.source_id()
}
@ -92,12 +92,12 @@ pub enum MaybePackage {
impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
/// Forwards to `Source::source_id`
fn source_id(&self) -> &SourceId {
fn source_id(&self) -> SourceId {
(**self).source_id()
}
/// Forwards to `Source::replaced_source_id`
fn replaced_source_id(&self) -> &SourceId {
fn replaced_source_id(&self) -> SourceId {
(**self).replaced_source_id()
}
@ -155,11 +155,11 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
}
impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
fn source_id(&self) -> &SourceId {
fn source_id(&self) -> SourceId {
(**self).source_id()
}
fn replaced_source_id(&self) -> &SourceId {
fn replaced_source_id(&self) -> SourceId {
(**self).replaced_source_id()
}
@ -231,13 +231,13 @@ impl<'src> SourceMap<'src> {
}
/// Like `HashMap::contains_key`
pub fn contains(&self, id: &SourceId) -> bool {
self.map.contains_key(id)
pub fn contains(&self, id: SourceId) -> bool {
self.map.contains_key(&id)
}
/// Like `HashMap::get`
pub fn get(&self, id: &SourceId) -> Option<&(Source + 'src)> {
let source = self.map.get(id);
pub fn get(&self, id: SourceId) -> Option<&(Source + 'src)> {
let source = self.map.get(&id);
source.map(|s| {
let s: &(Source + 'src) = &**s;
@ -246,8 +246,8 @@ impl<'src> SourceMap<'src> {
}
/// Like `HashMap::get_mut`
pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source + 'src)> {
self.map.get_mut(id).map(|s| {
pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (Source + 'src)> {
self.map.get_mut(&id).map(|s| {
let s: &mut (Source + 'src) = &mut **s;
s
})
@ -261,7 +261,7 @@ impl<'src> SourceMap<'src> {
/// Like `HashMap::insert`, but derives the SourceId key from the Source
pub fn insert(&mut self, source: Box<Source + 'src>) {
let id = source.source_id().clone();
let id = source.source_id();
self.map.insert(id, source);
}

View File

@ -4,18 +4,18 @@ use std::fmt::{self, Formatter};
use std::hash::{self, Hash};
use std::path::Path;
use std::ptr;
use std::sync::Mutex;
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
use std::sync::atomic::Ordering::SeqCst;
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
use std::sync::Mutex;
use serde::ser;
use serde::de;
use serde::ser;
use url::Url;
use ops;
use sources::git;
use sources::{GitSource, PathSource, RegistrySource, CRATES_IO_INDEX};
use sources::DirectorySource;
use sources::{GitSource, PathSource, RegistrySource, CRATES_IO_INDEX};
use util::{CargoResult, Config, ToUrl};
lazy_static! {
@ -74,21 +74,19 @@ impl SourceId {
///
/// The canonical url will be calculated, but the precise field will not
fn new(kind: Kind, url: Url) -> CargoResult<SourceId> {
let source_id = SourceId::wrap(
SourceIdInner {
let source_id = SourceId::wrap(SourceIdInner {
kind,
canonical_url: git::canonicalize_url(&url)?,
url,
precise: None,
name: None,
}
);
});
Ok(source_id)
}
fn wrap(inner: SourceIdInner) -> SourceId {
let mut cache = SOURCE_ID_CACHE.lock().unwrap();
let inner = cache.get(&inner).map(|&x| x).unwrap_or_else(|| {
let inner = cache.get(&inner).cloned().unwrap_or_else(|| {
let inner = Box::leak(Box::new(inner));
cache.insert(inner);
inner
@ -209,15 +207,13 @@ impl SourceId {
pub fn alt_registry(config: &Config, key: &str) -> CargoResult<SourceId> {
let url = config.get_registry_index(key)?;
Ok(SourceId::wrap(
SourceIdInner {
Ok(SourceId::wrap(SourceIdInner {
kind: Kind::Registry,
canonical_url: git::canonicalize_url(&url)?,
url,
precise: None,
name: Some(key.to_string()),
}
))
}))
}
/// Get this source URL
@ -225,7 +221,7 @@ impl SourceId {
&self.inner.url
}
pub fn display_registry(&self) -> String {
pub fn display_registry(self) -> String {
if self.is_default_registry() {
"crates.io index".to_string()
} else {
@ -234,12 +230,12 @@ impl SourceId {
}
/// Is this source from a filesystem path
pub fn is_path(&self) -> bool {
pub fn is_path(self) -> bool {
self.inner.kind == Kind::Path
}
/// Is this source from a registry (either local or not)
pub fn is_registry(&self) -> bool {
pub fn is_registry(self) -> bool {
match self.inner.kind {
Kind::Registry | Kind::LocalRegistry => true,
_ => false,
@ -247,12 +243,12 @@ impl SourceId {
}
/// Is this source from an alternative registry
pub fn is_alt_registry(&self) -> bool {
pub fn is_alt_registry(self) -> bool {
self.is_registry() && self.inner.name.is_some()
}
/// Is this source from a git repository
pub fn is_git(&self) -> bool {
pub fn is_git(self) -> bool {
match self.inner.kind {
Kind::Git(_) => true,
_ => false,
@ -260,7 +256,7 @@ impl SourceId {
}
/// Creates an implementation of `Source` corresponding to this ID.
pub fn load<'a>(&self, config: &'a Config) -> CargoResult<Box<super::Source + 'a>> {
pub fn load<'a>(self, config: &'a Config) -> CargoResult<Box<super::Source + 'a>> {
trace!("loading SourceId; {}", self);
match self.inner.kind {
Kind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)),
@ -290,12 +286,12 @@ impl SourceId {
}
/// Get the value of the precise field
pub fn precise(&self) -> Option<&str> {
pub fn precise(self) -> Option<&'static str> {
self.inner.precise.as_ref().map(|s| &s[..])
}
/// Get the git reference if this is a git source, otherwise None.
pub fn git_reference(&self) -> Option<&GitReference> {
pub fn git_reference(self) -> Option<&'static GitReference> {
match self.inner.kind {
Kind::Git(ref s) => Some(s),
_ => None,
@ -303,17 +299,15 @@ impl SourceId {
}
/// Create a new SourceId from this source with the given `precise`
pub fn with_precise(&self, v: Option<String>) -> SourceId {
SourceId::wrap(
SourceIdInner {
pub fn with_precise(self, v: Option<String>) -> SourceId {
SourceId::wrap(SourceIdInner {
precise: v,
..(*self.inner).clone()
}
)
})
}
/// Whether the remote registry is the standard https://crates.io
pub fn is_default_registry(&self) -> bool {
pub fn is_default_registry(self) -> bool {
match self.inner.kind {
Kind::Registry => {}
_ => return false,
@ -325,9 +319,10 @@ impl SourceId {
///
/// For paths, remove the workspace prefix so the same source will give the
/// same hash in different locations.
pub fn stable_hash<S: hash::Hasher>(&self, workspace: &Path, into: &mut S) {
pub fn stable_hash<S: hash::Hasher>(self, workspace: &Path, into: &mut S) {
if self.is_path() {
if let Ok(p) = self.inner
if let Ok(p) = self
.inner
.url
.to_file_path()
.unwrap()

View File

@ -39,7 +39,9 @@ impl Summary {
links: Option<impl AsRef<str>>,
namespaced_features: bool,
) -> CargoResult<Summary>
where K: Borrow<str> + Ord + Display {
where
K: Borrow<str> + Ord + Display,
{
for dep in dependencies.iter() {
let feature = dep.name_in_toml();
if !namespaced_features && features.get(&*feature).is_some() {
@ -78,7 +80,7 @@ impl Summary {
pub fn version(&self) -> &Version {
self.package_id().version()
}
pub fn source_id(&self) -> &SourceId {
pub fn source_id(&self) -> SourceId {
self.package_id().source_id()
}
pub fn dependencies(&self) -> &[Dependency] {
@ -119,7 +121,7 @@ impl Summary {
self
}
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary {
pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary {
let me = if self.package_id().source_id() == to_replace {
let new_id = self.package_id().with_source_id(replace_with);
self.override_id(new_id)
@ -143,7 +145,9 @@ fn build_feature_map<K>(
dependencies: &[Dependency],
namespaced: bool,
) -> CargoResult<FeatureMap>
where K: Borrow<str> + Ord + Display {
where
K: Borrow<str> + Ord + Display,
{
use self::FeatureValue::*;
let mut dep_map = HashMap::new();
for dep in dependencies.iter() {
@ -258,7 +262,8 @@ where K: Borrow<str> + Ord + Display {
// not recognized as a feature is pegged as a `Crate`. Here we handle the case
// where the dependency exists but is non-optional. It branches on namespaced
// just to provide the correct string for the crate dependency in the error.
(&Crate(ref dep), true, false) => if namespaced {
(&Crate(ref dep), true, false) => {
if namespaced {
bail!(
"Feature `{}` includes `crate:{}` which is not an \
optional dependency.\nConsider adding \
@ -274,12 +279,14 @@ where K: Borrow<str> + Ord + Display {
feature,
dep
)
},
}
}
// If namespaced, the value was tagged as a dependency; if not namespaced,
// this could be anything not defined as a feature. This handles the case
// where no such dependency is actually defined; again, the branch on
// namespaced here is just to provide the correct string in the error.
(&Crate(ref dep), false, _) => if namespaced {
(&Crate(ref dep), false, _) => {
if namespaced {
bail!(
"Feature `{}` includes `crate:{}` which is not a known \
dependency",
@ -293,7 +300,8 @@ where K: Borrow<str> + Ord + Display {
feature,
dep
)
},
}
}
(&Crate(_), true, true) => {}
// If the value is a feature for one of the dependencies, bail out if no such
// dependency is actually defined in the manifest.
@ -372,11 +380,13 @@ impl FeatureValue {
use self::FeatureValue::*;
match *self {
Feature(ref f) => f.to_string(),
Crate(ref c) => if s.namespaced_features() {
Crate(ref c) => {
if s.namespaced_features() {
format!("crate:{}", &c)
} else {
c.to_string()
},
}
}
CrateFeature(ref c, ref f) => [c.as_ref(), f.as_ref()].join("/"),
}
}

View File

@ -236,7 +236,8 @@ impl<'cfg> Workspace<'cfg> {
}
pub fn profiles(&self) -> &Profiles {
let root = self.root_manifest
let root = self
.root_manifest
.as_ref()
.unwrap_or(&self.current_manifest);
match *self.packages.get(root) {
@ -253,7 +254,8 @@ impl<'cfg> Workspace<'cfg> {
match self.root_manifest {
Some(ref p) => p,
None => &self.current_manifest,
}.parent()
}
.parent()
.unwrap()
}
@ -425,8 +427,8 @@ impl<'cfg> Workspace<'cfg> {
let root_package = self.packages.load(&root_manifest_path)?;
match *root_package.workspace_config() {
WorkspaceConfig::Root(ref root_config) => {
members_paths =
root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?;
members_paths = root_config
.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?;
default_members_paths = if let Some(ref default) = root_config.default_members {
Some(root_config.members_paths(default)?)
} else {
@ -475,7 +477,8 @@ impl<'cfg> Workspace<'cfg> {
if self.members.contains(&manifest_path) {
return Ok(());
}
if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root())
if is_path_dep
&& !manifest_path.parent().unwrap().starts_with(self.root())
&& self.find_root(&manifest_path)? != self.root_manifest
{
// If `manifest_path` is a path dependency outside of the workspace,
@ -655,7 +658,8 @@ impl<'cfg> Workspace<'cfg> {
}
if let Some(ref root_manifest) = self.root_manifest {
for pkg in self.members()
for pkg in self
.members()
.filter(|p| p.manifest_path() != root_manifest)
{
let manifest = pkg.manifest();
@ -699,7 +703,7 @@ impl<'cfg> Workspace<'cfg> {
return Ok(p);
}
let source_id = SourceId::for_path(manifest_path.parent().unwrap())?;
let (package, _nested_paths) = ops::read_package(manifest_path, &source_id, self.config)?;
let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?;
loaded.insert(manifest_path.to_path_buf(), package.clone());
Ok(package)
}
@ -745,10 +749,7 @@ impl<'cfg> Workspace<'cfg> {
for warning in warnings {
if warning.is_critical {
let err = format_err!("{}", warning.message);
let cx = format_err!(
"failed to parse manifest at `{}`",
path.display()
);
let cx = format_err!("failed to parse manifest at `{}`", path.display());
return Err(err.context(cx).into());
} else {
let msg = if self.root_manifest.is_none() {
@ -782,7 +783,7 @@ impl<'cfg> Packages<'cfg> {
Entry::Vacant(v) => {
let source_id = SourceId::for_path(key)?;
let (manifest, _nested_paths) =
read_manifest(manifest_path, &source_id, self.config)?;
read_manifest(manifest_path, source_id, self.config)?;
Ok(v.insert(match manifest {
EitherManifest::Real(manifest) => {
MaybePackage::Package(Package::new(manifest, manifest_path))
@ -843,7 +844,8 @@ impl WorkspaceRootConfig {
///
/// This method does NOT consider the `members` list.
fn is_excluded(&self, manifest_path: &Path) -> bool {
let excluded = self.exclude
let excluded = self
.exclude
.iter()
.any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
@ -886,9 +888,9 @@ impl WorkspaceRootConfig {
None => return Ok(Vec::new()),
};
let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?;
let res = res.map(|p| {
p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path))
}).collect::<Result<Vec<_>, _>>()?;
let res = res
.map(|p| p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path)))
.collect::<Result<Vec<_>, _>>()?;
Ok(res)
}
}

View File

@ -73,13 +73,13 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()>
} else {
precise.to_string()
};
dep.source_id().clone().with_precise(Some(precise))
dep.source_id().with_precise(Some(precise))
}
None => dep.source_id().clone().with_precise(None),
None => dep.source_id().with_precise(None),
});
}
}
registry.add_sources(&sources)?;
registry.add_sources(sources)?;
}
let resolve = ops::resolve_with_previous(
@ -141,7 +141,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()>
previous_resolve: &'a Resolve,
resolve: &'a Resolve,
) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
fn key(dep: &PackageId) -> (&str, &SourceId) {
fn key(dep: &PackageId) -> (&str, SourceId) {
(dep.name().as_str(), dep.source_id())
}

View File

@ -1,26 +1,26 @@
use std::collections::btree_map::Entry;
use std::collections::{BTreeMap, BTreeSet};
use std::{env, fs};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::{env, fs};
use semver::{Version, VersionReq};
use tempfile::Builder as TempFileBuilder;
use toml;
use core::compiler::{DefaultExecutor, Executor};
use core::package::PackageSet;
use core::source::SourceMap;
use core::{Dependency, Edition, Package, PackageIdSpec, Source, SourceId};
use core::{PackageId, Workspace};
use core::source::SourceMap;
use core::package::PackageSet;
use core::compiler::{DefaultExecutor, Executor};
use ops::{self, CompileFilter};
use sources::{GitSource, PathSource, SourceConfigMap};
use util::{internal, Config};
use util::{FileLock, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use util::{internal, Config};
use util::{FileLock, Filesystem};
#[derive(Deserialize, Serialize)]
#[serde(untagged)]
@ -59,7 +59,7 @@ impl Drop for Transaction {
pub fn install(
root: Option<&str>,
krates: Vec<&str>,
source_id: &SourceId,
source_id: SourceId,
from_cwd: bool,
vers: Option<&str>,
opts: &ops::CompileOptions,
@ -154,7 +154,7 @@ fn install_one(
root: &Filesystem,
map: &SourceConfigMap,
krate: Option<&str>,
source_id: &SourceId,
source_id: SourceId,
from_cwd: bool,
vers: Option<&str>,
opts: &ops::CompileOptions,
@ -182,7 +182,9 @@ fn install_one(
src.path().display()
)
})?;
select_pkg(src, krate, vers, config, false, &mut |path| path.read_packages())?
select_pkg(src, krate, vers, config, false, &mut |path| {
path.read_packages()
})?
} else {
select_pkg(
map.load(source_id)?,
@ -255,8 +257,7 @@ fn install_one(
}
let exec: Arc<Executor> = Arc::new(DefaultExecutor);
let compile =
ops::compile_ws(&ws, Some(source), opts, &exec).chain_err(|| {
let compile = ops::compile_ws(&ws, Some(source), opts, &exec).chain_err(|| {
if let Some(td) = td_opt.take() {
// preserve the temporary directory, so the user can inspect it
td.into_path();
@ -368,7 +369,8 @@ fn install_one(
}
// Remove empty metadata lines.
let pkgs = list.v1
let pkgs = list
.v1
.iter()
.filter_map(|(p, set)| {
if set.is_empty() {
@ -410,7 +412,7 @@ fn install_one(
Ok(())
}
fn path_source<'a>(source_id: &SourceId, config: &'a Config) -> CargoResult<PathSource<'a>> {
fn path_source<'a>(source_id: SourceId, config: &'a Config) -> CargoResult<PathSource<'a>> {
let path = source_id
.url()
.to_file_path()
@ -439,7 +441,8 @@ where
Some(v) => {
// If the version begins with character <, >, =, ^, ~ parse it as a
// version range, otherwise parse it as a specific version
let first = v.chars()
let first = v
.chars()
.nth(0)
.ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;
@ -495,16 +498,13 @@ where
} else {
vers
};
let dep = Dependency::parse_no_deprecated(
name,
vers_spec,
source.source_id(),
)?;
let dep = Dependency::parse_no_deprecated(name, vers_spec, source.source_id())?;
let deps = source.query_vec(&dep)?;
let pkgid = match deps.iter().map(|p| p.package_id()).max() {
Some(pkgid) => pkgid,
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
let vers_info = vers
.map(|v| format!(" with version `{}`", v))
.unwrap_or_default();
bail!(
"could not find `{}` in {}{}",
@ -624,7 +624,8 @@ fn find_duplicates(
}
};
match *filter {
CompileFilter::Default { .. } => pkg.targets()
CompileFilter::Default { .. } => pkg
.targets()
.iter()
.filter(|t| t.is_bin())
.filter_map(|t| check(t.name().to_string()))
@ -782,17 +783,14 @@ pub fn uninstall_one(
uninstall_pkgid(crate_metadata, metadata, &pkgid, bins, config)
}
fn uninstall_cwd(
root: &Filesystem,
bins: &[String],
config: &Config,
) -> CargoResult<()> {
fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> {
let crate_metadata = metadata(config, root)?;
let metadata = read_crate_list(&crate_metadata)?;
let source_id = SourceId::for_path(config.cwd())?;
let src = path_source(&source_id, config)?;
let (pkg, _source) =
select_pkg(src, None, None, config, true, &mut |path| path.read_packages())?;
let src = path_source(source_id, config)?;
let (pkg, _source) = select_pkg(src, None, None, config, true, &mut |path| {
path.read_packages()
})?;
let pkgid = pkg.package_id();
uninstall_pkgid(crate_metadata, metadata, pkgid, bins, config)
}
@ -821,7 +819,8 @@ fn uninstall_pkgid(
}
}
let bins = bins.iter()
let bins = bins
.iter()
.map(|s| {
if s.ends_with(env::consts::EXE_SUFFIX) {
s.to_string()
@ -865,7 +864,8 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
let config_root = config.get_path("install.root")?;
Ok(flag.map(PathBuf::from)
Ok(flag
.map(PathBuf::from)
.or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
.or_else(move || config_root.map(|v| v.val))
.map(Filesystem::new)

View File

@ -10,13 +10,13 @@ use git2;
use serde_json;
use tar::{Archive, Builder, EntryType, Header};
use core::{Package, Source, SourceId, Workspace};
use core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
use sources::PathSource;
use util::{self, internal, Config, FileLock};
use util::paths;
use util::errors::{CargoResult, CargoResultExt};
use core::{Package, Source, SourceId, Workspace};
use ops;
use sources::PathSource;
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use util::{self, internal, Config, FileLock};
pub struct PackageOpts<'cfg> {
pub config: &'cfg Config,
@ -60,7 +60,8 @@ pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult<Option<FileLoc
if opts.list {
let root = pkg.root();
let mut list: Vec<_> = src.list_files(pkg)?
let mut list: Vec<_> = src
.list_files(pkg)?
.iter()
.map(|file| util::without_prefix(file, root).unwrap().to_path_buf())
.collect();
@ -175,7 +176,7 @@ fn check_repo_state(
p: &Package,
src_files: &[PathBuf],
config: &Config,
allow_dirty: bool
allow_dirty: bool,
) -> CargoResult<Option<String>> {
if let Ok(repo) = git2::Repository::discover(p.root()) {
if let Some(workdir) = repo.workdir() {
@ -194,7 +195,8 @@ fn check_repo_state(
config.shell().verbose(|shell| {
shell.warn(format!(
"No (git) Cargo.toml found at `{}` in workdir `{}`",
path.display(), workdir.display()
path.display(),
workdir.display()
))
})?;
}
@ -212,7 +214,7 @@ fn check_repo_state(
p: &Package,
src_files: &[PathBuf],
repo: &git2::Repository,
allow_dirty: bool
allow_dirty: bool,
) -> CargoResult<Option<String>> {
let workdir = repo.workdir().unwrap();
let dirty = src_files
@ -256,12 +258,15 @@ fn check_repo_state(
fn check_vcs_file_collision(pkg: &Package, src_files: &[PathBuf]) -> CargoResult<()> {
let root = pkg.root();
let vcs_info_path = Path::new(VCS_INFO_FILE);
let collision = src_files.iter().find(|&p| {
util::without_prefix(&p, root).unwrap() == vcs_info_path
});
let collision = src_files
.iter()
.find(|&p| util::without_prefix(&p, root).unwrap() == vcs_info_path);
if collision.is_some() {
bail!("Invalid inclusion of reserved file name \
{} in package source", VCS_INFO_FILE);
bail!(
"Invalid inclusion of reserved file name \
{} in package source",
VCS_INFO_FILE
);
}
Ok(())
}
@ -271,7 +276,7 @@ fn tar(
src_files: &[PathBuf],
vcs_info: Option<&serde_json::Value>,
dst: &File,
filename: &str
filename: &str,
) -> CargoResult<()> {
// Prepare the encoder and its header
let filename = Path::new(filename);
@ -325,7 +330,8 @@ fn tar(
.chain_err(|| format!("failed to add to archive: `{}`", relative))?;
let mut file = File::open(file)
.chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?;
let metadata = file.metadata()
let metadata = file
.metadata()
.chain_err(|| format!("could not learn metadata for: `{}`", relative))?;
header.set_metadata(&metadata);
@ -367,9 +373,9 @@ fn tar(
fnd
);
let mut header = Header::new_ustar();
header.set_path(&path).chain_err(|| {
format!("failed to add to archive: `{}`", fnd)
})?;
header
.set_path(&path)
.chain_err(|| format!("failed to add to archive: `{}`", fnd))?;
let json = format!("{}\n", serde_json::to_string_pretty(json)?);
let mut header = Header::new_ustar();
header.set_path(&path)?;
@ -377,9 +383,8 @@ fn tar(
header.set_mode(0o644);
header.set_size(json.len() as u64);
header.set_cksum();
ar.append(&header, json.as_bytes()).chain_err(|| {
internal(format!("could not archive source file `{}`", fnd))
})?;
ar.append(&header, json.as_bytes())
.chain_err(|| internal(format!("could not archive source file `{}`", fnd)))?;
}
if include_lockfile(pkg) {
@ -412,7 +417,8 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
config.shell().status("Verifying", pkg)?;
let f = GzDecoder::new(tar.file());
let dst = tar.parent()
let dst = tar
.parent()
.join(&format!("{}-{}", pkg.name(), pkg.version()));
if dst.exists() {
paths::remove_dir_all(&dst)?;
@ -426,7 +432,7 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
// Manufacture an ephemeral workspace to ensure that even if the top-level
// package has a workspace we can still build our new crate.
let id = SourceId::for_path(&dst)?;
let mut src = PathSource::new(&dst, &id, ws.config());
let mut src = PathSource::new(&dst, id, ws.config());
let new_pkg = src.root_package()?;
let pkg_fingerprint = src.last_modified_file(&new_pkg)?;
let ws = Workspace::ephemeral(new_pkg, config, None, true)?;

View File

@ -4,14 +4,14 @@ use std::io;
use std::path::{Path, PathBuf};
use core::{EitherManifest, Package, PackageId, SourceId};
use util::{self, Config};
use util::errors::{CargoError, CargoResult};
use util::important_paths::find_project_manifest_exact;
use util::toml::read_manifest;
use util::{self, Config};
pub fn read_package(
path: &Path,
source_id: &SourceId,
source_id: SourceId,
config: &Config,
) -> CargoResult<(Package, Vec<PathBuf>)> {
trace!(
@ -34,7 +34,7 @@ pub fn read_package(
pub fn read_packages(
path: &Path,
source_id: &SourceId,
source_id: SourceId,
config: &Config,
) -> CargoResult<Vec<Package>> {
let mut all_packages = HashMap::new();
@ -129,7 +129,7 @@ fn has_manifest(path: &Path) -> bool {
fn read_nested_packages(
path: &Path,
all_packages: &mut HashMap<PackageId, Package>,
source_id: &SourceId,
source_id: SourceId,
config: &Config,
visited: &mut HashSet<PathBuf>,
errors: &mut Vec<CargoError>,

View File

@ -5,9 +5,9 @@ use std::str;
use std::time::Duration;
use std::{cmp, env};
use log::Level;
use curl::easy::{Easy, SslOpt, InfoType};
use curl::easy::{Easy, InfoType, SslOpt};
use git2;
use log::Level;
use registry::{NewCrate, NewCrateDependency, Registry};
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
@ -68,7 +68,7 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
opts.index.clone(),
opts.registry.clone(),
)?;
verify_dependencies(pkg, &reg_id)?;
verify_dependencies(pkg, reg_id)?;
// Prepare a tarball, with a non-surpressable warning if metadata
// is missing since this is being put online.
@ -84,7 +84,8 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
jobs: opts.jobs,
registry: opts.registry.clone(),
},
)?.unwrap();
)?
.unwrap();
// Upload said tarball to the specified destination
opts.config
@ -95,14 +96,14 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
pkg,
tarball.file(),
&mut registry,
&reg_id,
reg_id,
opts.dry_run,
)?;
Ok(())
}
fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> {
fn verify_dependencies(pkg: &Package, registry_src: SourceId) -> CargoResult<()> {
for dep in pkg.dependencies().iter() {
if dep.source_id().is_path() {
if !dep.specified_req() {
@ -148,10 +149,11 @@ fn transmit(
pkg: &Package,
tarball: &File,
registry: &mut Registry,
registry_id: &SourceId,
registry_id: SourceId,
dry_run: bool,
) -> CargoResult<()> {
let deps = pkg.dependencies()
let deps = pkg
.dependencies()
.iter()
.map(|dep| {
// If the dependency is from a different registry, then include the
@ -177,7 +179,8 @@ fn transmit(
Kind::Normal => "normal",
Kind::Build => "build",
Kind::Development => "dev",
}.to_string(),
}
.to_string(),
registry: dep_registry,
explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()),
})
@ -325,7 +328,7 @@ pub fn registry(
let token = token.or(token_config);
let sid = get_source_id(config, index_config.or(index), registry)?;
let api_host = {
let mut src = RegistrySource::remote(&sid, config);
let mut src = RegistrySource::remote(sid, config);
src.update()
.chain_err(|| format!("failed to update {}", sid))?;
(src.config()?).unwrap().api.unwrap()
@ -401,8 +404,7 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
InfoType::HeaderOut => (">", Level::Debug),
InfoType::DataIn => ("{", Level::Trace),
InfoType::DataOut => ("}", Level::Trace),
InfoType::SslDataIn |
InfoType::SslDataOut => return,
InfoType::SslDataIn | InfoType::SslDataOut => return,
_ => return,
};
match str::from_utf8(data) {
@ -412,7 +414,12 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
}
}
Err(_) => {
log!(level, "http-debug: {} ({} bytes of data)", prefix, data.len());
log!(
level,
"http-debug: {} ({} bytes of data)",
prefix,
data.len()
);
}
}
})?;
@ -429,9 +436,11 @@ pub struct HttpTimeout {
impl HttpTimeout {
pub fn new(config: &Config) -> CargoResult<HttpTimeout> {
let low_speed_limit = config.get::<Option<u32>>("http.low-speed-limit")?
let low_speed_limit = config
.get::<Option<u32>>("http.low-speed-limit")?
.unwrap_or(10);
let seconds = config.get::<Option<u64>>("http.timeout")?
let seconds = config
.get::<Option<u64>>("http.timeout")?
.or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
.unwrap_or(30);
Ok(HttpTimeout {
@ -623,8 +632,8 @@ fn get_source_id(
(_, Some(i)) => SourceId::for_registry(&i.to_url()?),
_ => {
let map = SourceConfigMap::new(config)?;
let src = map.load(&SourceId::crates_io(config)?)?;
Ok(src.replaced_source_id().clone())
let src = map.load(SourceId::crates_io(config)?)?;
Ok(src.replaced_source_id())
}
}
}
@ -650,7 +659,7 @@ pub fn search(
let sid = get_source_id(config, index, reg)?;
let mut regsrc = RegistrySource::remote(&sid, config);
let mut regsrc = RegistrySource::remote(sid, config);
let cfg = match regsrc.config() {
Ok(c) => c,
Err(_) => {

View File

@ -150,7 +150,7 @@ pub fn resolve_with_previous<'a, 'cfg>(
//
// TODO: This seems like a hokey reason to single out the registry as being
// different
let mut to_avoid_sources: HashSet<&SourceId> = HashSet::new();
let mut to_avoid_sources: HashSet<SourceId> = HashSet::new();
if let Some(to_avoid) = to_avoid {
to_avoid_sources.extend(
to_avoid
@ -161,7 +161,8 @@ pub fn resolve_with_previous<'a, 'cfg>(
}
let keep = |p: &&'a PackageId| {
!to_avoid_sources.contains(p.source_id()) && match to_avoid {
!to_avoid_sources.contains(&p.source_id())
&& match to_avoid {
Some(set) => !set.contains(p),
None => true,
}
@ -214,7 +215,7 @@ pub fn resolve_with_previous<'a, 'cfg>(
}
for member in ws.members() {
registry.add_sources(&[member.package_id().source_id().clone()])?;
registry.add_sources(Some(member.package_id().source_id()))?;
}
let mut summaries = Vec::new();
@ -357,7 +358,7 @@ pub fn add_overrides<'a>(
for (path, definition) in paths {
let id = SourceId::for_path(&path)?;
let mut source = PathSource::new_recursive(&path, &id, ws.config());
let mut source = PathSource::new_recursive(&path, id, ws.config());
source.update().chain_err(|| {
format!(
"failed to update path override `{}` \
@ -401,7 +402,7 @@ fn register_previous_locks<'a>(
resolve: &'a Resolve,
keep: &Fn(&&'a PackageId) -> bool,
) {
let path_pkg = |id: &SourceId| {
let path_pkg = |id: SourceId| {
if !id.is_path() {
return None;
}

View File

@ -11,9 +11,9 @@ use url::Url;
use core::{GitReference, Source, SourceId};
use sources::{ReplacedSource, CRATES_IO_REGISTRY};
use util::{Config, ToUrl};
use util::config::ConfigValue;
use util::errors::{CargoResult, CargoResultExt};
use util::{Config, ToUrl};
#[derive(Clone)]
pub struct SourceConfigMap<'cfg> {
@ -72,9 +72,9 @@ impl<'cfg> SourceConfigMap<'cfg> {
self.config
}
pub fn load(&self, id: &SourceId) -> CargoResult<Box<Source + 'cfg>> {
pub fn load(&self, id: SourceId) -> CargoResult<Box<Source + 'cfg>> {
debug!("loading: {}", id);
let mut name = match self.id2name.get(id) {
let mut name = match self.id2name.get(&id) {
Some(name) => name,
None => return Ok(id.load(self.config)?),
};
@ -98,7 +98,7 @@ impl<'cfg> SourceConfigMap<'cfg> {
name = s;
path = p;
}
None if *id == cfg.id => return Ok(id.load(self.config)?),
None if id == cfg.id => return Ok(id.load(self.config)?),
None => {
new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string()));
break;
@ -143,11 +143,11 @@ restore the source replacement configuration to continue the build
);
}
Ok(Box::new(ReplacedSource::new(id, &new_id, new_src)))
Ok(Box::new(ReplacedSource::new(id, new_id, new_src)))
}
fn add(&mut self, name: &str, cfg: SourceConfig) {
self.id2name.insert(cfg.id.clone(), name.to_string());
self.id2name.insert(cfg.id, name.to_string());
self.cfgs.insert(name.to_string(), cfg);
}

View File

@ -8,12 +8,12 @@ use hex;
use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use util::{Config, Sha256};
pub struct DirectorySource<'cfg> {
source_id: SourceId,
@ -29,9 +29,9 @@ struct Checksum {
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
source_id: id,
root: path.to_path_buf(),
config,
packages: HashMap::new(),
@ -71,8 +71,8 @@ impl<'cfg> Source for DirectorySource<'cfg> {
true
}
fn source_id(&self) -> &SourceId {
&self.source_id
fn source_id(&self) -> SourceId {
self.source_id
}
fn update(&mut self) -> CargoResult<()> {
@ -116,7 +116,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
let mut src = PathSource::new(&path, self.source_id, self.config);
src.update()?;
let pkg = src.root_package()?;

View File

@ -2,14 +2,14 @@ use std::fmt::{self, Debug, Formatter};
use url::Url;
use core::source::{Source, SourceId, MaybePackage};
use core::source::{MaybePackage, Source, SourceId};
use core::GitReference;
use core::{Dependency, Package, PackageId, Summary};
use util::Config;
use sources::git::utils::{GitRemote, GitRevision};
use sources::PathSource;
use util::errors::CargoResult;
use util::hex::short_hash;
use sources::PathSource;
use sources::git::utils::{GitRemote, GitRevision};
use util::Config;
pub struct GitSource<'cfg> {
remote: GitRemote,
@ -22,7 +22,7 @@ pub struct GitSource<'cfg> {
}
impl<'cfg> GitSource<'cfg> {
pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
pub fn new(source_id: SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
assert!(source_id.is_git(), "id is not git, id={}", source_id);
let remote = GitRemote::new(source_id.url());
@ -36,7 +36,7 @@ impl<'cfg> GitSource<'cfg> {
let source = GitSource {
remote,
reference,
source_id: source_id.clone(),
source_id,
path_source: None,
rev: None,
ident,
@ -60,7 +60,8 @@ impl<'cfg> GitSource<'cfg> {
fn ident(url: &Url) -> CargoResult<String> {
let url = canonicalize_url(url)?;
let ident = url.path_segments()
let ident = url
.path_segments()
.and_then(|mut s| s.next_back())
.unwrap_or("");
@ -124,14 +125,16 @@ impl<'cfg> Debug for GitSource<'cfg> {
impl<'cfg> Source for GitSource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let src = self.path_source
let src = self
.path_source
.as_mut()
.expect("BUG: update() must be called before query()");
src.query(dep, f)
}
fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let src = self.path_source
let src = self
.path_source
.as_mut()
.expect("BUG: update() must be called before query()");
src.fuzzy_query(dep, f)
@ -145,8 +148,8 @@ impl<'cfg> Source for GitSource<'cfg> {
true
}
fn source_id(&self) -> &SourceId {
&self.source_id
fn source_id(&self) -> SourceId {
self.source_id
}
fn update(&mut self) -> CargoResult<()> {
@ -190,7 +193,8 @@ impl<'cfg> Source for GitSource<'cfg> {
// https://github.com/servo/servo/pull/14397
let short_id = db.to_short_id(&actual_rev).unwrap();
let checkout_path = lock.parent()
let checkout_path = lock
.parent()
.join("checkouts")
.join(&self.ident)
.join(short_id.as_str());
@ -203,7 +207,7 @@ impl<'cfg> Source for GitSource<'cfg> {
db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config);
let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config);
self.path_source = Some(path_source);
self.rev = Some(actual_rev);
@ -237,8 +241,8 @@ impl<'cfg> Source for GitSource<'cfg> {
#[cfg(test)]
mod test {
use url::Url;
use super::ident;
use url::Url;
use util::ToUrl;
#[test]

View File

@ -5,15 +5,15 @@ use std::path::{Path, PathBuf};
use filetime::FileTime;
use git2;
use glob::Pattern;
use ignore::Match;
use ignore::gitignore::GitignoreBuilder;
use ignore::Match;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use ops;
use util::{self, internal, CargoResult};
use util::paths;
use util::Config;
use util::{self, internal, CargoResult};
pub struct PathSource<'cfg> {
source_id: SourceId,
@ -29,9 +29,9 @@ impl<'cfg> PathSource<'cfg> {
///
/// This source will only return the package at precisely the `path`
/// specified, and it will be an error if there's not a package at `path`.
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
source_id: id.clone(),
source_id,
path: path.to_path_buf(),
updated: false,
packages: Vec::new(),
@ -48,7 +48,7 @@ impl<'cfg> PathSource<'cfg> {
///
/// Note that this should be used with care and likely shouldn't be chosen
/// by default!
pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
recursive: true,
..PathSource::new(root, id, config)
@ -78,10 +78,10 @@ impl<'cfg> PathSource<'cfg> {
if self.updated {
Ok(self.packages.clone())
} else if self.recursive {
ops::read_packages(&self.path, &self.source_id, self.config)
ops::read_packages(&self.path, self.source_id, self.config)
} else {
let path = self.path.join("Cargo.toml");
let (pkg, _) = ops::read_package(&path, &self.source_id, self.config)?;
let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?;
Ok(vec![pkg])
}
}
@ -127,13 +127,15 @@ impl<'cfg> PathSource<'cfg> {
.map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e))
};
let glob_exclude = pkg.manifest()
let glob_exclude = pkg
.manifest()
.exclude()
.iter()
.map(|p| glob_parse(p))
.collect::<Result<Vec<_>, _>>()?;
let glob_include = pkg.manifest()
let glob_include = pkg
.manifest()
.include()
.iter()
.map(|p| glob_parse(p))
@ -302,7 +304,8 @@ impl<'cfg> PathSource<'cfg> {
) -> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
let index = repo.index()?;
let root = repo.workdir()
let root = repo
.workdir()
.ok_or_else(|| internal("Can't list files on a bare repository."))?;
let pkg_path = pkg.root();
@ -374,7 +377,8 @@ impl<'cfg> PathSource<'cfg> {
if is_dir.unwrap_or_else(|| file_path.is_dir()) {
warn!(" found submodule {}", file_path.display());
let rel = util::without_prefix(&file_path, root).unwrap();
let rel = rel.to_str()
let rel = rel
.to_str()
.ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?;
// Git submodules are currently only named through `/` path
// separators, explicitly not `\` which windows uses. Who knew?
@ -398,8 +402,8 @@ impl<'cfg> PathSource<'cfg> {
#[cfg(unix)]
fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
use std::os::unix::prelude::*;
use std::ffi::OsStr;
use std::os::unix::prelude::*;
Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
}
#[cfg(windows)]
@ -527,8 +531,8 @@ impl<'cfg> Source for PathSource<'cfg> {
false
}
fn source_id(&self) -> &SourceId {
&self.source_id
fn source_id(&self) -> SourceId {
self.source_id
}
fn update(&mut self) -> CargoResult<()> {

View File

@ -38,29 +38,29 @@ impl<'s> Iterator for UncanonicalizedIter<'s> {
type Item = String;
fn next(&mut self) -> Option<Self::Item> {
if self.hyphen_combination_num > 0 && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore {
if self.hyphen_combination_num > 0
&& self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore
{
return None;
}
let ret = Some(self.input
let ret = Some(
self.input
.chars()
.scan(0u16, |s, c| {
// the check against 15 here's to prevent
// shift overflow on inputs with more then 15 hyphens
if (c == '_' || c == '-') && *s <= 15 {
let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0;
let out = if (c == '_') ^ switch {
'_'
} else {
'-'
};
let out = if (c == '_') ^ switch { '_' } else { '-' };
*s += 1;
Some(out)
} else {
Some(c)
}
})
.collect());
.collect(),
);
self.hyphen_combination_num += 1;
ret
}
@ -78,14 +78,21 @@ fn no_hyphen() {
fn two_hyphen() {
assert_eq!(
UncanonicalizedIter::new("te-_st").collect::<Vec<_>>(),
vec!["te-_st".to_string(), "te__st".to_string(), "te--st".to_string(), "te_-st".to_string()]
vec![
"te-_st".to_string(),
"te__st".to_string(),
"te--st".to_string(),
"te_-st".to_string()
]
)
}
#[test]
fn overflow_hyphen() {
assert_eq!(
UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st").take(100).count(),
UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st")
.take(100)
.count(),
100
)
}
@ -101,13 +108,13 @@ pub struct RegistryIndex<'cfg> {
impl<'cfg> RegistryIndex<'cfg> {
pub fn new(
id: &SourceId,
source_id: SourceId,
path: &Filesystem,
config: &'cfg Config,
locked: bool,
) -> RegistryIndex<'cfg> {
RegistryIndex {
source_id: id.clone(),
source_id,
path: path.clone(),
cache: HashMap::new(),
hashes: HashMap::new(),
@ -247,11 +254,11 @@ impl<'cfg> RegistryIndex<'cfg> {
yanked,
links,
} = serde_json::from_str(line)?;
let pkgid = PackageId::new(&name, &vers, &self.source_id)?;
let pkgid = PackageId::new(&name, &vers, self.source_id)?;
let name = pkgid.name();
let deps = deps
.into_iter()
.map(|dep| dep.into_dep(&self.source_id))
.map(|dep| dep.into_dep(self.source_id))
.collect::<CargoResult<Vec<_>>>()?;
let summary = Summary::new(pkgid, deps, &features, links, false)?;
let summary = summary.set_checksum(cksum.clone());
@ -268,7 +275,7 @@ impl<'cfg> RegistryIndex<'cfg> {
load: &mut RegistryData,
f: &mut FnMut(Summary),
) -> CargoResult<()> {
let source_id = self.source_id.clone();
let source_id = self.source_id;
let name = dep.package_name().as_str();
let summaries = self.summaries(name, load)?;
let summaries = summaries

View File

@ -228,15 +228,17 @@ pub struct RegistryPackage<'a> {
#[test]
fn escaped_cher_in_json() {
let _: RegistryPackage = serde_json::from_str(
r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#
).unwrap();
r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#,
)
.unwrap();
let _: RegistryPackage = serde_json::from_str(
r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"#
).unwrap();
// Now we add escaped cher all the places they can go
// these are not valid, but it should error later than json parsing
let _: RegistryPackage = serde_json::from_str(r#"{
let _: RegistryPackage = serde_json::from_str(
r#"{
"name":"This name has a escaped cher in it \n\t\" ",
"vers":"0.0.1",
"deps":[{
@ -251,8 +253,9 @@ fn escaped_cher_in_json() {
}],
"cksum":"bae3",
"features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]},
"links":" \n\t\" "}"#
).unwrap();
"links":" \n\t\" "}"#,
)
.unwrap();
}
#[derive(Deserialize)]
@ -282,7 +285,7 @@ struct RegistryDependency<'a> {
impl<'a> RegistryDependency<'a> {
/// Converts an encoded dependency in the registry to a cargo dependency
pub fn into_dep(self, default: &SourceId) -> CargoResult<Dependency> {
pub fn into_dep(self, default: SourceId) -> CargoResult<Dependency> {
let RegistryDependency {
name,
req,
@ -298,15 +301,11 @@ impl<'a> RegistryDependency<'a> {
let id = if let Some(registry) = registry {
SourceId::for_registry(&registry.to_url()?)?
} else {
default.clone()
default
};
let mut dep = Dependency::parse_no_deprecated(
package.as_ref().unwrap_or(&name),
Some(&req),
&id,
)?;
let mut dep =
Dependency::parse_no_deprecated(package.as_ref().unwrap_or(&name), Some(&req), id)?;
if package.is_some() {
dep.set_explicit_name_in_toml(&name);
}
@ -350,8 +349,12 @@ pub trait RegistryData {
fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
fn update_index(&mut self) -> CargoResult<()>;
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock>;
fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
-> CargoResult<FileLock>;
fn finish_download(
&mut self,
pkg: &PackageId,
checksum: &str,
data: &[u8],
) -> CargoResult<FileLock>;
fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
true
@ -360,34 +363,34 @@ pub trait RegistryData {
pub enum MaybeLock {
Ready(FileLock),
Download { url: String, descriptor: String }
Download { url: String, descriptor: String },
}
mod index;
mod local;
mod remote;
fn short_name(id: &SourceId) -> String {
let hash = hex::short_hash(id);
fn short_name(id: SourceId) -> String {
let hash = hex::short_hash(&id);
let ident = id.url().host_str().unwrap_or("").to_string();
format!("{}-{}", ident, hash)
}
impl<'cfg> RegistrySource<'cfg> {
pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> {
pub fn remote(source_id: SourceId, config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id);
let ops = remote::RemoteRegistry::new(source_id, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), true)
}
pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> {
pub fn local(source_id: SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id);
let ops = local::LocalRegistry::new(path, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), false)
}
fn new(
source_id: &SourceId,
source_id: SourceId,
config: &'cfg Config,
name: &str,
ops: Box<RegistryData + 'cfg>,
@ -396,7 +399,7 @@ impl<'cfg> RegistrySource<'cfg> {
RegistrySource {
src_path: config.registry_source_path().join(name),
config,
source_id: source_id.clone(),
source_id,
updated: false,
index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked),
index_locked,
@ -468,7 +471,7 @@ impl<'cfg> RegistrySource<'cfg> {
self.ops.update_index()?;
let path = self.ops.index_path();
self.index =
index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
index::RegistryIndex::new(self.source_id, path, self.config, self.index_locked);
Ok(())
}
@ -476,7 +479,7 @@ impl<'cfg> RegistrySource<'cfg> {
let path = self
.unpack_package(package, &path)
.chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
let mut src = PathSource::new(&path, &self.source_id, self.config);
let mut src = PathSource::new(&path, self.source_id, self.config);
src.update()?;
let pkg = match src.download(package)? {
MaybePackage::Ready(pkg) => pkg,
@ -543,8 +546,8 @@ impl<'cfg> Source for RegistrySource<'cfg> {
false
}
fn source_id(&self) -> &SourceId {
&self.source_id
fn source_id(&self) -> SourceId {
self.source_id
}
fn update(&mut self) -> CargoResult<()> {
@ -566,18 +569,14 @@ impl<'cfg> Source for RegistrySource<'cfg> {
fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage> {
let hash = self.index.hash(package, &mut *self.ops)?;
match self.ops.download(package, &hash)? {
MaybeLock::Ready(file) => {
self.get_pkg(package, file).map(MaybePackage::Ready)
}
MaybeLock::Ready(file) => self.get_pkg(package, file).map(MaybePackage::Ready),
MaybeLock::Download { url, descriptor } => {
Ok(MaybePackage::Download { url, descriptor })
}
}
}
fn finish_download(&mut self, package: &PackageId, data: Vec<u8>)
-> CargoResult<Package>
{
fn finish_download(&mut self, package: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
let hash = self.index.hash(package, &mut *self.ops)?;
let file = self.ops.finish_download(package, &hash, &data)?;
self.get_pkg(package, file)

View File

@ -1,23 +1,25 @@
use std::cell::{Cell, Ref, RefCell};
use std::fmt::Write as FmtWrite;
use std::io::SeekFrom;
use std::io::prelude::*;
use std::io::SeekFrom;
use std::mem;
use std::path::Path;
use std::str;
use git2;
use hex;
use serde_json;
use lazycell::LazyCell;
use serde_json;
use core::{PackageId, SourceId};
use sources::git;
use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
use sources::registry::MaybeLock;
use util::{FileLock, Filesystem};
use util::{Config, Sha256};
use sources::registry::{
RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE,
};
use util::errors::{CargoResult, CargoResultExt};
use util::{Config, Sha256};
use util::{FileLock, Filesystem};
pub struct RemoteRegistry<'cfg> {
index_path: Filesystem,
@ -30,11 +32,11 @@ pub struct RemoteRegistry<'cfg> {
}
impl<'cfg> RemoteRegistry<'cfg> {
pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
RemoteRegistry {
index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name),
source_id: source_id.clone(),
source_id,
config,
tree: RefCell::new(None),
repo: LazyCell::new(),
@ -54,9 +56,11 @@ impl<'cfg> RemoteRegistry<'cfg> {
// Ok, now we need to lock and try the whole thing over again.
trace!("acquiring registry index lock");
let lock =
self.index_path
.open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
let lock = self.index_path.open_rw(
Path::new(INDEX_LOCK),
self.config,
"the registry index",
)?;
match git2::Repository::open(&path) {
Ok(repo) => Ok(repo),
Err(_) => {
@ -79,9 +83,8 @@ impl<'cfg> RemoteRegistry<'cfg> {
// things that we don't want.
let mut opts = git2::RepositoryInitOptions::new();
opts.external_template(false);
Ok(git2::Repository::init_opts(&path, &opts).chain_err(|| {
"failed to initialized index git repository"
})?)
Ok(git2::Repository::init_opts(&path, &opts)
.chain_err(|| "failed to initialized index git repository")?)
}
}
})
@ -231,15 +234,22 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) {
write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
}
let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
let url = url
.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(VERSION_TEMPLATE, &pkg.version().to_string());
Ok(MaybeLock::Download { url, descriptor: pkg.to_string() })
Ok(MaybeLock::Download {
url,
descriptor: pkg.to_string(),
})
}
fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
-> CargoResult<FileLock>
{
fn finish_download(
&mut self,
pkg: &PackageId,
checksum: &str,
data: &[u8],
) -> CargoResult<FileLock> {
// Verify what we just downloaded
let mut state = Sha256::new();
state.update(data);

View File

@ -1,5 +1,5 @@
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> {
@ -10,43 +10,25 @@ pub struct ReplacedSource<'cfg> {
impl<'cfg> ReplacedSource<'cfg> {
pub fn new(
to_replace: &SourceId,
replace_with: &SourceId,
to_replace: SourceId,
replace_with: SourceId,
src: Box<Source + 'cfg>,
) -> ReplacedSource<'cfg> {
ReplacedSource {
to_replace: to_replace.clone(),
replace_with: replace_with.clone(),
to_replace,
replace_with,
inner: src,
}
}
}
impl<'cfg> Source for ReplacedSource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
self.inner
.query(
&dep,
&mut |summary| f(summary.map_source(replace_with, to_replace)),
)
.chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
Ok(())
fn source_id(&self) -> SourceId {
self.to_replace
}
fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
self.inner
.fuzzy_query(
&dep,
&mut |summary| f(summary.map_source(replace_with, to_replace)),
)
.chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
Ok(())
fn replaced_source_id(&self) -> SourceId {
self.replace_with
}
fn supports_checksums(&self) -> bool {
@ -57,12 +39,28 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
self.inner.requires_precise()
}
fn source_id(&self) -> &SourceId {
&self.to_replace
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (self.replace_with, self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
self.inner
.query(&dep, &mut |summary| {
f(summary.map_source(replace_with, to_replace))
})
.chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
Ok(())
}
fn replaced_source_id(&self) -> &SourceId {
&self.replace_with
fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (self.replace_with, self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
self.inner
.fuzzy_query(&dep, &mut |summary| {
f(summary.map_source(replace_with, to_replace))
})
.chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
Ok(())
}
fn update(&mut self) -> CargoResult<()> {
@ -73,26 +71,26 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
}
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner
let id = id.with_source_id(self.replace_with);
let pkg = self
.inner
.download(&id)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(match pkg {
MaybePackage::Ready(pkg) => {
MaybePackage::Ready(pkg.map_source(&self.replace_with, &self.to_replace))
MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace))
}
other @ MaybePackage::Download { .. } => other,
})
}
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>)
-> CargoResult<Package>
{
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
let id = id.with_source_id(self.replace_with);
let pkg = self
.inner
.finish_download(&id, data)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace))
Ok(pkg.map_source(self.replace_with, self.to_replace))
}
fn fingerprint(&self, id: &Package) -> CargoResult<String> {
@ -100,12 +98,16 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
}
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let id = id.with_source_id(&self.replace_with);
let id = id.with_source_id(self.replace_with);
self.inner.verify(&id)
}
fn describe(&self) -> String {
format!("{} (which is replacing {})", self.inner.describe(), self.to_replace)
format!(
"{} (which is replacing {})",
self.inner.describe(),
self.to_replace
)
}
fn is_replaced(&self) -> bool {

View File

@ -28,7 +28,7 @@ use self::targets::targets;
pub fn read_manifest(
path: &Path,
source_id: &SourceId,
source_id: SourceId,
config: &Config,
) -> Result<(EitherManifest, Vec<PathBuf>), ManifestError> {
trace!(
@ -46,7 +46,7 @@ pub fn read_manifest(
fn do_read_manifest(
contents: &str,
manifest_file: &Path,
source_id: &SourceId,
source_id: SourceId,
config: &Config,
) -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
let package_root = manifest_file.parent().unwrap();
@ -517,7 +517,6 @@ impl<'de> de::Deserialize<'de> for StringOrVec {
{
let seq = de::value::SeqAccessDeserializer::new(v);
Vec::deserialize(seq).map(StringOrVec)
}
}
@ -661,7 +660,7 @@ pub struct TomlWorkspace {
}
impl TomlProject {
pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult<PackageId> {
pub fn to_package_id(&self, source_id: SourceId) -> CargoResult<PackageId> {
PackageId::new(&self.name, self.version.clone(), source_id)
}
}
@ -669,7 +668,7 @@ impl TomlProject {
struct Context<'a, 'b> {
pkgid: Option<&'a PackageId>,
deps: &'a mut Vec<Dependency>,
source_id: &'a SourceId,
source_id: SourceId,
nested_paths: &'a mut Vec<PathBuf>,
config: &'b Config,
warnings: &'a mut Vec<String>,
@ -789,7 +788,7 @@ impl TomlManifest {
fn to_real_manifest(
me: &Rc<TomlManifest>,
source_id: &SourceId,
source_id: SourceId,
package_root: &Path,
config: &Config,
) -> CargoResult<(Manifest, Vec<PathBuf>)> {
@ -817,7 +816,11 @@ impl TomlManifest {
if c == '_' || c == '-' {
continue;
}
bail!("Invalid character `{}` in package name: `{}`", c, package_name)
bail!(
"Invalid character `{}` in package name: `{}`",
c,
package_name
)
}
let pkgid = project.to_package_id(source_id)?;
@ -1061,7 +1064,7 @@ impl TomlManifest {
fn to_virtual_manifest(
me: &Rc<TomlManifest>,
source_id: &SourceId,
source_id: SourceId,
root: &Path,
config: &Config,
) -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
@ -1258,7 +1261,8 @@ impl TomlDependency {
TomlDependency::Simple(ref version) => DetailedTomlDependency {
version: Some(version.clone()),
..Default::default()
}.to_dependency(name, cx, kind),
}
.to_dependency(name, cx, kind),
TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind),
}
}
@ -1376,7 +1380,7 @@ impl DetailedTomlDependency {
let path = util::normalize_path(&path);
SourceId::for_path(&path)?
} else {
cx.source_id.clone()
cx.source_id
}
}
(None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
@ -1394,8 +1398,8 @@ impl DetailedTomlDependency {
let version = self.version.as_ref().map(|v| &v[..]);
let mut dep = match cx.pkgid {
Some(id) => Dependency::parse(pkg_name, version, &new_source_id, id, cx.config)?,
None => Dependency::parse_no_deprecated(pkg_name, version, &new_source_id)?,
Some(id) => Dependency::parse(pkg_name, version, new_source_id, id, cx.config)?,
None => Dependency::parse_no_deprecated(pkg_name, version, new_source_id)?,
};
dep.set_features(self.features.iter().flat_map(|x| x))
.set_default_features(
@ -1405,7 +1409,7 @@ impl DetailedTomlDependency {
)
.set_optional(self.optional.unwrap_or(false))
.set_platform(cx.platform.clone())
.set_registry_id(&registry_id);
.set_registry_id(registry_id);
if let Some(kind) = kind {
dep.set_kind(kind);
}

View File

@ -67,7 +67,8 @@ fn setup() {
.file(
"config.json",
&format!(r#"{{"dl":"{0}","api":"{0}"}}"#, api()),
).build();
)
.build();
let base = api_path().join("api/v1/crates");
write_crates(&base);
@ -89,8 +90,10 @@ replace-with = 'dummy-registry'
registry = '{reg}'
"#,
reg = registry_url(),
).as_bytes(),
).unwrap();
)
.as_bytes(),
)
.unwrap();
}
#[test]
@ -104,7 +107,7 @@ fn not_update() {
let sid = SourceId::for_registry(&registry_url()).unwrap();
let cfg = Config::new(Shell::new(), paths::root(), paths::home().join(".cargo"));
let mut regsrc = RegistrySource::remote(&sid, &cfg);
let mut regsrc = RegistrySource::remote(sid, &cfg);
regsrc.update().unwrap();
cargo_process("search postgres")
@ -142,7 +145,8 @@ fn simple() {
fn simple_with_host() {
setup();
cargo_process("search postgres --host").arg(registry_url().to_string())
cargo_process("search postgres --host")
.arg(registry_url().to_string())
.with_stderr(
"\
[WARNING] The flag '--host' is no longer valid.
@ -157,9 +161,7 @@ about this warning.
[UPDATING] `[CWD]/registry` index
",
)
.with_stdout_contains(
"hoare = \"0.1.1\" # Design by contract style assertions for Rust",
)
.with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust")
.run();
}
@ -169,7 +171,10 @@ about this warning.
fn simple_with_index_and_host() {
setup();
cargo_process("search postgres --index").arg(registry_url().to_string()).arg("--host").arg(registry_url().to_string())
cargo_process("search postgres --index")
.arg(registry_url().to_string())
.arg("--host")
.arg(registry_url().to_string())
.with_stderr(
"\
[WARNING] The flag '--host' is no longer valid.
@ -184,9 +189,7 @@ about this warning.
[UPDATING] `[CWD]/registry` index
",
)
.with_stdout_contains(
"hoare = \"0.1.1\" # Design by contract style assertions for Rust",
)
.with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust")
.run();
}

View File

@ -87,11 +87,11 @@ pub fn resolve_with_config_raw(
Ok(())
}
fn describe_source(&self, _src: &SourceId) -> String {
fn describe_source(&self, _src: SourceId) -> String {
String::new()
}
fn is_replaced(&self, _src: &SourceId) -> bool {
fn is_replaced(&self, _src: SourceId) -> bool {
false
}
}
@ -127,7 +127,7 @@ pub trait ToDep {
impl ToDep for &'static str {
fn to_dep(self) -> Dependency {
Dependency::parse_no_deprecated(self, Some("1.0.0"), &registry_loc()).unwrap()
Dependency::parse_no_deprecated(self, Some("1.0.0"), registry_loc()).unwrap()
}
}
@ -149,14 +149,14 @@ impl ToPkgId for PackageId {
impl<'a> ToPkgId for &'a str {
fn to_pkgid(&self) -> PackageId {
PackageId::new(*self, "1.0.0", &registry_loc()).unwrap()
PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
}
}
impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
fn to_pkgid(&self) -> PackageId {
let (name, vers) = self;
PackageId::new(name.as_ref(), vers.as_ref(), &registry_loc()).unwrap()
PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
}
}
@ -176,7 +176,7 @@ fn registry_loc() -> SourceId {
static ref EXAMPLE_DOT_COM: SourceId =
SourceId::for_registry(&"http://example.com".to_url().unwrap()).unwrap();
}
EXAMPLE_DOT_COM.clone()
*EXAMPLE_DOT_COM
}
pub fn pkg<T: ToPkgId>(name: T) -> Summary {
@ -201,7 +201,7 @@ pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
}
pub fn pkg_id(name: &str) -> PackageId {
PackageId::new(name, "1.0.0", &registry_loc()).unwrap()
PackageId::new(name, "1.0.0", registry_loc()).unwrap()
}
fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
@ -209,7 +209,7 @@ fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
PackageId::new(name, "1.0.0", &source_id).unwrap()
PackageId::new(name, "1.0.0", source_id).unwrap()
}
pub fn pkg_loc(name: &str, loc: &str) -> Summary {
@ -232,7 +232,7 @@ pub fn dep(name: &str) -> Dependency {
dep_req(name, "*")
}
pub fn dep_req(name: &str, req: &str) -> Dependency {
Dependency::parse_no_deprecated(name, Some(req), &registry_loc()).unwrap()
Dependency::parse_no_deprecated(name, Some(req), registry_loc()).unwrap()
}
pub fn dep_req_kind(name: &str, req: &str, kind: Kind) -> Dependency {
let mut dep = dep_req(name, req);
@ -244,7 +244,7 @@ pub fn dep_loc(name: &str, location: &str) -> Dependency {
let url = location.to_url().unwrap();
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&url, master).unwrap();
Dependency::parse_no_deprecated(name, Some("1.0.0"), &source_id).unwrap()
Dependency::parse_no_deprecated(name, Some("1.0.0"), source_id).unwrap()
}
pub fn dep_kind(name: &str, kind: Kind) -> Dependency {
dep(name).set_kind(kind).clone()
@ -281,9 +281,7 @@ impl fmt::Debug for PrettyPrintRegistry {
} else {
write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?;
for d in s.dependencies() {
if d.kind() == Kind::Normal
&& &d.version_req().to_string() == "*"
{
if d.kind() == Kind::Normal && &d.version_req().to_string() == "*" {
write!(f, "dep(\"{}\"),", d.name_in_toml())?;
} else if d.kind() == Kind::Normal {
write!(