Mirror of https://github.com/rust-lang/cargo.git, synced 2025-09-28 11:20:36 +00:00
PackageId is Copy, clippy thinks we don't need &PackageId or PackageId.clone()
This commit is contained in:
parent
e3435d105d
commit
dae87a2624
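The rationale behind the diff below: `PackageId` is a small `Copy` handle (a pointer to interned data), so clippy's `trivially_copy_pass_by_ref` lint reports `&PackageId` parameters and the matching `.clone()` calls as unnecessary; passing by value is at least as cheap. A minimal sketch of the pattern, using a hypothetical `Id` type rather than cargo's actual definition:

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Id {
    // A Copy handle: nothing but a pointer to immutable, interned data.
    inner: &'static str,
}

// Before: pass-by-reference makes callers write `&id` or `id.clone()`.
fn show_warnings_by_ref(id: &Id) -> bool {
    id.inner.is_empty()
}

// After: a Copy type is passed by value; no borrow, no clone.
fn show_warnings(id: Id) -> bool {
    id.inner.is_empty()
}

fn main() {
    let id = Id { inner: "foo 1.0.0" };
    assert_eq!(show_warnings_by_ref(&id), show_warnings(id));
    // `id` is still usable here: passing by value only copied the pointer.
    assert!(!show_warnings(id));
}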
@@ -1,5 +1,5 @@
-use std::path::Path;
 use std::cell::RefCell;
+use std::path::Path;

 use serde::ser;

@@ -51,9 +51,11 @@ impl BuildConfig {
 let path = Path::new(target)
 .canonicalize()
 .chain_err(|| format_err!("Target path {:?} is not a valid file", target))?;
-Some(path.into_os_string()
-.into_string()
-.map_err(|_| format_err!("Target path is not valid unicode"))?)
+Some(
+path.into_os_string()
+.into_string()
+.map_err(|_| format_err!("Target path is not valid unicode"))?,
+)
 }
 other => other.clone(),
 };
@@ -180,7 +180,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
 )
 }

-pub fn show_warnings(&self, pkg: &PackageId) -> bool {
+pub fn show_warnings(&self, pkg: PackageId) -> bool {
 pkg.source_id().is_path() || self.config.extra_verbose()
 }

@@ -4,9 +4,9 @@ use std::path::PathBuf;
 use std::str::{self, FromStr};

 use super::env_args;
-use util::{CargoResult, CargoResultExt, Cfg, Config, ProcessBuilder, Rustc};
-use core::TargetKind;
 use super::Kind;
+use core::TargetKind;
+use util::{CargoResult, CargoResultExt, Cfg, Config, ProcessBuilder, Rustc};

 #[derive(Clone)]
 pub struct TargetInfo {
@@ -173,17 +173,16 @@ impl TargetInfo {
 Some((ref prefix, ref suffix)) => (prefix, suffix),
 None => return Ok(None),
 };
-let mut ret = vec![
-FileType {
-suffix: suffix.clone(),
-prefix: prefix.clone(),
-flavor,
-should_replace_hyphens: false,
-},
-];
+let mut ret = vec![FileType {
+suffix: suffix.clone(),
+prefix: prefix.clone(),
+flavor,
+should_replace_hyphens: false,
+}];

 // rust-lang/cargo#4500
-if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib")
+if target_triple.ends_with("pc-windows-msvc")
+&& crate_type.ends_with("dylib")
 && suffix == ".dll"
 {
 ret.push(FileType {
@@ -8,13 +8,13 @@

 use std::collections::BTreeMap;

-use core::TargetKind;
-use super::{CompileMode, Context, Kind, Unit};
 use super::context::OutputFile;
-use util::{internal, CargoResult, ProcessBuilder};
-use std::path::PathBuf;
-use serde_json;
+use super::{CompileMode, Context, Kind, Unit};
+use core::TargetKind;
 use semver;
+use serde_json;
+use std::path::PathBuf;
+use util::{internal, CargoResult, ProcessBuilder};

 #[derive(Debug, Serialize)]
 struct Invocation {
@@ -71,7 +71,8 @@ impl Invocation {
 }

 pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> {
-self.program = cmd.get_program()
+self.program = cmd
+.get_program()
 .to_str()
 .ok_or_else(|| format_err!("unicode program string required"))?
 .to_string();
@@ -111,7 +112,8 @@ impl BuildPlan {
 pub fn add(&mut self, cx: &Context, unit: &Unit) -> CargoResult<()> {
 let id = self.plan.invocations.len();
 self.invocation_map.insert(unit.buildkey(), id);
-let deps = cx.dep_targets(&unit)
+let deps = cx
+.dep_targets(&unit)
 .iter()
 .map(|dep| self.invocation_map[&dep.buildkey()])
 .collect();
@@ -127,10 +129,10 @@ impl BuildPlan {
 outputs: &[OutputFile],
 ) -> CargoResult<()> {
 let id = self.invocation_map[invocation_name];
-let invocation = self.plan
-.invocations
-.get_mut(id)
-.ok_or_else(|| internal(format!("couldn't find invocation for {}", invocation_name)))?;
+let invocation =
+self.plan.invocations.get_mut(id).ok_or_else(|| {
+internal(format!("couldn't find invocation for {}", invocation_name))
+})?;

 invocation.update_cmd(cmd)?;
 for output in outputs.iter() {
@@ -5,9 +5,9 @@ use std::path::PathBuf;

 use semver::Version;

-use super::BuildContext;
 use core::{Edition, Package, PackageId, Target, TargetKind};
 use util::{self, join_paths, process, CargoResult, CfgExpr, Config, ProcessBuilder};
+use super::BuildContext;

 pub struct Doctest {
 /// The package being doctested.
@@ -196,7 +196,7 @@ impl<'cfg> Compilation<'cfg> {
 let search_path = join_paths(&search_path, util::dylib_path_envvar())?;

 cmd.env(util::dylib_path_envvar(), &search_path);
-if let Some(env) = self.extra_env.get(pkg.package_id()) {
+if let Some(env) = self.extra_env.get(&pkg.package_id()) {
 for &(ref k, ref v) in env {
 cmd.env(k, v);
 }
@@ -276,8 +276,10 @@ fn target_runner(bcx: &BuildContext) -> CargoResult<Option<(PathBuf, Vec<String>
 if let Some(runner) = bcx.config.get_path_and_args(&key)? {
 // more than one match, error out
 if matching_runner.is_some() {
-bail!("several matching instances of `target.'cfg(..)'.runner` \
-in `.cargo/config`")
+bail!(
+"several matching instances of `target.'cfg(..)'.runner` \
+in `.cargo/config`"
+)
 }

 matching_runner = Some(runner.val);
@@ -271,27 +271,29 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
 )?;

 match file_types {
-Some(types) => for file_type in types {
-let path = out_dir.join(file_type.filename(&file_stem));
-let hardlink = link_stem
-.as_ref()
-.map(|&(ref ld, ref ls)| ld.join(file_type.filename(ls)));
-let export_path = if unit.target.is_custom_build() {
-None
-} else {
-self.export_dir.as_ref().and_then(|export_dir| {
-hardlink.as_ref().and_then(|hardlink| {
-Some(export_dir.join(hardlink.file_name().unwrap()))
+Some(types) => {
+for file_type in types {
+let path = out_dir.join(file_type.filename(&file_stem));
+let hardlink = link_stem
+.as_ref()
+.map(|&(ref ld, ref ls)| ld.join(file_type.filename(ls)));
+let export_path = if unit.target.is_custom_build() {
+None
+} else {
+self.export_dir.as_ref().and_then(|export_dir| {
+hardlink.as_ref().and_then(|hardlink| {
+Some(export_dir.join(hardlink.file_name().unwrap()))
-})
+})
 })
-};
-ret.push(OutputFile {
-path,
-hardlink,
-export_path,
-flavor: file_type.flavor,
-});
-},
+};
+ret.push(OutputFile {
+path,
+hardlink,
+export_path,
+flavor: file_type.flavor,
+});
+}
+}
 // not supported, don't worry about it
 None => {
 unsupported.push(crate_type.to_string());
@@ -392,7 +394,8 @@ fn compute_metadata<'a, 'cfg>(
 let bcx = &cx.bcx;
 let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
 if !(unit.mode.is_any_test() || unit.mode.is_check())
-&& (unit.target.is_dylib() || unit.target.is_cdylib()
+&& (unit.target.is_dylib()
+|| unit.target.is_cdylib()
 || (unit.target.is_bin() && bcx.target_triple().starts_with("wasm32-")))
 && unit.pkg.package_id().source_id().is_path()
 && __cargo_default_lib_metadata.is_err()
@@ -433,7 +436,8 @@ fn compute_metadata<'a, 'cfg>(

 // Mix in the target-metadata of all the dependencies of this target
 {
-let mut deps_metadata = cx.dep_targets(unit)
+let mut deps_metadata = cx
+.dep_targets(unit)
 .iter()
 .map(|dep| metadata_of(dep, cx, metas))
 .collect::<Vec<_>>();
@@ -7,25 +7,25 @@ use std::sync::Arc;

 use jobserver::Client;

-use core::{Package, PackageId, Resolve, Target};
 use core::compiler::compilation;
 use core::profiles::Profile;
+use core::{Package, PackageId, Resolve, Target};
 use util::errors::{CargoResult, CargoResultExt};
-use util::{internal, profile, Config, short_hash};
+use util::{internal, profile, short_hash, Config};

-use super::build_plan::BuildPlan;
 use super::custom_build::{self, BuildDeps, BuildScripts, BuildState};
 use super::fingerprint::Fingerprint;
 use super::job_queue::JobQueue;
 use super::layout::Layout;
 use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind};
+use super::build_plan::BuildPlan;

 mod unit_dependencies;
 use self::unit_dependencies::build_unit_dependencies;

 mod compilation_files;
-pub use self::compilation_files::{Metadata, OutputFile};
 use self::compilation_files::CompilationFiles;
+pub use self::compilation_files::{Metadata, OutputFile};

 /// All information needed to define a Unit.
 ///
@@ -68,7 +68,7 @@ pub struct Unit<'a> {
 impl<'a> Unit<'a> {
 pub fn buildkey(&self) -> String {
 format!("{}-{}", self.pkg.name(), short_hash(self))
 }
-}
+}

 pub struct Context<'a, 'cfg: 'a> {
@@ -80,12 +80,12 @@ pub struct Context<'a, 'cfg: 'a> {
 pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
 pub compiled: HashSet<Unit<'a>>,
 pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
-pub links: Links<'a>,
+pub links: Links,
 pub jobserver: Client,
-primary_packages: HashSet<&'a PackageId>,
+primary_packages: HashSet<PackageId>,
 unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
 files: Option<CompilationFiles<'a, 'cfg>>,
-package_cache: HashMap<&'a PackageId, &'a Package>,
+package_cache: HashMap<PackageId, &'a Package>,
 }

 impl<'a, 'cfg> Context<'a, 'cfg> {
@@ -189,7 +189,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 let out_dir = self.files().build_script_out_dir(dep).display().to_string();
 self.compilation
 .extra_env
-.entry(dep.pkg.package_id().clone())
+.entry(dep.pkg.package_id())
 .or_insert_with(Vec::new)
 .push(("OUT_DIR".to_string(), out_dir));
 }
@@ -235,7 +235,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 if !feats.is_empty() {
 self.compilation
 .cfgs
-.entry(unit.pkg.package_id().clone())
+.entry(unit.pkg.package_id())
 .or_insert_with(|| {
 feats
 .iter()
@@ -247,7 +247,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 if !rustdocflags.is_empty() {
 self.compilation
 .rustdocflags
-.entry(unit.pkg.package_id().clone())
+.entry(unit.pkg.package_id())
 .or_insert(rustdocflags);
 }

@@ -289,7 +289,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 Some(target) => Some(Layout::new(self.bcx.ws, Some(target), dest)?),
 None => None,
 };
-self.primary_packages.extend(units.iter().map(|u| u.pkg.package_id()));
+self.primary_packages
+.extend(units.iter().map(|u| u.pkg.package_id()));

 build_unit_dependencies(
 units,
@@ -361,7 +362,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 // gets a full pre-filtered set of dependencies. This is not super
 // obvious, and clear, but it does work at the moment.
 if unit.target.is_custom_build() {
-let key = (unit.pkg.package_id().clone(), unit.kind);
+let key = (unit.pkg.package_id(), unit.kind);
 if self.build_script_overridden.contains(&key) {
 return Vec::new();
 }
@@ -393,7 +394,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 // incremental compilation or not. Primarily development profiles
 // have it enabled by default while release profiles have it disabled
 // by default.
-let global_cfg = self.bcx
+let global_cfg = self
+.bcx
 .config
 .get_bool("build.incremental")?
 .map(|c| c.val);
@@ -426,12 +428,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 }

 pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
-self.primary_packages.contains(unit.pkg.package_id())
+self.primary_packages.contains(&unit.pkg.package_id())
 }

 /// Gets a package for the given package id.
-pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
-self.package_cache.get(id)
+pub fn get_package(&self, id: PackageId) -> CargoResult<&'a Package> {
+self.package_cache
+.get(&id)
 .cloned()
 .ok_or_else(|| format_err!("failed to find {}", id))
 }
@@ -457,8 +460,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
 format!(
 "The {} target `{}` in package `{}` has the same output \
-filename as the {} target `{}` in package `{}`.\n\
-Colliding filename is: {}\n",
+filename as the {} target `{}` in package `{}`.\n\
+Colliding filename is: {}\n",
 unit.target.kind().description(),
 unit.target.name(),
 unit.pkg.package_id(),
@@ -470,13 +473,16 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 };
 let suggestion = "Consider changing their names to be unique or compiling them separately.\n\
 This may become a hard error in the future, see https://github.com/rust-lang/cargo/issues/6313";
-let report_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> CargoResult<()> {
+let report_collision = |unit: &Unit,
+other_unit: &Unit,
+path: &PathBuf|
+-> CargoResult<()> {
 if unit.target.name() == other_unit.target.name() {
 self.bcx.config.shell().warn(format!(
 "output filename collision.\n\
-{}\
-The targets should have unique names.\n\
-{}",
+{}\
+The targets should have unique names.\n\
+{}",
 describe_collision(unit, other_unit, path),
 suggestion
 ))
@@ -507,28 +513,24 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 keys.sort_unstable();
 for unit in keys {
 for output in self.outputs(unit)?.iter() {
-if let Some(other_unit) =
-output_collisions.insert(output.path.clone(), unit)
-{
+if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
 report_collision(unit, &other_unit, &output.path)?;
 }
 if let Some(hardlink) = output.hardlink.as_ref() {
-if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit)
-{
+if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
 report_collision(unit, &other_unit, hardlink)?;
 }
 }
 if let Some(ref export_path) = output.export_path {
-if let Some(other_unit) =
-output_collisions.insert(export_path.clone(), unit)
-{
-self.bcx.config.shell().warn(format!("`--out-dir` filename collision.\n\
-{}\
-The exported filenames should be unique.\n\
-{}",
+if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
+self.bcx.config.shell().warn(format!(
+"`--out-dir` filename collision.\n\
+{}\
+The exported filenames should be unique.\n\
+{}",
 describe_collision(unit, &other_unit, &export_path),
 suggestion
-))?;
+))?;
 }
 }
 }
@@ -538,20 +540,20 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 }

 #[derive(Default)]
-pub struct Links<'a> {
-validated: HashSet<&'a PackageId>,
-links: HashMap<String, &'a PackageId>,
+pub struct Links {
+validated: HashSet<PackageId>,
+links: HashMap<String, PackageId>,
 }

-impl<'a> Links<'a> {
-pub fn new() -> Links<'a> {
+impl Links {
+pub fn new() -> Links {
 Links {
 validated: HashSet::new(),
 links: HashMap::new(),
 }
 }

-pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> {
+pub fn validate(&mut self, resolve: &Resolve, unit: &Unit) -> CargoResult<()> {
 if !self.validated.insert(unit.pkg.package_id()) {
 return Ok(());
 }
@@ -559,11 +561,11 @@ impl<'a> Links<'a> {
 Some(lib) => lib,
 None => return Ok(()),
 };
-if let Some(prev) = self.links.get(lib) {
+if let Some(&prev) = self.links.get(lib) {
 let pkg = unit.pkg.package_id();

-let describe_path = |pkgid: &PackageId| -> String {
-let dep_path = resolve.path_to_top(pkgid);
+let describe_path = |pkgid: PackageId| -> String {
+let dep_path = resolve.path_to_top(&pkgid);
 let mut dep_path_desc = format!("package `{}`", dep_path[0]);
 for dep in dep_path.iter().skip(1) {
 write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
@@ -585,7 +587,8 @@ impl<'a> Links<'a> {
 lib
 )
 }
-if !unit.pkg
+if !unit
+.pkg
 .manifest()
 .targets()
 .iter()
@@ -28,8 +28,8 @@ use CargoResult;
 struct State<'a: 'tmp, 'cfg: 'a, 'tmp> {
 bcx: &'tmp BuildContext<'a, 'cfg>,
 deps: &'tmp mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
-pkgs: RefCell<&'tmp mut HashMap<&'a PackageId, &'a Package>>,
-waiting_on_download: HashSet<&'a PackageId>,
+pkgs: RefCell<&'tmp mut HashMap<PackageId, &'a Package>>,
+waiting_on_download: HashSet<PackageId>,
 downloads: Downloads<'a, 'cfg>,
 }

@@ -37,7 +37,7 @@ pub fn build_unit_dependencies<'a, 'cfg>(
 roots: &[Unit<'a>],
 bcx: &BuildContext<'a, 'cfg>,
 deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
-pkgs: &mut HashMap<&'a PackageId, &'a Package>,
+pkgs: &mut HashMap<PackageId, &'a Package>,
 ) -> CargoResult<()> {
 assert!(deps.is_empty(), "can only build unit deps once");

@@ -393,7 +393,7 @@ fn new_unit<'a>(
 mode: CompileMode,
 ) -> Unit<'a> {
 let profile = bcx.profiles.get_profile(
-&pkg.package_id(),
+pkg.package_id(),
 bcx.ws.is_member(pkg),
 unit_for,
 mode,
@@ -482,9 +482,9 @@ fn connect_run_custom_build_deps(state: &mut State) {
 }

 impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
-fn get(&mut self, id: &'a PackageId) -> CargoResult<Option<&'a Package>> {
+fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
 let mut pkgs = self.pkgs.borrow_mut();
-if let Some(pkg) = pkgs.get(id) {
+if let Some(pkg) = pkgs.get(&id) {
 return Ok(Some(pkg));
 }
 if !self.waiting_on_download.insert(id) {
@@ -492,7 +492,7 @@ impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
 }
 if let Some(pkg) = self.downloads.start(id)? {
 pkgs.insert(id, pkg);
-self.waiting_on_download.remove(id);
+self.waiting_on_download.remove(&id);
 return Ok(Some(pkg));
 }
 Ok(None)
@@ -510,7 +510,7 @@ impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
 assert!(self.downloads.remaining() > 0);
 loop {
 let pkg = self.downloads.wait()?;
-self.waiting_on_download.remove(pkg.package_id());
+self.waiting_on_download.remove(&pkg.package_id());
 self.pkgs.borrow_mut().insert(pkg.package_id(), pkg);

 // Arbitrarily choose that 5 or more packages concurrently download
@@ -87,7 +87,7 @@ pub fn prepare<'a, 'cfg>(
 unit.target.name()
 ));

-let key = (unit.pkg.package_id().clone(), unit.kind);
+let key = (unit.pkg.package_id(), unit.kind);
 let overridden = cx.build_script_overridden.contains(&key);
 let (work_dirty, work_fresh) = if overridden {
 (Work::noop(), Work::noop())
@@ -106,7 +106,7 @@ pub fn prepare<'a, 'cfg>(
 }
 }

-fn emit_build_output(output: &BuildOutput, id: &PackageId) {
+fn emit_build_output(output: &BuildOutput, package_id: PackageId) {
 let library_paths = output
 .library_paths
 .iter()
@@ -114,7 +114,7 @@ fn emit_build_output(output: &BuildOutput, id: &PackageId) {
 .collect::<Vec<_>>();

 machine_message::emit(&machine_message::BuildScript {
-package_id: id,
+package_id,
 linked_libs: &output.library_links,
 linked_paths: &library_paths,
 cfgs: &output.cfgs,
@@ -230,7 +230,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 if unit.mode.is_run_custom_build() {
 Some((
 unit.pkg.manifest().links().unwrap().to_string(),
-unit.pkg.package_id().clone(),
+unit.pkg.package_id(),
 ))
 } else {
 None
@@ -240,7 +240,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 };
 let pkg_name = unit.pkg.to_string();
 let build_state = Arc::clone(&cx.build_state);
-let id = unit.pkg.package_id().clone();
+let id = unit.pkg.package_id();
 let (output_file, err_file, root_output_file) = {
 let build_output_parent = script_out_dir.parent().unwrap();
 let output_file = build_output_parent.join("output");
@@ -250,7 +250,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 };
 let host_target_root = cx.files().target_root().to_path_buf();
 let all = (
-id.clone(),
+id,
 pkg_name.clone(),
 Arc::clone(&build_state),
 output_file.clone(),
@@ -267,8 +267,13 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 .and_then(|bytes| util::bytes2path(&bytes))
 .unwrap_or_else(|_| script_out_dir.clone());

-let prev_output =
-BuildOutput::parse_file(&output_file, &pkg_name, &prev_script_out_dir, &script_out_dir).ok();
+let prev_output = BuildOutput::parse_file(
+&output_file,
+&pkg_name,
+&prev_script_out_dir,
+&script_out_dir,
+)
+.ok();
 let deps = BuildDeps::new(&output_file, prev_output.as_ref());
 cx.build_explicit_deps.insert(*unit, deps);

@@ -301,7 +306,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 if !build_plan {
 let build_state = build_state.outputs.lock().unwrap();
 for (name, id) in lib_deps {
-let key = (id.clone(), kind);
+let key = (id, kind);
 let state = build_state.get(&key).ok_or_else(|| {
 internal(format!(
 "failed to locate build state for env \
@@ -355,7 +360,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 BuildOutput::parse(&output.stdout, &pkg_name, &script_out_dir, &script_out_dir)?;

 if json_messages {
-emit_build_output(&parsed_output, &id);
+emit_build_output(&parsed_output, id);
 }
 build_state.insert(id, kind, parsed_output);
 }
@@ -369,13 +374,16 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 let (id, pkg_name, build_state, output_file, script_out_dir) = all;
 let output = match prev_output {
 Some(output) => output,
-None => {
-BuildOutput::parse_file(&output_file, &pkg_name, &prev_script_out_dir, &script_out_dir)?
-}
+None => BuildOutput::parse_file(
+&output_file,
+&pkg_name,
+&prev_script_out_dir,
+&script_out_dir,
+)?,
 };

 if json_messages {
-emit_build_output(&output, &id);
+emit_build_output(&output, id);
 }

 build_state.insert(id, kind, output);
@@ -412,7 +420,12 @@ impl BuildOutput {
 script_out_dir: &Path,
 ) -> CargoResult<BuildOutput> {
 let contents = paths::read_bytes(path)?;
-BuildOutput::parse(&contents, pkg_name, script_out_dir_when_generated, script_out_dir)
+BuildOutput::parse(
+&contents,
+pkg_name,
+script_out_dir_when_generated,
+script_out_dir,
+)
 }

 // Parses the output of a script.
@@ -620,14 +633,15 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
 }

 {
-let key = unit.pkg
+let key = unit
+.pkg
 .manifest()
 .links()
 .map(|l| (l.to_string(), unit.kind));
 let build_state = &cx.build_state;
 if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
-let key = (unit.pkg.package_id().clone(), unit.kind);
-cx.build_script_overridden.insert(key.clone());
+let key = (unit.pkg.package_id(), unit.kind);
+cx.build_script_overridden.insert(key);
 build_state
 .outputs
 .lock()
@@ -656,7 +670,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
 ret.plugins
 .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
 } else if unit.target.linkable() {
-for &(ref pkg, kind) in dep_scripts.to_link.iter() {
+for &(pkg, kind) in dep_scripts.to_link.iter() {
 add_to_link(&mut ret, pkg, kind);
 }
 }
@@ -670,9 +684,9 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca

 // When adding an entry to 'to_link' we only actually push it on if the
 // script hasn't seen it yet (e.g. we don't push on duplicates).
-fn add_to_link(scripts: &mut BuildScripts, pkg: &PackageId, kind: Kind) {
-if scripts.seen_to_link.insert((pkg.clone(), kind)) {
-scripts.to_link.push((pkg.clone(), kind));
+fn add_to_link(scripts: &mut BuildScripts, pkg: PackageId, kind: Kind) {
+if scripts.seen_to_link.insert((pkg, kind)) {
+scripts.to_link.push((pkg, kind));
 }
 }
 }
@@ -15,9 +15,9 @@ use util::errors::{CargoResult, CargoResultExt};
 use util::paths;
 use util::{internal, profile, Dirty, Fresh, Freshness};

-use super::{Context, BuildContext, FileFlavor, Unit};
 use super::custom_build::BuildDeps;
 use super::job::Work;
+use super::{BuildContext, Context, FileFlavor, Unit};

 /// A tuple result of the `prepare_foo` functions in this module.
 ///
@@ -88,11 +88,13 @@ pub fn prepare_target<'a, 'cfg>(
 let root = cx.files().out_dir(unit);
 let missing_outputs = {
 if unit.mode.is_doc() {
-!root.join(unit.target.crate_name())
+!root
+.join(unit.target.crate_name())
 .join("index.html")
 .exists()
 } else {
-match cx.outputs(unit)?
+match cx
+.outputs(unit)?
 .iter()
 .filter(|output| output.flavor != FileFlavor::DebugInfo)
 .find(|output| !output.path.exists())
@@ -159,7 +161,10 @@ pub struct Fingerprint {
 target: u64,
 profile: u64,
 path: u64,
-#[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")]
+#[serde(
+serialize_with = "serialize_deps",
+deserialize_with = "deserialize_deps"
+)]
 deps: Vec<DepFingerprint>,
 local: Vec<LocalFingerprint>,
 #[serde(skip_serializing, skip_deserializing)]
@@ -172,8 +177,7 @@ fn serialize_deps<S>(deps: &[DepFingerprint], ser: S) -> Result<S::Ok, S::Error>
 where
 S: ser::Serializer,
 {
-ser.collect_seq(deps.iter()
-.map(|&(ref a, ref b, ref c)| (a, b, c.hash())))
+ser.collect_seq(deps.iter().map(|&(ref a, ref b, ref c)| (a, b, c.hash())))
 }

 fn deserialize_deps<'de, D>(d: D) -> Result<Vec<DepFingerprint>, D::Error>
@@ -363,7 +367,8 @@ impl hash::Hash for Fingerprint {
 } = *self;
 (
 rustc, features, target, path, profile, local, edition, rustflags,
-).hash(h);
+)
+.hash(h);

 h.write_usize(deps.len());
 for &(ref pkg_id, ref name, ref fingerprint) in deps {
@@ -400,9 +405,9 @@ impl<'de> de::Deserialize<'de> for MtimeSlot {
 D: de::Deserializer<'de>,
 {
 let kind: Option<(i64, u32)> = de::Deserialize::deserialize(d)?;
-Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
-FileTime::from_unix_time(s, n)
-}))))
+Ok(MtimeSlot(Mutex::new(
+kind.map(|(s, n)| FileTime::from_unix_time(s, n)),
+)))
 }
 }

@@ -435,7 +440,8 @@ fn calculate<'a, 'cfg>(
 // induce a recompile, they're just dependencies in the sense that they need
 // to be built.
 let deps = cx.dep_targets(unit);
-let deps = deps.iter()
+let deps = deps
+.iter()
 .filter(|u| !u.target.is_custom_build() && !u.target.is_bin())
 .map(|dep| {
 calculate(cx, dep).and_then(|fingerprint| {
@@ -548,7 +554,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(
 // the kind of fingerprint by reinterpreting the dependencies output by the
 // build script.
 let state = Arc::clone(&cx.build_state);
-let key = (unit.pkg.package_id().clone(), unit.kind);
+let key = (unit.pkg.package_id(), unit.kind);
 let pkg_root = unit.pkg.root().to_path_buf();
 let target_root = cx.files().target_root().to_path_buf();
 let write_fingerprint = Work::new(move |_| {
@@ -581,7 +587,7 @@ fn build_script_local_fingerprints<'a, 'cfg>(
 //
 // Note that the `None` here means that we don't want to update the local
 // fingerprint afterwards because this is all just overridden.
-if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
+if let Some(output) = state.get(&(unit.pkg.package_id(), unit.kind)) {
 debug!("override local fingerprints deps");
 let s = format!(
 "overridden build state with hash: {}",
@@ -695,7 +701,8 @@ pub fn parse_dep_info(pkg: &Package, dep_info: &Path) -> CargoResult<Option<Vec<
 Ok(data) => data,
 Err(_) => return Ok(None),
 };
-let paths = data.split(|&x| x == 0)
+let paths = data
+.split(|&x| x == 0)
 .filter(|x| !x.is_empty())
 .map(|p| util::bytes2path(p).map(|p| pkg.root().join(p)))
 .collect::<Result<Vec<_>, _>>()?;
@@ -1,7 +1,7 @@
 use std::fmt;

-use util::{CargoResult, Dirty, Fresh, Freshness};
 use super::job_queue::JobState;
+use util::{CargoResult, Dirty, Fresh, Freshness};

 pub struct Job {
 dirty: Work,
@@ -35,9 +35,9 @@ pub struct JobQueue<'a> {
 rx: Receiver<Message<'a>>,
 active: Vec<Key<'a>>,
 pending: HashMap<Key<'a>, PendingBuild>,
-compiled: HashSet<&'a PackageId>,
-documented: HashSet<&'a PackageId>,
-counts: HashMap<&'a PackageId, usize>,
+compiled: HashSet<PackageId>,
+documented: HashSet<PackageId>,
+counts: HashMap<PackageId, usize>,
 is_release: bool,
 }

@@ -52,7 +52,7 @@ struct PendingBuild {

 #[derive(Clone, Copy, Eq, PartialEq, Hash)]
 struct Key<'a> {
-pkg: &'a PackageId,
+pkg: PackageId,
 target: &'a Target,
 profile: Profile,
 kind: Kind,
@@ -398,7 +398,7 @@ impl<'a> JobQueue<'a> {
 info!("start: {:?}", key);

 self.active.push(key);
-*self.counts.get_mut(key.pkg).unwrap() -= 1;
+*self.counts.get_mut(&key.pkg).unwrap() -= 1;

 let my_tx = self.tx.clone();
 let doit = move || {
@@ -424,7 +424,7 @@ impl<'a> JobQueue<'a> {
 fn emit_warnings(&self, msg: Option<&str>, key: &Key<'a>, cx: &mut Context) -> CargoResult<()> {
 let output = cx.build_state.outputs.lock().unwrap();
 let bcx = &mut cx.bcx;
-if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) {
+if let Some(output) = output.get(&(key.pkg, key.kind)) {
 if let Some(msg) = msg {
 if !output.warnings.is_empty() {
 writeln!(bcx.config.shell().err(), "{}\n", msg)?;
@@ -472,8 +472,8 @@ impl<'a> JobQueue<'a> {
 key: &Key<'a>,
 fresh: Freshness,
 ) -> CargoResult<()> {
-if (self.compiled.contains(key.pkg) && !key.mode.is_doc())
-|| (self.documented.contains(key.pkg) && key.mode.is_doc())
+if (self.compiled.contains(&key.pkg) && !key.mode.is_doc())
+|| (self.documented.contains(&key.pkg) && key.mode.is_doc())
 {
 return Ok(());
 }
@@ -499,8 +499,8 @@ impl<'a> JobQueue<'a> {
 }
 Fresh => {
 // If doctest is last, only print "Fresh" if nothing has been printed.
-if self.counts[key.pkg] == 0
-&& !(key.mode == CompileMode::Doctest && self.compiled.contains(key.pkg))
+if self.counts[&key.pkg] == 0
+&& !(key.mode == CompileMode::Doctest && self.compiled.contains(&key.pkg))
 {
 self.compiled.insert(key.pkg);
 config.shell().verbose(|c| c.status("Fresh", key.pkg))?;
@@ -135,9 +135,9 @@ impl Layout {
 ///
 /// This is recommended to prevent derived/temporary files from bloating backups.
 fn exclude_from_backups(&self, path: &Path) {
-use std::ptr;
-use core_foundation::{number, string, url};
 use core_foundation::base::TCFType;
+use core_foundation::{number, string, url};
+use std::ptr;

 // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
 let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
@@ -14,7 +14,7 @@ use core::profiles::{Lto, Profile};
 use core::{PackageId, Target};
 use util::errors::{CargoResult, CargoResultExt, Internal, ProcessError};
 use util::paths;
-use util::{self, machine_message, Freshness, ProcessBuilder, process};
+use util::{self, machine_message, process, Freshness, ProcessBuilder};
 use util::{internal, join_paths, profile};

 use self::build_plan::BuildPlan;
@@ -23,8 +23,8 @@ use self::job_queue::JobQueue;

 use self::output_depinfo::output_depinfo;

-pub use self::build_context::{BuildContext, FileFlavor, TargetConfig, TargetInfo};
 pub use self::build_config::{BuildConfig, CompileMode, MessageFormat};
+pub use self::build_context::{BuildContext, FileFlavor, TargetConfig, TargetInfo};
 pub use self::compilation::{Compilation, Doctest};
 pub use self::context::{Context, Unit};
 pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts};
@@ -65,9 +65,9 @@ pub trait Executor: Send + Sync + 'static {
 fn exec(
 &self,
 cmd: ProcessBuilder,
-_id: &PackageId,
+_id: PackageId,
 _target: &Target,
-_mode: CompileMode
+_mode: CompileMode,
 ) -> CargoResult<()> {
 cmd.exec()?;
 Ok(())
@@ -76,7 +76,7 @@ pub trait Executor: Send + Sync + 'static {
 fn exec_and_capture_output(
 &self,
 cmd: ProcessBuilder,
-id: &PackageId,
+id: PackageId,
 target: &Target,
 mode: CompileMode,
 _state: &job_queue::JobState<'_>,
@@ -88,7 +88,7 @@ pub trait Executor: Send + Sync + 'static {
 fn exec_json(
 &self,
 cmd: ProcessBuilder,
-_id: &PackageId,
+_id: PackageId,
 _target: &Target,
 _mode: CompileMode,
 handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
@@ -114,7 +114,7 @@ impl Executor for DefaultExecutor {
 fn exec_and_capture_output(
 &self,
 cmd: ProcessBuilder,
-_id: &PackageId,
+_id: PackageId,
 _target: &Target,
 _mode: CompileMode,
 state: &job_queue::JobState<'_>,
@@ -207,7 +207,7 @@ fn rustc<'a, 'cfg>(

 // Prepare the native lib state (extra -L and -l flags)
 let build_state = cx.build_state.clone();
-let current_id = unit.pkg.package_id().clone();
+let current_id = unit.pkg.package_id();
 let build_deps = load_build_deps(cx, unit);

 // If we are a binary and the package also contains a library, then we
@@ -222,12 +222,13 @@ fn rustc<'a, 'cfg>(
 root.join(&crate_name)
 } else {
 root.join(&cx.files().file_stem(unit))
-}.with_extension("d");
+}
+.with_extension("d");
 let dep_info_loc = fingerprint::dep_info_loc(cx, unit);

 rustc.args(&cx.bcx.rustflags_args(unit)?);
 let json_messages = cx.bcx.build_config.json_messages();
-let package_id = unit.pkg.package_id().clone();
+let package_id = unit.pkg.package_id();
 let target = unit.target.clone();
 let mode = unit.mode;

@@ -257,11 +258,11 @@ fn rustc<'a, 'cfg>(
 &build_state,
 &build_deps,
 pass_l_flag,
-&current_id,
+current_id,
 )?;
 add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
 }
-add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
+add_custom_env(&mut rustc, &build_state, current_id, kind)?;
 }

 for output in outputs.iter() {
@@ -293,18 +294,18 @@ fn rustc<'a, 'cfg>(
 if json_messages {
 exec.exec_json(
 rustc,
-&package_id,
+package_id,
 &target,
 mode,
 &mut assert_is_empty,
-&mut |line| json_stderr(line, &package_id, &target),
+&mut |line| json_stderr(line, package_id, &target),
 )
 .map_err(internal_if_simple_exit_code)
 .chain_err(|| format!("Could not compile `{}`.", name))?;
 } else if build_plan {
 state.build_plan(buildkey, rustc.clone(), outputs.clone());
 } else {
-exec.exec_and_capture_output(rustc, &package_id, &target, mode, state)
+exec.exec_and_capture_output(rustc, package_id, &target, mode, state)
 .map_err(internal_if_simple_exit_code)
 .chain_err(|| format!("Could not compile `{}`.", name))?;
 }
@@ -344,7 +345,7 @@ fn rustc<'a, 'cfg>(
 build_state: &BuildMap,
 build_scripts: &BuildScripts,
 pass_l_flag: bool,
-current_id: &PackageId,
+current_id: PackageId,
 ) -> CargoResult<()> {
 for key in build_scripts.to_link.iter() {
 let output = build_state.get(key).ok_or_else(|| {
@@ -356,7 +357,7 @@ fn rustc<'a, 'cfg>(
 for path in output.library_paths.iter() {
 rustc.arg("-L").arg(path);
 }
-if key.0 == *current_id {
+if key.0 == current_id {
 for cfg in &output.cfgs {
 rustc.arg("--cfg").arg(cfg);
 }
@@ -375,10 +376,10 @@ fn rustc<'a, 'cfg>(
 fn add_custom_env(
 rustc: &mut ProcessBuilder,
 build_state: &BuildMap,
-current_id: &PackageId,
+current_id: PackageId,
 kind: Kind,
 ) -> CargoResult<()> {
-let key = (current_id.clone(), kind);
+let key = (current_id, kind);
 if let Some(output) = build_state.get(&key) {
 for &(ref name, ref value) in output.env.iter() {
 rustc.env(name, value);
@@ -398,11 +399,12 @@ fn link_targets<'a, 'cfg>(
 let bcx = cx.bcx;
 let outputs = cx.outputs(unit)?;
 let export_dir = cx.files().export_dir();
-let package_id = unit.pkg.package_id().clone();
+let package_id = unit.pkg.package_id();
 let profile = unit.profile;
 let unit_mode = unit.mode;
-let features = bcx.resolve
-.features_sorted(&package_id)
+let features = bcx
+.resolve
+.features_sorted(package_id)
 .into_iter()
 .map(|s| s.to_owned())
 .collect();
@@ -456,7 +458,7 @@ fn link_targets<'a, 'cfg>(
 };

 machine_message::emit(&machine_message::Artifact {
-package_id: &package_id,
+package_id,
 target: &target,
 profile: art_profile,
 features,
@@ -526,10 +528,9 @@ fn add_plugin_deps(
 let var = util::dylib_path_envvar();
 let search_path = rustc.get_env(var).unwrap_or_default();
 let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
-for id in build_scripts.plugins.iter() {
-let key = (id.clone(), Kind::Host);
+for &id in build_scripts.plugins.iter() {
 let output = build_state
-.get(&key)
+.get(&(id, Kind::Host))
 .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
 search_path.append(&mut filter_dynamic_search_path(
 output.library_paths.iter(),
@@ -637,9 +638,9 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult

 let name = unit.pkg.name().to_string();
 let build_state = cx.build_state.clone();
-let key = (unit.pkg.package_id().clone(), unit.kind);
+let key = (unit.pkg.package_id(), unit.kind);
 let json_messages = bcx.build_config.json_messages();
-let package_id = unit.pkg.package_id().clone();
+let package_id = unit.pkg.package_id();
 let target = unit.target.clone();

 Ok(Work::new(move |state| {
@@ -657,9 +658,10 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
 rustdoc
 .exec_with_streaming(
 &mut assert_is_empty,
-&mut |line| json_stderr(line, &package_id, &target),
+&mut |line| json_stderr(line, package_id, &target),
 false,
-).map(drop)
+)
+.map(drop)
 } else {
 state.capture_output(&rustdoc, None, false).map(drop)
 };
@@ -719,15 +721,23 @@ fn add_cap_lints(bcx: &BuildContext, unit: &Unit, cmd: &mut ProcessBuilder) {

 fn add_color(bcx: &BuildContext, cmd: &mut ProcessBuilder) {
 let shell = bcx.config.shell();
-let color = if shell.supports_color() { "always" } else { "never" };
+let color = if shell.supports_color() {
+"always"
+} else {
+"never"
+};
 cmd.args(&["--color", color]);
 }

 fn add_error_format(bcx: &BuildContext, cmd: &mut ProcessBuilder) {
 match bcx.build_config.message_format {
 MessageFormat::Human => (),
-MessageFormat::Json => { cmd.arg("--error-format").arg("json"); },
-MessageFormat::Short => { cmd.arg("--error-format").arg("short"); },
+MessageFormat::Json => {
+cmd.arg("--error-format").arg("json");
+}
+MessageFormat::Short => {
+cmd.arg("--error-format").arg("short");
+}
 }
 }

@@ -1009,7 +1019,7 @@ fn assert_is_empty(line: &str) -> CargoResult<()> {
 }
 }

-fn json_stderr(line: &str, package_id: &PackageId, target: &Target) -> CargoResult<()> {
+fn json_stderr(line: &str, package_id: PackageId, target: &Target) -> CargoResult<()> {
 // stderr from rustc/rustdoc can have a mix of JSON and non-JSON output
 if line.starts_with('{') {
 // Handle JSON lines
@@ -52,7 +52,7 @@ fn add_deps_for_unit<'a, 'b>(
 }

 // Add rerun-if-changed dependencies
-let key = (unit.pkg.package_id().clone(), unit.kind);
+let key = (unit.pkg.package_id(), unit.kind);
 if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) {
 for path in &output.rerun_if_changed {
 deps.insert(path.into());
@@ -87,7 +87,8 @@ pub fn output_depinfo<'a, 'b>(cx: &mut Context<'a, 'b>, unit: &Unit<'a>) -> Carg
 }
 None => None,
 };
-let deps = deps.iter()
+let deps = deps
+.iter()
 .map(|f| render_filename(f, basedir))
 .collect::<CargoResult<Vec<_>>>()?;

@@ -89,7 +89,7 @@ pub enum Kind {
 fn parse_req_with_deprecated(
 name: &str,
 req: &str,
-extra: Option<(&PackageId, &Config)>,
+extra: Option<(PackageId, &Config)>,
 ) -> CargoResult<VersionReq> {
 match VersionReq::parse(req) {
 Err(ReqParseError::DeprecatedVersionRequirement(requirement)) => {
@@ -152,7 +152,7 @@ impl Dependency {
 name: &str,
 version: Option<&str>,
 source_id: SourceId,
-inside: &PackageId,
+inside: PackageId,
 config: &Config,
 ) -> CargoResult<Dependency> {
 let arg = Some((inside, config));
@@ -349,7 +349,7 @@ impl Dependency {
 }

 /// Lock this dependency to depending on the specified package id
-pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
+pub fn lock_to(&mut self, id: PackageId) -> &mut Dependency {
 assert_eq!(self.inner.source_id, id.source_id());
 assert!(self.inner.req.matches(id.version()));
 trace!(
@@ -404,12 +404,12 @@ impl Dependency {
 }

 /// Returns true if the package (`sum`) can fulfill this dependency request.
-pub fn matches_ignoring_source(&self, id: &PackageId) -> bool {
+pub fn matches_ignoring_source(&self, id: PackageId) -> bool {
 self.package_name() == id.name() && self.version_req().matches(id.version())
 }

 /// Returns true if the package (`id`) can fulfill this dependency request.
-pub fn matches_id(&self, id: &PackageId) -> bool {
+pub fn matches_id(&self, id: PackageId) -> bool {
 self.inner.name == id.name()
 && (self.inner.only_match_name
 || (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id()))
@@ -413,7 +413,7 @@ impl Manifest {
 pub fn name(&self) -> InternedString {
 self.package_id().name()
 }
-pub fn package_id(&self) -> &PackageId {
+pub fn package_id(&self) -> PackageId {
 self.summary.package_id()
 }
 pub fn summary(&self) -> &Summary {
@@ -519,7 +519,7 @@ impl Manifest {
 }

 pub fn metabuild_path(&self, target_dir: Filesystem) -> PathBuf {
-let hash = short_hash(self.package_id());
+let hash = short_hash(&self.package_id());
 target_dir
 .into_path_unlocked()
 .join(".metabuild")
@@ -41,7 +41,7 @@ pub struct Package {

 impl Ord for Package {
 fn cmp(&self, other: &Package) -> Ordering {
-self.package_id().cmp(other.package_id())
+self.package_id().cmp(&other.package_id())
 }
 }

@@ -56,7 +56,7 @@ impl PartialOrd for Package {
 struct SerializedPackage<'a> {
 name: &'a str,
 version: &'a str,
-id: &'a PackageId,
+id: PackageId,
 license: Option<&'a str>,
 license_file: Option<&'a str>,
 description: Option<&'a str>,
@@ -153,7 +153,7 @@ impl Package {
 self.package_id().name()
 }
 /// Get the PackageId object for the package (fully defines a package)
-pub fn package_id(&self) -> &PackageId {
+pub fn package_id(&self) -> PackageId {
 self.manifest.package_id()
 }
 /// Get the root folder of the package
@@ -241,7 +241,7 @@ impl fmt::Display for Package {
 impl fmt::Debug for Package {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 f.debug_struct("Package")
-.field("id", self.summary().package_id())
+.field("id", &self.summary().package_id())
 .field("..", &"..")
 .finish()
 }
@@ -354,7 +354,7 @@ impl<'cfg> PackageSet<'cfg> {
 Ok(PackageSet {
 packages: package_ids
 .iter()
-.map(|id| (id.clone(), LazyCell::new()))
+.map(|&id| (id, LazyCell::new()))
 .collect(),
 sources: RefCell::new(sources),
 config,
@@ -364,8 +364,8 @@ impl<'cfg> PackageSet<'cfg> {
 })
 }

-pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
-Box::new(self.packages.keys())
+pub fn package_ids<'a>(&'a self) -> impl Iterator<Item = PackageId> + 'a {
+self.packages.keys().cloned()
 }

 pub fn enable_download<'a>(&'a self) -> CargoResult<Downloads<'a, 'cfg>> {
@@ -394,14 +394,11 @@ impl<'cfg> PackageSet<'cfg> {
 })
 }

-pub fn get_one(&self, id: &PackageId) -> CargoResult<&Package> {
+pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> {
 Ok(self.get_many(Some(id))?.remove(0))
 }

-pub fn get_many<'a>(
-&self,
-ids: impl IntoIterator<Item = &'a PackageId>,
-) -> CargoResult<Vec<&Package>> {
+pub fn get_many(&self, ids: impl IntoIterator<Item = PackageId>) -> CargoResult<Vec<&Package>> {
 let mut pkgs = Vec::new();
 let mut downloads = self.enable_download()?;
 for id in ids {
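The `package_ids` hunk above also trades a boxed trait object for `impl Trait`, which falls out naturally once the items are by-value `Copy` ids instead of borrows. A hedged sketch of that difference, with a hypothetical `Registry` type standing in for `PackageSet`:

// Hypothetical container standing in for PackageSet's id-keyed map.
struct Registry {
    ids: Vec<u32>,
}

impl Registry {
    // Old shape: borrowed items, so the iterator is boxed and tied to 'a.
    fn ids_boxed<'a>(&'a self) -> Box<dyn Iterator<Item = &'a u32> + 'a> {
        Box::new(self.ids.iter())
    }

    // New shape: Copy items can be yielded by value through `impl Trait`,
    // dropping the heap allocation and the dynamic dispatch.
    fn ids_by_value<'a>(&'a self) -> impl Iterator<Item = u32> + 'a {
        self.ids.iter().cloned()
    }
}

fn main() {
    let r = Registry { ids: vec![1, 2, 3] };
    assert_eq!(r.ids_boxed().count(), r.ids_by_value().count());
}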
@@ -425,13 +422,13 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
 /// Returns `None` if the package is queued up for download and will
 /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if
 /// the package is ready and doesn't need to be downloaded.
-pub fn start(&mut self, id: &PackageId) -> CargoResult<Option<&'a Package>> {
+pub fn start(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
 // First up see if we've already cached this package, in which case
 // there's nothing to do.
 let slot = self
 .set
 .packages
-.get(id)
+.get(&id)
 .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
 if let Some(pkg) = slot.borrow() {
 return Ok(Some(pkg));
@@ -463,7 +460,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
 let token = self.next;
 self.next += 1;
 debug!("downloading {} as {}", id, token);
-assert!(self.pending_ids.insert(id.clone()));
+assert!(self.pending_ids.insert(id));

 let (mut handle, _timeout) = ops::http_handle_and_timeout(self.set.config)?;
 handle.get(true)?;
@@ -542,7 +539,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
 let dl = Download {
 token,
 data: RefCell::new(Vec::new()),
-id: id.clone(),
+id,
 url,
 descriptor,
 total: Cell::new(0),
@@ -632,7 +629,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
 match ret {
 Some(()) => break (dl, data),
 None => {
-self.pending_ids.insert(dl.id.clone());
+self.pending_ids.insert(dl.id);
 self.enqueue(dl, handle)?
 }
 }
@@ -671,7 +668,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
 .get_mut(dl.id.source_id())
 .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?;
 let start = Instant::now();
-let pkg = source.finish_download(&dl.id, data)?;
+let pkg = source.finish_download(dl.id, data)?;

 // Assume that no time has passed while we were calling
 // `finish_download`, update all speed checks and timeout limits of all
@@ -15,7 +15,8 @@ use core::source::SourceId;
 use util::{CargoResult, ToSemver};

 lazy_static! {
-static ref PACKAGE_ID_CACHE: Mutex<HashSet<&'static PackageIdInner>> = Mutex::new(HashSet::new());
+static ref PACKAGE_ID_CACHE: Mutex<HashSet<&'static PackageIdInner>> =
+Mutex::new(HashSet::new());
 }

 /// Identifier for a specific version of a package in a specific source.
@@ -36,7 +37,7 @@ impl PartialEq for PackageIdInner {
 fn eq(&self, other: &Self) -> bool {
 self.name == other.name
 && self.version == other.version
-&& self.source_id.full_eq(&other.source_id)
+&& self.source_id.full_eq(other.source_id)
 }
 }

@@ -87,13 +88,11 @@ impl<'de> de::Deserialize<'de> for PackageId {
 };
 let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;

-Ok(PackageId::wrap(
-PackageIdInner {
-name: InternedString::new(name),
-version,
-source_id,
-}
-))
+Ok(PackageId::wrap(PackageIdInner {
+name: InternedString::new(name),
+version,
+source_id,
+}))
 }
 }

@@ -120,18 +119,16 @@ impl PackageId {
 pub fn new<T: ToSemver>(name: &str, version: T, sid: SourceId) -> CargoResult<PackageId> {
 let v = version.to_semver()?;

-Ok(PackageId::wrap(
-PackageIdInner {
-name: InternedString::new(name),
-version: v,
-source_id: sid,
-}
-))
+Ok(PackageId::wrap(PackageIdInner {
+name: InternedString::new(name),
+version: v,
+source_id: sid,
+}))
 }

 fn wrap(inner: PackageIdInner) -> PackageId {
 let mut cache = PACKAGE_ID_CACHE.lock().unwrap();
-let inner = cache.get(&inner).map(|&x| x).unwrap_or_else(|| {
+let inner = cache.get(&inner).cloned().unwrap_or_else(|| {
 let inner = Box::leak(Box::new(inner));
 cache.insert(inner);
 inner
@@ -139,42 +136,38 @@ impl PackageId {
 PackageId { inner }
 }

-pub fn name(&self) -> InternedString {
+pub fn name(self) -> InternedString {
 self.inner.name
 }
-pub fn version(&self) -> &semver::Version {
+pub fn version(self) -> &'static semver::Version {
 &self.inner.version
 }
-pub fn source_id(&self) -> SourceId {
+pub fn source_id(self) -> SourceId {
 self.inner.source_id
 }

-pub fn with_precise(&self, precise: Option<String>) -> PackageId {
-PackageId::wrap(
-PackageIdInner {
-name: self.inner.name,
-version: self.inner.version.clone(),
-source_id: self.inner.source_id.with_precise(precise),
-}
-)
+pub fn with_precise(self, precise: Option<String>) -> PackageId {
+PackageId::wrap(PackageIdInner {
+name: self.inner.name,
+version: self.inner.version.clone(),
+source_id: self.inner.source_id.with_precise(precise),
+})
 }

-pub fn with_source_id(&self, source: SourceId) -> PackageId {
-PackageId::wrap(
-PackageIdInner {
-name: self.inner.name,
-version: self.inner.version.clone(),
-source_id: source,
-}
-)
+pub fn with_source_id(self, source: SourceId) -> PackageId {
+PackageId::wrap(PackageIdInner {
+name: self.inner.name,
+version: self.inner.version.clone(),
+source_id: source,
+})
 }

-pub fn stable_hash<'a>(&'a self, workspace: &'a Path) -> PackageIdStableHash<'a> {
+pub fn stable_hash(self, workspace: &Path) -> PackageIdStableHash {
 PackageIdStableHash(self, workspace)
 }
 }

-pub struct PackageIdStableHash<'a>(&'a PackageId, &'a Path);
+pub struct PackageIdStableHash<'a>(PackageId, &'a Path);

 impl<'a> Hash for PackageIdStableHash<'a> {
 fn hash<S: hash::Hasher>(&self, state: &mut S) {
@@ -236,7 +229,8 @@ PackageId {
 version: "1.0.0",
 source: "registry `https://github.com/rust-lang/crates.io-index`"
 }
-"#.trim();
+"#
+.trim();
 assert_eq!(pretty, format!("{:#?}", pkg_id));
 }
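What makes the by-value style possible is visible in the hunks above: `PackageId` wraps a `&'static PackageIdInner` handed out by a global intern cache (`PACKAGE_ID_CACHE` plus `Box::leak`), which is also why `version(self)` can return `&'static semver::Version`. A standalone sketch of that interning pattern, using `std::sync::OnceLock` instead of the `lazy_static!` macro the diff relies on, with hypothetical `Id`/`Inner` types:

use std::collections::HashSet;
use std::sync::{Mutex, OnceLock};

// Stand-in for PackageIdInner; the real type also holds a version and a source id.
#[derive(PartialEq, Eq, Hash)]
struct Inner {
    name: String,
}

// The handle is just a pointer to interned data, so it can be Copy.
#[derive(Clone, Copy)]
struct Id {
    inner: &'static Inner,
}

fn cache() -> &'static Mutex<HashSet<&'static Inner>> {
    static CACHE: OnceLock<Mutex<HashSet<&'static Inner>>> = OnceLock::new();
    CACHE.get_or_init(|| Mutex::new(HashSet::new()))
}

impl Id {
    fn wrap(inner: Inner) -> Id {
        let mut cache = cache().lock().unwrap();
        let interned = cache.get(&inner).cloned().unwrap_or_else(|| {
            // Leak exactly once per distinct value; every equal Id then
            // shares the same 'static allocation for the whole process.
            let leaked: &'static Inner = Box::leak(Box::new(inner));
            cache.insert(leaked);
            leaked
        });
        Id { inner: interned }
    }

    // Accessors can take `self` by value and return 'static borrows.
    fn name(self) -> &'static str {
        &self.inner.name
    }
}

fn main() {
    let a = Id::wrap(Inner { name: "foo".to_string() });
    let b = Id::wrap(Inner { name: "foo".to_string() });
    // Both handles point at the same leaked allocation.
    assert!(std::ptr::eq(a.inner, b.inner));
    assert_eq!(a.name(), "foo");
}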
|
||||
|
||||
|
@@ -77,9 +77,9 @@ impl PackageIdSpec {
     }
 
     /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)`
-    pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId>
+    pub fn query_str<I>(spec: &str, i: I) -> CargoResult<PackageId>
     where
-        I: IntoIterator<Item = &'a PackageId>,
+        I: IntoIterator<Item = PackageId>,
     {
         let spec = PackageIdSpec::parse(spec)
             .chain_err(|| format_err!("invalid package id specification: `{}`", spec))?;
@@ -88,7 +88,7 @@ impl PackageIdSpec {
 
     /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `Version` and `Url`
     /// fields filled in.
-    pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec {
+    pub fn from_package_id(package_id: PackageId) -> PackageIdSpec {
         PackageIdSpec {
             name: package_id.name().to_string(),
             version: Some(package_id.version().clone()),
@@ -160,7 +160,7 @@ impl PackageIdSpec {
     }
 
     /// Checks whether the given `PackageId` matches the `PackageIdSpec`.
-    pub fn matches(&self, package_id: &PackageId) -> bool {
+    pub fn matches(&self, package_id: PackageId) -> bool {
         if self.name() != &*package_id.name() {
             return false;
         }
@@ -179,9 +179,9 @@ impl PackageIdSpec {
 
     /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or
     /// more are found, then this returns an error.
-    pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId>
+    pub fn query<I>(&self, i: I) -> CargoResult<PackageId>
     where
-        I: IntoIterator<Item = &'a PackageId>,
+        I: IntoIterator<Item = PackageId>,
     {
         let mut ids = i.into_iter().filter(|p| self.matches(*p));
         let ret = match ids.next() {
@@ -212,7 +212,7 @@ impl PackageIdSpec {
             None => Ok(ret),
         };
 
-        fn minimize(msg: &mut String, ids: &[&PackageId], spec: &PackageIdSpec) {
+        fn minimize(msg: &mut String, ids: &[PackageId], spec: &PackageIdSpec) {
             let mut version_cnt = HashMap::new();
             for id in ids {
                 *version_cnt.entry(id.version()).or_insert(0) += 1;
@@ -371,9 +371,9 @@ mod tests {
         let foo = PackageId::new("foo", "1.2.3", sid).unwrap();
         let bar = PackageId::new("bar", "1.2.3", sid).unwrap();
 
-        assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo));
-        assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar));
-        assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
-        assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo));
+        assert!(PackageIdSpec::parse("foo").unwrap().matches(foo));
+        assert!(!PackageIdSpec::parse("foo").unwrap().matches(bar));
+        assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(foo));
+        assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(foo));
     }
 }
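One caller-visible consequence of the new `IntoIterator<Item = PackageId>` bounds: iterators that naturally yield `&PackageId` (map keys, slice iters) now need a `.cloned()` adapter, which is free for a `Copy` type. A stand-alone sketch with simplified stand-in types (not cargo's real signatures):

    use std::collections::BTreeMap;

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    struct PackageId(u32);

    fn query_str<I>(i: I) -> Option<PackageId>
    where
        I: IntoIterator<Item = PackageId>, // by value: no 'a lifetime needed
    {
        i.into_iter().next()
    }

    fn main() {
        let map: BTreeMap<PackageId, &str> = [(PackageId(1), "bin")].into_iter().collect();
        // `keys()` yields `&PackageId`; `.cloned()` turns that into `PackageId`.
        let found = query_str(map.keys().cloned());
        assert_eq!(found, Some(PackageId(1)));
    }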
@@ -67,7 +67,7 @@ impl Profiles {
     /// workspace.
     pub fn get_profile(
         &self,
-        pkg_id: &PackageId,
+        pkg_id: PackageId,
         is_member: bool,
         unit_for: UnitFor,
         mode: CompileMode,
@@ -163,7 +163,7 @@ struct ProfileMaker {
 impl ProfileMaker {
     fn get_profile(
         &self,
-        pkg_id: Option<&PackageId>,
+        pkg_id: Option<PackageId>,
         is_member: bool,
         unit_for: UnitFor,
     ) -> Profile {
@@ -292,7 +292,7 @@ impl ProfileMaker {
 }
 
 fn merge_toml(
-    pkg_id: Option<&PackageId>,
+    pkg_id: Option<PackageId>,
     is_member: bool,
     unit_for: UnitFor,
     profile: &mut Profile,
@@ -264,11 +264,7 @@ impl<'cfg> PackageRegistry<'cfg> {
         // we want to fill in the `patches_available` map (later used in the
         // `lock` method) and otherwise store the unlocked summaries in
         // `patches` to get locked in a future call to `lock_patches`.
-        let ids = unlocked_summaries
-            .iter()
-            .map(|s| s.package_id())
-            .cloned()
-            .collect();
+        let ids = unlocked_summaries.iter().map(|s| s.package_id()).collect();
         self.patches_available.insert(url.clone(), ids);
         self.patches.insert(url.clone(), unlocked_summaries);
@@ -558,7 +554,7 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
     let pair = locked
         .get(&summary.source_id())
        .and_then(|map| map.get(&*summary.name()))
-        .and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id()));
+        .and_then(|vec| vec.iter().find(|&&(id, _)| id == summary.package_id()));
 
     trace!("locking summary of {}", summary.package_id());
@@ -595,8 +591,8 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
         // Cases 1/2 are handled by `matches_id` and case 3 is handled by
         // falling through to the logic below.
         if let Some(&(_, ref locked_deps)) = pair {
-            let locked = locked_deps.iter().find(|id| dep.matches_id(id));
-            if let Some(locked) = locked {
+            let locked = locked_deps.iter().find(|&&id| dep.matches_id(id));
+            if let Some(&locked) = locked {
                 trace!("\tfirst hit on {}", locked);
                 let mut dep = dep.clone();
                 dep.lock_to(locked);
@@ -610,8 +606,8 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
         let v = locked
             .get(&dep.source_id())
             .and_then(|map| map.get(&*dep.package_name()))
-            .and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id)));
-        if let Some(&(ref id, _)) = v {
+            .and_then(|vec| vec.iter().find(|&&(id, _)| dep.matches_id(id)));
+        if let Some(&(id, _)) = v {
             trace!("\tsecond hit on {}", id);
             let mut dep = dep.clone();
             dep.lock_to(id);
@@ -622,7 +618,9 @@ fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Sum
         // this dependency.
         let v = patches.get(dep.source_id().url()).map(|vec| {
             let dep2 = dep.clone();
-            let mut iter = vec.iter().filter(move |p| dep2.matches_ignoring_source(p));
+            let mut iter = vec
+                .iter()
+                .filter(move |&&p| dep2.matches_ignoring_source(p));
             (iter.next(), iter)
         });
         if let Some((Some(patch_id), mut remaining)) = v {
@@ -37,28 +37,28 @@ impl ConflictStoreTrie {
                 }
             }
             ConflictStoreTrie::Node(m) => {
-                for (pid, store) in m {
+                for (&pid, store) in m {
                     // if the key is active then we need to check all of the corresponding subTrie.
                     if cx.is_active(pid) {
                         if let Some(o) = store.find_conflicting(cx, filter) {
                             return Some(o);
                         }
                     } // else, if it is not active then there is no way any of the corresponding
-                    // subTrie will be conflicting.
+                      // subTrie will be conflicting.
                 }
                 None
             }
         }
     }
 
-    fn insert<'a>(
+    fn insert(
         &mut self,
-        mut iter: impl Iterator<Item = &'a PackageId>,
+        mut iter: impl Iterator<Item = PackageId>,
         con: BTreeMap<PackageId, ConflictReason>,
     ) {
         if let Some(pid) = iter.next() {
             if let ConflictStoreTrie::Node(p) = self {
-                p.entry(pid.clone())
+                p.entry(pid)
                     .or_insert_with(|| ConflictStoreTrie::Node(HashMap::new()))
                     .insert(iter, con);
             } // else, We already have a subset of this in the ConflictStore
@@ -160,7 +160,7 @@ impl ConflictCache {
         self.con_from_dep
             .entry(dep.clone())
             .or_insert_with(|| ConflictStoreTrie::Node(HashMap::new()))
-            .insert(con.keys(), con.clone());
+            .insert(con.keys().cloned(), con.clone());
 
         trace!(
             "{} = \"{}\" adding a skip {:?}",
@@ -176,7 +176,7 @@ impl ConflictCache {
                 .insert(dep.clone());
         }
     }
-    pub fn dependencies_conflicting_with(&self, pid: &PackageId) -> Option<&HashSet<Dependency>> {
-        self.dep_from_pid.get(pid)
+    pub fn dependencies_conflicting_with(&self, pid: PackageId) -> Option<&HashSet<Dependency>> {
+        self.dep_from_pid.get(&pid)
     }
 }
@@ -58,10 +58,10 @@ impl Context {
             .entry((id.name(), id.source_id()))
             .or_insert_with(|| Rc::new(Vec::new()));
         if !prev.iter().any(|c| c == summary) {
-            self.resolve_graph.push(GraphNode::Add(id.clone()));
+            self.resolve_graph.push(GraphNode::Add(id));
             if let Some(link) = summary.links() {
                 ensure!(
-                    self.links.insert(link, id.clone()).is_none(),
+                    self.links.insert(link, id).is_none(),
                     "Attempting to resolve a with more then one crate with the links={}. \n\
                      This will not build as is. Consider rebuilding the .lock file.",
                     &*link
@@ -84,7 +84,7 @@ impl Context {
         };
 
         let has_default_feature = summary.features().contains_key("default");
-        Ok(match self.resolve_features.get(id) {
+        Ok(match self.resolve_features.get(&id) {
             Some(prev) => {
                 features.iter().all(|f| prev.contains(f))
                     && (!use_default || prev.contains("default") || !has_default_feature)
@@ -131,7 +131,7 @@ impl Context {
             .unwrap_or(&[])
     }
 
-    pub fn is_active(&self, id: &PackageId) -> bool {
+    pub fn is_active(&self, id: PackageId) -> bool {
         self.activations
             .get(&(id.name(), id.source_id()))
             .map(|v| v.iter().any(|s| s.package_id() == id))
@@ -142,13 +142,13 @@ impl Context {
     /// are still active
     pub fn is_conflicting(
         &self,
-        parent: Option<&PackageId>,
+        parent: Option<PackageId>,
         conflicting_activations: &BTreeMap<PackageId, ConflictReason>,
     ) -> bool {
         conflicting_activations
             .keys()
-            .chain(parent)
-            .all(|id| self.is_active(id))
+            .chain(parent.as_ref())
+            .all(|&id| self.is_active(id))
     }
 
     /// Return all dependencies and the features we want from them.
@@ -230,11 +230,7 @@ impl Context {
                     features
                 )
                     .into(),
-                Some(p) => (
-                    p.package_id().clone(),
-                    ConflictReason::MissingFeatures(features),
-                )
-                    .into(),
+                Some(p) => (p.package_id(), ConflictReason::MissingFeatures(features)).into(),
             });
         }
@@ -244,7 +240,7 @@ impl Context {
 
         let set = Rc::make_mut(
             self.resolve_features
-                .entry(pkgid.clone())
+                .entry(pkgid)
                 .or_insert_with(|| Rc::new(HashSet::new())),
         );
@@ -260,7 +256,7 @@ impl Context {
         let mut replacements = HashMap::new();
         let mut cur = &self.resolve_replacements;
         while let Some(ref node) = cur.head {
-            let (k, v) = node.0.clone();
+            let (k, v) = node.0;
             replacements.insert(k, v);
             cur = &node.1;
         }
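Most of the hunks above are the same mechanical rewrite: map keys that used to be cloned into `entry()`/`insert()` are now simply copied. A small sketch of why the `.clone()` calls can disappear (simplified `PackageId` stand-in, not cargo's real type):

    use std::collections::{HashMap, HashSet};

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct PackageId(u32);

    fn main() {
        let mut features: HashMap<PackageId, HashSet<String>> = HashMap::new();
        let pkgid = PackageId(7);
        // With a Copy key, `entry(pkgid)` takes the key by value; the old
        // `entry(pkgid.clone())` becomes a plain move-of-a-copy.
        features
            .entry(pkgid)
            .or_insert_with(HashSet::new)
            .insert("default".to_string());
        // `pkgid` is still usable afterwards because it was copied, not moved.
        assert!(features.contains_key(&pkgid));
    }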
@@ -72,7 +72,7 @@ impl EncodableResolve {
         };
 
         let lookup_id = |enc_id: &EncodablePackageId| -> Option<PackageId> {
-            live_pkgs.get(enc_id).map(|&(ref id, _)| id.clone())
+            live_pkgs.get(enc_id).map(|&(id, _)| id)
         };
 
         let g = {
@@ -343,8 +343,8 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
 
         let mut metadata = self.resolve.metadata().clone();
 
-        for id in ids.iter().filter(|id| !id.source_id().is_path()) {
-            let checksum = match self.resolve.checksums()[*id] {
+        for &id in ids.iter().filter(|id| !id.source_id().is_path()) {
+            let checksum = match self.resolve.checksums()[&id] {
                 Some(ref s) => &s[..],
                 None => "<none>",
             };
@@ -382,7 +382,7 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
     }
 }
 
-fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency {
+fn encodable_resolve_node(id: PackageId, resolve: &Resolve) -> EncodableDependency {
     let (replace, deps) = match resolve.replacement(id) {
         Some(id) => (Some(encodable_package_id(id)), None),
         None => {
@@ -404,7 +404,7 @@ fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDepende
     }
 }
 
-pub fn encodable_package_id(id: &PackageId) -> EncodablePackageId {
+pub fn encodable_package_id(id: PackageId) -> EncodablePackageId {
     EncodablePackageId {
         name: id.name().to_string(),
         version: id.version().to_string(),
@@ -82,7 +82,7 @@ pub(super) fn activation_error(
         ResolveError::new(
             err,
             graph
-                .path_to_top(parent.package_id())
+                .path_to_top(&parent.package_id())
                 .into_iter()
                 .cloned()
                 .collect(),
@@ -92,7 +92,7 @@ pub(super) fn activation_error(
     if !candidates.is_empty() {
         let mut msg = format!("failed to select a version for `{}`.", dep.package_name());
         msg.push_str("\n ... required by ");
-        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));
+        msg.push_str(&describe_path(&graph.path_to_top(&parent.package_id())));
 
         msg.push_str("\nversions that meet the requirements `");
         msg.push_str(&dep.version_req().to_string());
@@ -204,7 +204,7 @@ pub(super) fn activation_error(
             registry.describe_source(dep.source_id()),
         );
         msg.push_str("required by ");
-        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));
+        msg.push_str(&describe_path(&graph.path_to_top(&parent.package_id())));
 
         // If we have a path dependency with a locked version, then this may
         // indicate that we updated a sub-package and forgot to run `cargo
@@ -258,7 +258,7 @@ pub(super) fn activation_error(
             msg.push_str("\n");
         }
         msg.push_str("required by ");
-        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));
+        msg.push_str(&describe_path(&graph.path_to_top(&parent.package_id())));
 
         msg
     };
@@ -111,7 +111,7 @@ pub fn resolve(
     summaries: &[(Summary, Method)],
     replacements: &[(PackageIdSpec, Dependency)],
     registry: &mut Registry,
-    try_to_use: &HashSet<&PackageId>,
+    try_to_use: &HashSet<PackageId>,
     config: Option<&Config>,
     print_warnings: bool,
 ) -> CargoResult<Resolve> {
@@ -127,14 +127,14 @@ pub fn resolve(
     let mut cksums = HashMap::new();
     for summary in cx.activations.values().flat_map(|v| v.iter()) {
         let cksum = summary.checksum().map(|s| s.to_string());
-        cksums.insert(summary.package_id().clone(), cksum);
+        cksums.insert(summary.package_id(), cksum);
     }
     let resolve = Resolve::new(
         cx.graph(),
         cx.resolve_replacements(),
         cx.resolve_features
             .iter()
-            .map(|(k, v)| (k.clone(), v.iter().map(|x| x.to_string()).collect()))
+            .map(|(k, v)| (*k, v.iter().map(|x| x.to_string()).collect()))
             .collect(),
         cksums,
         BTreeMap::new(),
@@ -358,7 +358,7 @@ fn activate_deps_loop(
             None
         };
 
-        let pid = candidate.summary.package_id().clone();
+        let pid = candidate.summary.package_id();
         let method = Method::Required {
             dev_deps: false,
             features: &features,
@@ -417,7 +417,7 @@ fn activate_deps_loop(
                     conflicting
                         .iter()
                         .filter(|&(p, _)| p != &pid)
-                        .map(|(p, r)| (p.clone(), r.clone())),
+                        .map(|(&p, r)| (p, r.clone())),
                 );
 
                 has_past_conflicting_dep = true;
@@ -432,7 +432,7 @@ fn activate_deps_loop(
             // parent conflict with us.
             if !has_past_conflicting_dep {
                 if let Some(known_related_bad_deps) =
-                    past_conflicting_activations.dependencies_conflicting_with(&pid)
+                    past_conflicting_activations.dependencies_conflicting_with(pid)
                 {
                     if let Some((other_parent, conflict)) = remaining_deps
                         .iter()
@@ -462,9 +462,9 @@ fn activate_deps_loop(
                             conflict
                                 .iter()
                                 .filter(|&(p, _)| p != &pid)
-                                .map(|(p, r)| (p.clone(), r.clone())),
+                                .map(|(&p, r)| (p, r.clone())),
                         );
-                        conflicting_activations.insert(other_parent.clone(), rel);
+                        conflicting_activations.insert(other_parent, rel);
                         has_past_conflicting_dep = true;
                     }
                 }
@@ -589,8 +589,8 @@ fn activate(
 ) -> ActivateResult<Option<(DepsFrame, Duration)>> {
     if let Some((parent, dep)) = parent {
         cx.resolve_graph.push(GraphNode::Link(
-            parent.package_id().clone(),
-            candidate.summary.package_id().clone(),
+            parent.package_id(),
+            candidate.summary.package_id(),
             dep.clone(),
         ));
     }
@@ -599,10 +599,8 @@ fn activate(
 
     let candidate = match candidate.replace {
         Some(replace) => {
-            cx.resolve_replacements.push((
-                candidate.summary.package_id().clone(),
-                replace.package_id().clone(),
-            ));
+            cx.resolve_replacements
+                .push((candidate.summary.package_id(), replace.package_id()));
             if cx.flag_activated(&replace, method)? && activated {
                 return Ok(None);
             }
@@ -700,10 +698,10 @@ impl RemainingCandidates {
             // `links` key. If this candidate links to something that's already
             // linked to by a different package then we've gotta skip this.
             if let Some(link) = b.summary.links() {
-                if let Some(a) = cx.links.get(&link) {
+                if let Some(&a) = cx.links.get(&link) {
                     if a != b.summary.package_id() {
                         conflicting_prev_active
-                            .entry(a.clone())
+                            .entry(a)
                             .or_insert_with(|| ConflictReason::Links(link));
                         continue;
                     }
@@ -724,7 +722,7 @@ impl RemainingCandidates {
             {
                 if *a != b.summary {
                     conflicting_prev_active
-                        .entry(a.package_id().clone())
+                        .entry(a.package_id())
                         .or_insert(ConflictReason::Semver);
                     continue;
                 }
@@ -823,7 +821,7 @@ fn find_candidate(
 }
 
 fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()> {
-    let summaries: HashMap<&PackageId, &Summary> = activations
+    let summaries: HashMap<PackageId, &Summary> = activations
         .values()
         .flat_map(|v| v.iter())
         .map(|s| (s.package_id(), s))
@@ -834,25 +832,25 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()>
     all_packages.sort_unstable();
     let mut checked = HashSet::new();
     for pkg in all_packages {
-        if !checked.contains(pkg) {
+        if !checked.contains(&pkg) {
             visit(resolve, pkg, &summaries, &mut HashSet::new(), &mut checked)?
         }
     }
     return Ok(());
 
-    fn visit<'a>(
-        resolve: &'a Resolve,
-        id: &'a PackageId,
-        summaries: &HashMap<&'a PackageId, &Summary>,
-        visited: &mut HashSet<&'a PackageId>,
-        checked: &mut HashSet<&'a PackageId>,
+    fn visit(
+        resolve: &Resolve,
+        id: PackageId,
+        summaries: &HashMap<PackageId, &Summary>,
+        visited: &mut HashSet<PackageId>,
+        checked: &mut HashSet<PackageId>,
     ) -> CargoResult<()> {
         // See if we visited ourselves
         if !visited.insert(id) {
             bail!(
                 "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
                 id,
-                errors::describe_path(&resolve.path_to_top(id))
+                errors::describe_path(&resolve.path_to_top(&id))
             );
         }
@@ -864,7 +862,7 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()>
         // visitation list as we can't induce a cycle through transitive
         // dependencies.
         if checked.insert(id) {
-            let summary = summaries[id];
+            let summary = summaries[&id];
             for dep in resolve.deps_not_replaced(id) {
                 let is_transitive = summary
                     .dependencies()
@@ -885,7 +883,7 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()>
         }
 
         // Ok, we're done, no longer visiting our node any more
-        visited.remove(id);
+        visited.remove(&id);
         Ok(())
     }
 }
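Dropping the references also removes the `'a` lifetime that `check_cycles` had to thread through its recursive helper: the visited/checked sets own their ids outright instead of borrowing the activation map. An illustrative sketch of the same shape (not cargo's actual types):

    use std::collections::{HashMap, HashSet};

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct PackageId(u32);

    fn visit(
        deps: &HashMap<PackageId, Vec<PackageId>>,
        id: PackageId,
        visited: &mut HashSet<PackageId>, // owned ids: no borrow of the graph
    ) -> Result<(), String> {
        if !visited.insert(id) {
            return Err("cycle detected".to_string());
        }
        for &dep in deps.get(&id).into_iter().flatten() {
            visit(deps, dep, visited)?;
        }
        visited.remove(&id);
        Ok(())
    }

    fn main() {
        let mut deps = HashMap::new();
        deps.insert(PackageId(1), vec![PackageId(2)]);
        deps.insert(PackageId(2), vec![PackageId(1)]); // 1 -> 2 -> 1
        assert!(visit(&deps, PackageId(1), &mut HashSet::new()).is_err());
    }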
@@ -41,10 +41,7 @@ impl Resolve {
         metadata: Metadata,
         unused_patches: Vec<PackageId>,
     ) -> Resolve {
-        let reverse_replacements = replacements
-            .iter()
-            .map(|p| (p.1.clone(), p.0.clone()))
-            .collect();
+        let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect();
         Resolve {
             graph,
             replacements,
@@ -68,7 +65,7 @@ impl Resolve {
             if self.iter().any(|id| id == summary.package_id()) {
                 continue;
             }
-            self.unused_patches.push(summary.package_id().clone());
+            self.unused_patches.push(summary.package_id());
         }
     }
@@ -175,39 +172,39 @@ unable to verify that `{0}` is the same as when the lockfile was generated
         self.graph.sort()
     }
 
-    pub fn iter(&self) -> impl Iterator<Item = &PackageId> {
-        self.graph.iter()
+    pub fn iter<'a>(&'a self) -> impl Iterator<Item = PackageId> + 'a {
+        self.graph.iter().cloned()
     }
 
-    pub fn deps(&self, pkg: &PackageId) -> impl Iterator<Item = (&PackageId, &[Dependency])> {
+    pub fn deps(&self, pkg: PackageId) -> impl Iterator<Item = (PackageId, &[Dependency])> {
         self.graph
-            .edges(pkg)
-            .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps.as_slice()))
+            .edges(&pkg)
+            .map(move |(&id, deps)| (self.replacement(id).unwrap_or(id), deps.as_slice()))
     }
 
-    pub fn deps_not_replaced(&self, pkg: &PackageId) -> impl Iterator<Item = &PackageId> {
-        self.graph.edges(pkg).map(|(id, _)| id)
+    pub fn deps_not_replaced<'a>(&'a self, pkg: PackageId) -> impl Iterator<Item = PackageId> + 'a {
+        self.graph.edges(&pkg).map(|(&id, _)| id)
     }
 
-    pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> {
-        self.replacements.get(pkg)
+    pub fn replacement(&self, pkg: PackageId) -> Option<PackageId> {
+        self.replacements.get(&pkg).cloned()
     }
 
     pub fn replacements(&self) -> &HashMap<PackageId, PackageId> {
         &self.replacements
     }
 
-    pub fn features(&self, pkg: &PackageId) -> &HashSet<String> {
-        self.features.get(pkg).unwrap_or(&self.empty_features)
+    pub fn features(&self, pkg: PackageId) -> &HashSet<String> {
+        self.features.get(&pkg).unwrap_or(&self.empty_features)
     }
 
-    pub fn features_sorted(&self, pkg: &PackageId) -> Vec<&str> {
+    pub fn features_sorted(&self, pkg: PackageId) -> Vec<&str> {
         let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref()));
         v.sort_unstable();
         v
     }
 
-    pub fn query(&self, spec: &str) -> CargoResult<&PackageId> {
+    pub fn query(&self, spec: &str) -> CargoResult<PackageId> {
         PackageIdSpec::query_str(spec, self.iter())
     }
@@ -225,8 +222,8 @@ unable to verify that `{0}` is the same as when the lockfile was generated
 
     pub fn extern_crate_name(
         &self,
-        from: &PackageId,
-        to: &PackageId,
+        from: PackageId,
+        to: PackageId,
         to_target: &Target,
     ) -> CargoResult<String> {
         let deps = if from == to {
@@ -256,7 +253,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated
         Ok(name.to_string())
     }
 
-    fn dependencies_listed(&self, from: &PackageId, to: &PackageId) -> &[Dependency] {
+    fn dependencies_listed(&self, from: PackageId, to: PackageId) -> &[Dependency] {
         // We've got a dependency on `from` to `to`, but this dependency edge
         // may be affected by [replace]. If the `to` package is listed as the
         // target of a replacement (aka the key of a reverse replacement map)
@@ -266,12 +263,12 @@ unable to verify that `{0}` is the same as when the lockfile was generated
         // Note that we don't treat `from` as if it's been replaced because
         // that's where the dependency originates from, and we only replace
         // targets of dependencies not the originator.
-        if let Some(replace) = self.reverse_replacements.get(to) {
-            if let Some(deps) = self.graph.edge(from, replace) {
+        if let Some(replace) = self.reverse_replacements.get(&to) {
+            if let Some(deps) = self.graph.edge(&from, replace) {
                 return deps;
             }
         }
-        match self.graph.edge(from, to) {
+        match self.graph.edge(&from, &to) {
             Some(ret) => ret,
             None => panic!("no Dependency listed for `{}` => `{}`", from, to),
         }
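The iterator-returning methods trade a borrowed item type for an owned one, but the closure still captures `&self`, hence the newly explicit `+ 'a` bound on the `impl Iterator`. A compilable sketch of just the signature change (stand-in types, not cargo's `Resolve`):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct PackageId(u32);

    struct Resolve {
        ids: Vec<PackageId>,
    }

    impl Resolve {
        // Items are owned `PackageId`s, but the iterator still borrows `self`,
        // so its lifetime must be tied to `&'a self` explicitly.
        fn iter<'a>(&'a self) -> impl Iterator<Item = PackageId> + 'a {
            self.ids.iter().cloned()
        }
    }

    fn main() {
        let r = Resolve { ids: vec![PackageId(1), PackageId(2)] };
        let collected: Vec<PackageId> = r.iter().collect();
        assert_eq!(collected, vec![PackageId(1), PackageId(2)]);
    }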
@@ -76,7 +76,7 @@ impl ResolverProgress {
 pub struct RegistryQueryer<'a> {
     pub registry: &'a mut (Registry + 'a),
     replacements: &'a [(PackageIdSpec, Dependency)],
-    try_to_use: &'a HashSet<&'a PackageId>,
+    try_to_use: &'a HashSet<PackageId>,
     cache: HashMap<Dependency, Rc<Vec<Candidate>>>,
     // If set the list of dependency candidates will be sorted by minimal
     // versions first. That allows `cargo update -Z minimal-versions` which will
@@ -88,7 +88,7 @@ impl<'a> RegistryQueryer<'a> {
     pub fn new(
         registry: &'a mut Registry,
         replacements: &'a [(PackageIdSpec, Dependency)],
-        try_to_use: &'a HashSet<&'a PackageId>,
+        try_to_use: &'a HashSet<PackageId>,
         minimal_versions: bool,
     ) -> Self {
         RegistryQueryer {
@@ -203,8 +203,8 @@ impl<'a> RegistryQueryer<'a> {
         // prioritized summaries (those in `try_to_use`) and failing that we
         // list everything from the maximum version to the lowest version.
         ret.sort_unstable_by(|a, b| {
-            let a_in_previous = self.try_to_use.contains(a.summary.package_id());
-            let b_in_previous = self.try_to_use.contains(b.summary.package_id());
+            let a_in_previous = self.try_to_use.contains(&a.summary.package_id());
+            let b_in_previous = self.try_to_use.contains(&b.summary.package_id());
             let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse();
             match previous_cmp {
                 Ordering::Equal => {
@@ -279,7 +279,7 @@ impl DepsFrame {
             .unwrap_or(0)
     }
 
-    pub fn flatten(&self) -> impl Iterator<Item = (&PackageId, Dependency)> {
+    pub fn flatten<'a>(&'a self) -> impl Iterator<Item = (PackageId, Dependency)> + 'a {
         self.remaining_siblings
             .clone()
             .map(move |(_, (d, _, _))| (self.parent.package_id(), d))
@@ -353,7 +353,7 @@ impl RemainingDeps {
         }
         None
     }
-    pub fn iter(&mut self) -> impl Iterator<Item = (&PackageId, Dependency)> {
+    pub fn iter<'a>(&'a mut self) -> impl Iterator<Item = (PackageId, Dependency)> + 'a {
         self.data.iter().flat_map(|(other, _)| other.flatten())
     }
 }
@@ -49,9 +49,9 @@ pub trait Source {
 
     /// The download method fetches the full package for each name and
     /// version specified.
-    fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage>;
+    fn download(&mut self, package: PackageId) -> CargoResult<MaybePackage>;
 
-    fn finish_download(&mut self, package: &PackageId, contents: Vec<u8>) -> CargoResult<Package>;
+    fn finish_download(&mut self, package: PackageId, contents: Vec<u8>) -> CargoResult<Package>;
 
     /// Generates a unique string which represents the fingerprint of the
     /// current state of the source.
@@ -71,7 +71,7 @@ pub trait Source {
     /// verification during the `download` step, but this is intended to be run
     /// just before a crate is compiled so it may perform more expensive checks
     /// which may not be cacheable.
-    fn verify(&self, _pkg: &PackageId) -> CargoResult<()> {
+    fn verify(&self, _pkg: PackageId) -> CargoResult<()> {
         Ok(())
     }
@@ -127,11 +127,11 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
     }
 
     /// Forwards to `Source::download`
-    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
         (**self).download(id)
     }
 
-    fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
         (**self).finish_download(id, data)
     }
@@ -141,7 +141,7 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
     }
 
     /// Forwards to `Source::verify`
-    fn verify(&self, pkg: &PackageId) -> CargoResult<()> {
+    fn verify(&self, pkg: PackageId) -> CargoResult<()> {
         (**self).verify(pkg)
     }
@@ -183,11 +183,11 @@ impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
         (**self).update()
     }
 
-    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
         (**self).download(id)
     }
 
-    fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
         (**self).finish_download(id, data)
     }
@@ -195,7 +195,7 @@ impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
         (**self).fingerprint(pkg)
     }
 
-    fn verify(&self, pkg: &PackageId) -> CargoResult<()> {
+    fn verify(&self, pkg: PackageId) -> CargoResult<()> {
         (**self).verify(pkg)
     }
@@ -255,7 +255,7 @@ impl<'src> SourceMap<'src> {
 
     /// Like `HashMap::get`, but first calculates the `SourceId` from a
     /// `PackageId`
-    pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source + 'src)> {
+    pub fn get_by_package_id(&self, pkg_id: PackageId) -> Option<&(Source + 'src)> {
         self.get(pkg_id.source_id())
     }
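For the `Source` trait itself, taking the id by value keeps the trait object-safe, and the blanket `Box<T>`/`&mut T` impls can forward the id without re-borrowing. A minimal sketch under the same assumption that the id is `Copy` (simplified result and error types):

    #[derive(Clone, Copy, Debug)]
    struct PackageId(u32);

    trait Source {
        fn download(&mut self, package: PackageId) -> Result<String, String>;
    }

    // Blanket impl forwarding through a Box; the id is copied into the call.
    impl<T: Source + ?Sized> Source for Box<T> {
        fn download(&mut self, package: PackageId) -> Result<String, String> {
            (**self).download(package)
        }
    }

    struct Dummy;
    impl Source for Dummy {
        fn download(&mut self, package: PackageId) -> Result<String, String> {
            Ok(format!("downloaded {:?}", package))
        }
    }

    fn main() {
        let mut s: Box<dyn Source> = Box::new(Dummy);
        assert_eq!(s.download(PackageId(3)).unwrap(), "downloaded PackageId(3)");
    }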
@@ -336,11 +336,11 @@ impl SourceId {
         self.hash(into)
     }
 
-    pub fn full_eq(&self, other: &SourceId) -> bool {
+    pub fn full_eq(self, other: SourceId) -> bool {
         ptr::eq(self.inner, other.inner)
     }
 
-    pub fn full_hash<S: hash::Hasher>(&self, into: &mut S) {
+    pub fn full_hash<S: hash::Hasher>(self, into: &mut S) {
         ptr::NonNull::from(self.inner).hash(into)
     }
 }
@@ -71,8 +71,8 @@ impl Summary {
         })
     }
 
-    pub fn package_id(&self) -> &PackageId {
-        &self.inner.package_id
+    pub fn package_id(&self) -> PackageId {
+        self.inner.package_id
     }
     pub fn name(&self) -> InternedString {
         self.package_id().name()
@@ -134,7 +134,8 @@ fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
         config
             .shell()
             .verbose(|shell| shell.status("Removing", path.display()))?;
-        paths::remove_dir_all(path).chain_err(|| format_err!("could not remove build directory"))?;
+        paths::remove_dir_all(path)
+            .chain_err(|| format_err!("could not remove build directory"))?;
     } else if m.is_ok() {
         config
             .shell()
@@ -28,7 +28,7 @@ use std::sync::Arc;
 
 use core::compiler::{BuildConfig, BuildContext, Compilation, Context, DefaultExecutor, Executor};
 use core::compiler::{CompileMode, Kind, Unit};
-use core::profiles::{UnitFor, Profiles};
+use core::profiles::{Profiles, UnitFor};
 use core::resolver::{Method, Resolve};
 use core::{Package, Source, Target};
 use core::{PackageId, PackageIdSpec, TargetKind, Workspace};
@@ -109,11 +109,13 @@ impl Packages {
 
     pub fn to_package_id_specs(&self, ws: &Workspace) -> CargoResult<Vec<PackageIdSpec>> {
         let specs = match *self {
-            Packages::All => ws.members()
+            Packages::All => ws
+                .members()
                 .map(Package::package_id)
                 .map(PackageIdSpec::from_package_id)
                 .collect(),
-            Packages::OptOut(ref opt_out) => ws.members()
+            Packages::OptOut(ref opt_out) => ws
+                .members()
                 .map(Package::package_id)
                 .map(PackageIdSpec::from_package_id)
                 .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
@@ -125,7 +127,8 @@ impl Packages {
                 .iter()
                 .map(|p| PackageIdSpec::parse(p))
                 .collect::<CargoResult<Vec<_>>>()?,
-            Packages::Default => ws.default_members()
+            Packages::Default => ws
+                .default_members()
                 .map(Package::package_id)
                 .map(PackageIdSpec::from_package_id)
                 .collect(),
@@ -159,7 +162,8 @@ impl Packages {
                     .ok_or_else(|| {
                         format_err!("package `{}` is not a member of the workspace", name)
                     })
-                }).collect::<CargoResult<Vec<_>>>()?,
+                })
+                .collect::<CargoResult<Vec<_>>>()?,
         };
         Ok(packages)
     }
@@ -243,7 +247,8 @@ pub fn compile_ws<'a>(
     let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
     let (packages, resolve_with_overrides) = resolve;
 
-    let to_build_ids = specs.iter()
+    let to_build_ids = specs
+        .iter()
         .map(|s| s.query(resolve_with_overrides.iter()))
         .collect::<CargoResult<Vec<_>>>()?;
     let mut to_builds = packages.get_many(to_build_ids)?;
@@ -390,8 +395,11 @@ impl CompileFilter {
                 benches: FilterRule::All,
                 tests: FilterRule::All,
             }
-        } else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific()
-            || rule_exms.is_specific() || rule_bens.is_specific()
+        } else if lib_only
+            || rule_bins.is_specific()
+            || rule_tsts.is_specific()
+            || rule_exms.is_specific()
+            || rule_bens.is_specific()
         {
             CompileFilter::Only {
                 all_targets: false,
@@ -695,7 +703,13 @@ fn generate_targets<'a>(
     // features available.
     let mut features_map = HashMap::new();
     let mut units = HashSet::new();
-    for Proposal { pkg, target, requires_features, mode} in proposals {
+    for Proposal {
+        pkg,
+        target,
+        requires_features,
+        mode,
+    } in proposals
+    {
         let unavailable_features = match target.required_features() {
             Some(rf) => {
                 let features = features_map
@@ -730,7 +744,7 @@ fn generate_targets<'a>(
 
 fn resolve_all_features(
     resolve_with_overrides: &Resolve,
-    package_id: &PackageId,
+    package_id: PackageId,
 ) -> HashSet<String> {
     let mut features = resolve_with_overrides.features(package_id).clone();
@@ -843,7 +857,8 @@ fn find_named_targets<'a>(
             pkg.targets()
                 .iter()
                 .filter(|target| is_expected_kind(target))
-        }).map(|target| (lev_distance(target_name, target.name()), target))
+        })
+        .map(|target| (lev_distance(target_name, target.name()), target))
         .filter(|&(d, _)| d < 4)
         .min_by_key(|t| t.0)
         .map(|t| t.1);
@@ -31,7 +31,8 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
     )?;
     let (packages, resolve_with_overrides) = resolve;
 
-    let ids = specs.iter()
+    let ids = specs
+        .iter()
         .map(|s| s.query(resolve_with_overrides.iter()))
         .collect::<CargoResult<Vec<_>>>()?;
     let pkgs = packages.get_many(ids)?;
@@ -34,31 +34,31 @@ pub fn fetch<'a>(
                 continue;
             }
 
-            to_download.push(id.clone());
-            let deps = resolve.deps(id)
+            to_download.push(id);
+            let deps = resolve
+                .deps(id)
                 .filter(|&(_id, deps)| {
-                    deps.iter()
-                        .any(|d| {
-                            // If no target was specified then all dependencies can
-                            // be fetched.
-                            let target = match options.target {
-                                Some(ref t) => t,
-                                None => return true,
-                            };
-                            // If this dependency is only available for certain
-                            // platforms, make sure we're only fetching it for that
-                            // platform.
-                            let platform = match d.platform() {
-                                Some(p) => p,
-                                None => return true,
-                            };
-                            platform.matches(target, target_info.cfg())
-                        })
+                    deps.iter().any(|d| {
+                        // If no target was specified then all dependencies can
+                        // be fetched.
+                        let target = match options.target {
+                            Some(ref t) => t,
+                            None => return true,
+                        };
+                        // If this dependency is only available for certain
+                        // platforms, make sure we're only fetching it for that
+                        // platform.
+                        let platform = match d.platform() {
+                            Some(p) => p,
+                            None => return true,
+                        };
+                        platform.matches(target, target_info.cfg())
+                    })
                 })
                 .map(|(id, _deps)| id);
             deps_to_fetch.extend(deps);
         }
-        packages.get_many(&to_download)?;
+        packages.get_many(to_download)?;
     }
 
     Ok((resolve, packages))
@@ -124,9 +124,9 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()>
 
     fn fill_with_deps<'a>(
         resolve: &'a Resolve,
-        dep: &'a PackageId,
-        set: &mut HashSet<&'a PackageId>,
-        visited: &mut HashSet<&'a PackageId>,
+        dep: PackageId,
+        set: &mut HashSet<PackageId>,
+        visited: &mut HashSet<PackageId>,
     ) {
         if !visited.insert(dep) {
             return;
@@ -137,11 +137,11 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()>
         }
     }
 
-    fn compare_dependency_graphs<'a>(
-        previous_resolve: &'a Resolve,
-        resolve: &'a Resolve,
-    ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
-        fn key(dep: &PackageId) -> (&str, SourceId) {
+    fn compare_dependency_graphs(
+        previous_resolve: &Resolve,
+        resolve: &Resolve,
+    ) -> Vec<(Vec<PackageId>, Vec<PackageId>)> {
+        fn key(dep: PackageId) -> (&'static str, SourceId) {
             (dep.name().as_str(), dep.source_id())
         }
@@ -149,7 +149,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()>
         // more complicated because the equality for source ids does not take
         // precise versions into account (e.g. git shas), but we want to take
        // that into account here.
-        fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> {
+        fn vec_subtract(a: &[PackageId], b: &[PackageId]) -> Vec<PackageId> {
             a.iter()
                 .filter(|a| {
                     // If this package id is not found in `b`, then it's definitely
@@ -359,11 +359,11 @@ fn install_one(
             }
             // Failsafe to force replacing metadata for git packages
            // https://github.com/rust-lang/cargo/issues/4582
-            if let Some(set) = list.v1.remove(&pkg.package_id().clone()) {
-                list.v1.insert(pkg.package_id().clone(), set);
+            if let Some(set) = list.v1.remove(&pkg.package_id()) {
+                list.v1.insert(pkg.package_id(), set);
             }
             list.v1
-                .entry(pkg.package_id().clone())
+                .entry(pkg.package_id())
                 .or_insert_with(BTreeSet::new)
                 .insert(bin.to_string());
         }
@@ -372,13 +372,7 @@ fn install_one(
     let pkgs = list
         .v1
         .iter()
-        .filter_map(|(p, set)| {
-            if set.is_empty() {
-                Some(p.clone())
-            } else {
-                None
-            }
-        })
+        .filter_map(|(&p, set)| if set.is_empty() { Some(p) } else { None })
         .collect::<Vec<_>>();
     for p in pkgs.iter() {
         list.v1.remove(p);
@@ -387,7 +381,7 @@ fn install_one(
     // If installation was successful record newly installed binaries.
     if result.is_ok() {
         list.v1
-            .entry(pkg.package_id().clone())
+            .entry(pkg.package_id())
             .or_insert_with(BTreeSet::new)
             .extend(to_install.iter().map(|s| s.to_string()));
     }
@@ -518,8 +512,8 @@ where
     let pkg = {
         let mut map = SourceMap::new();
         map.insert(Box::new(&mut source));
-        PackageSet::new(&[pkgid.clone()], map, config)?
-            .get_one(&pkgid)?
+        PackageSet::new(&[pkgid], map, config)?
+            .get_one(pkgid)?
             .clone()
     };
     Ok((pkg, Box::new(source)))
@@ -617,8 +611,8 @@ fn find_duplicates(
         let name = format!("{}{}", name, env::consts::EXE_SUFFIX);
         if fs::metadata(dst.join(&name)).is_err() {
             None
-        } else if let Some((p, _)) = prev.v1.iter().find(|&(_, v)| v.contains(&name)) {
-            Some((name, Some(p.clone())))
+        } else if let Some((&p, _)) = prev.v1.iter().find(|&(_, v)| v.contains(&name)) {
+            Some((name, Some(p)))
         } else {
             Some((name, None))
         }
@@ -779,8 +773,8 @@ pub fn uninstall_one(
 ) -> CargoResult<()> {
     let crate_metadata = metadata(config, root)?;
     let metadata = read_crate_list(&crate_metadata)?;
-    let pkgid = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone();
-    uninstall_pkgid(&crate_metadata, metadata, &pkgid, bins, config)
+    let pkgid = PackageIdSpec::query_str(spec, metadata.v1.keys().cloned())?;
+    uninstall_pkgid(&crate_metadata, metadata, pkgid, bins, config)
 }
 
 fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> {
@@ -798,13 +792,13 @@ fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoRe
 fn uninstall_pkgid(
     crate_metadata: &FileLock,
     mut metadata: CrateListingV1,
-    pkgid: &PackageId,
+    pkgid: PackageId,
     bins: &[String],
     config: &Config,
 ) -> CargoResult<()> {
     let mut to_remove = Vec::new();
     {
-        let mut installed = match metadata.v1.entry(pkgid.clone()) {
+        let mut installed = match metadata.v1.entry(pkgid) {
             Entry::Occupied(e) => e,
             Entry::Vacant(..) => bail!("package `{}` is not installed", pkgid),
         };
@@ -38,7 +38,7 @@ pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResu
 fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
     Ok(ExportInfo {
         packages: ws.members().cloned().collect(),
-        workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
         resolve: None,
         target_directory: ws.target_dir().display().to_string(),
         version: VERSION,
@@ -58,15 +58,15 @@ fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<Exp
     )?;
     let mut packages = HashMap::new();
     for pkg in package_set.get_many(package_set.package_ids())? {
-        packages.insert(pkg.package_id().clone(), pkg.clone());
+        packages.insert(pkg.package_id(), pkg.clone());
     }
 
     Ok(ExportInfo {
         packages: packages.values().map(|p| (*p).clone()).collect(),
-        workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
         resolve: Some(MetadataResolve {
             resolve: (packages, resolve),
-            root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
+            root: ws.current_opt().map(|pkg| pkg.package_id()),
         }),
         target_directory: ws.target_dir().display().to_string(),
         version: VERSION,
@@ -94,40 +94,43 @@ struct MetadataResolve {
     root: Option<PackageId>,
 }
 
-fn serialize_resolve<S>((packages, resolve): &(HashMap<PackageId, Package>, Resolve), s: S) -> Result<S::Ok, S::Error>
+fn serialize_resolve<S>(
+    (packages, resolve): &(HashMap<PackageId, Package>, Resolve),
+    s: S,
+) -> Result<S::Ok, S::Error>
 where
     S: ser::Serializer,
 {
     #[derive(Serialize)]
-    struct Dep<'a> {
+    struct Dep {
         name: Option<String>,
-        pkg: &'a PackageId
+        pkg: PackageId,
     }
 
     #[derive(Serialize)]
     struct Node<'a> {
-        id: &'a PackageId,
-        dependencies: Vec<&'a PackageId>,
-        deps: Vec<Dep<'a>>,
+        id: PackageId,
+        dependencies: Vec<PackageId>,
+        deps: Vec<Dep>,
         features: Vec<&'a str>,
     }
 
-    s.collect_seq(resolve
-        .iter()
-        .map(|id| Node {
+    s.collect_seq(resolve.iter().map(|id| {
+        Node {
             id,
             dependencies: resolve.deps(id).map(|(pkg, _deps)| pkg).collect(),
-            deps: resolve.deps(id)
+            deps: resolve
+                .deps(id)
                 .map(|(pkg, _deps)| {
-                    let name = packages.get(pkg)
+                    let name = packages
+                        .get(&pkg)
                         .and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
-                        .and_then(|lib_target| {
-                            resolve.extern_crate_name(id, pkg, lib_target).ok()
-                        });
+                        .and_then(|lib_target| resolve.extern_crate_name(id, pkg, lib_target).ok());
 
                     Dep { name, pkg }
                 })
                 .collect(),
             features: resolve.features_sorted(id),
-        }))
+        }
+    }))
 }
@@ -165,7 +165,7 @@ fn read_nested_packages(
     };
     let pkg = Package::new(manifest, &manifest_path);
 
-    let pkg_id = pkg.package_id().clone();
+    let pkg_id = pkg.package_id();
     use std::collections::hash_map::Entry;
     match all_packages.entry(pkg_id) {
         Entry::Vacant(v) => {
@@ -1,10 +1,10 @@
 use std::ffi::OsString;
 
-use ops;
 use core::compiler::{Compilation, Doctest};
-use util::{self, CargoTestError, ProcessError, Test};
-use util::errors::CargoResult;
 use core::Workspace;
+use ops;
+use util::errors::CargoResult;
+use util::{self, CargoTestError, ProcessError, Test};
 
 pub struct TestOptions<'a> {
     pub compile_opts: ops::CompileOptions<'a>,
@@ -172,7 +172,7 @@ fn run_doc_tests(
         p.arg("--test-args").arg(arg);
     }
 
-    if let Some(cfgs) = compilation.cfgs.get(package.package_id()) {
+    if let Some(cfgs) = compilation.cfgs.get(&package.package_id()) {
         for cfg in cfgs.iter() {
             p.arg("--cfg").arg(cfg);
         }
@@ -185,7 +185,7 @@ fn run_doc_tests(
         p.arg("--extern").arg(&arg);
     }
 
-    if let Some(flags) = compilation.rustdocflags.get(package.package_id()) {
+    if let Some(flags) = compilation.rustdocflags.get(&package.package_id()) {
         p.args(flags);
     }
@@ -133,12 +133,12 @@ fn resolve_with_registry<'cfg>(
 ///
 /// The previous resolve normally comes from a lockfile. This function does not
 /// read or write lockfiles from the filesystem.
-pub fn resolve_with_previous<'a, 'cfg>(
+pub fn resolve_with_previous<'cfg>(
     registry: &mut PackageRegistry<'cfg>,
     ws: &Workspace<'cfg>,
     method: Method,
-    previous: Option<&'a Resolve>,
-    to_avoid: Option<&HashSet<&'a PackageId>>,
+    previous: Option<&Resolve>,
+    to_avoid: Option<&HashSet<PackageId>>,
     specs: &[PackageIdSpec],
     register_patches: bool,
     warn: bool,
@@ -160,7 +160,7 @@ pub fn resolve_with_previous<'a, 'cfg>(
         );
     }
 
-    let keep = |p: &&'a PackageId| {
+    let keep = |p: &PackageId| {
         !to_avoid_sources.contains(&p.source_id())
             && match to_avoid {
                 Some(set) => !set.contains(p),
@@ -196,9 +196,9 @@ pub fn resolve_with_previous<'a, 'cfg>(
         let patches = patches
             .iter()
             .map(|dep| {
-                let unused = previous.unused_patches();
+                let unused = previous.unused_patches().iter().cloned();
                 let candidates = previous.iter().chain(unused);
-                match candidates.filter(keep).find(|id| dep.matches_id(id)) {
+                match candidates.filter(keep).find(|&id| dep.matches_id(id)) {
                     Some(id) => {
                         let mut dep = dep.clone();
                         dep.lock_to(id);
@@ -309,7 +309,7 @@ pub fn resolve_with_previous<'a, 'cfg>(
         Some(r) => root_replace
             .iter()
             .map(|&(ref spec, ref dep)| {
-                for (key, val) in r.replacements().iter() {
+                for (&key, &val) in r.replacements().iter() {
                     if spec.matches(key) && dep.matches_id(val) && keep(&val) {
                         let mut dep = dep.clone();
                         dep.lock_to(val);
@@ -376,7 +376,7 @@ pub fn get_resolved_packages<'a>(
     resolve: &Resolve,
     registry: PackageRegistry<'a>,
 ) -> CargoResult<PackageSet<'a>> {
-    let ids: Vec<PackageId> = resolve.iter().cloned().collect();
+    let ids: Vec<PackageId> = resolve.iter().collect();
     registry.get(&ids)
 }
@@ -396,11 +396,11 @@ pub fn get_resolved_packages<'a>(
 ///
 /// Note that this function, at the time of this writing, is basically the
 /// entire fix for #4127
-fn register_previous_locks<'a>(
+fn register_previous_locks(
     ws: &Workspace,
     registry: &mut PackageRegistry,
-    resolve: &'a Resolve,
-    keep: &Fn(&&'a PackageId) -> bool,
+    resolve: &Resolve,
+    keep: &Fn(&PackageId) -> bool,
 ) {
     let path_pkg = |id: SourceId| {
         if !id.is_path() {
@@ -489,7 +489,7 @@ fn register_previous_locks<'a>(
     let mut path_deps = ws.members().cloned().collect::<Vec<_>>();
     let mut visited = HashSet::new();
     while let Some(member) = path_deps.pop() {
-        if !visited.insert(member.package_id().clone()) {
+        if !visited.insert(member.package_id()) {
             continue;
         }
         for dep in member.dependencies() {
@@ -547,19 +547,15 @@ fn register_previous_locks<'a>(
     // function let's put it to action. Take a look at the previous lockfile,
     // filter everything by this callback, and then shove everything else into
     // the registry as a locked dependency.
-    let keep = |id: &&'a PackageId| keep(id) && !avoid_locking.contains(id);
+    let keep = |id: &PackageId| keep(id) && !avoid_locking.contains(id);
 
     for node in resolve.iter().filter(keep) {
-        let deps = resolve
-            .deps_not_replaced(node)
-            .filter(keep)
-            .cloned()
-            .collect();
-        registry.register_lock(node.clone(), deps);
+        let deps = resolve.deps_not_replaced(node).filter(keep).collect();
+        registry.register_lock(node, deps);
     }
 
 /// recursively add `node` and all its transitive dependencies to `set`
-fn add_deps<'a>(resolve: &'a Resolve, node: &'a PackageId, set: &mut HashSet<&'a PackageId>) {
+fn add_deps(resolve: &Resolve, node: PackageId, set: &mut HashSet<PackageId>) {
     if !set.insert(node) {
         return;
     }
@@ -145,22 +145,22 @@ impl<'cfg> Source for DirectorySource<'cfg> {
             }
             manifest.set_summary(summary);
             let pkg = Package::new(manifest, pkg.manifest_path());
-            self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
+            self.packages.insert(pkg.package_id(), (pkg, cksum));
         }
 
         Ok(())
     }
 
-    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
         self.packages
-            .get(id)
+            .get(&id)
             .map(|p| &p.0)
             .cloned()
             .map(MaybePackage::Ready)
             .ok_or_else(|| format_err!("failed to find package with id: {}", id))
     }
 
-    fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
         panic!("no downloads to do")
     }
@@ -168,8 +168,8 @@ impl<'cfg> Source for DirectorySource<'cfg> {
         Ok(pkg.package_id().version().to_string())
     }
 
-    fn verify(&self, id: &PackageId) -> CargoResult<()> {
-        let (pkg, cksum) = match self.packages.get(id) {
+    fn verify(&self, id: PackageId) -> CargoResult<()> {
+        let (pkg, cksum) = match self.packages.get(&id) {
             Some(&(ref pkg, ref cksum)) => (pkg, cksum),
             None => bail!("failed to find entry for `{}` in directory source", id),
         };
@@ -214,7 +214,7 @@ impl<'cfg> Source for GitSource<'cfg> {
         self.path_source.as_mut().unwrap().update()
     }
 
-    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
         trace!(
             "getting packages for package id `{}` from `{:?}`",
             id,
@@ -226,7 +226,7 @@ impl<'cfg> Source for GitSource<'cfg> {
             .download(id)
     }
 
-    fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
         panic!("no download should have started")
     }
@@ -545,7 +545,7 @@ impl<'cfg> Source for PathSource<'cfg> {
         Ok(())
     }
 
-    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
         trace!("getting packages; id={}", id);
 
         let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
@@ -554,7 +554,7 @@ impl<'cfg> Source for PathSource<'cfg> {
             .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
     }
 
-    fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
         panic!("no download should have started")
     }
@@ -124,7 +124,7 @@ impl<'cfg> RegistryIndex<'cfg> {
     }
 
     /// Return the hash listed for a specified PackageId.
-    pub fn hash(&mut self, pkg: &PackageId, load: &mut RegistryData) -> CargoResult<String> {
+    pub fn hash(&mut self, pkg: PackageId, load: &mut RegistryData) -> CargoResult<String> {
         let name = pkg.name().as_str();
         let version = pkg.version();
         if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) {
@@ -69,7 +69,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
         Ok(())
     }
 
-    fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock> {
+    fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {
         let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
         let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
@@ -106,7 +106,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
 
     fn finish_download(
         &mut self,
-        _pkg: &PackageId,
+        _pkg: PackageId,
         _checksum: &str,
         _data: &[u8],
     ) -> CargoResult<FileLock> {
@@ -348,15 +348,15 @@ pub trait RegistryData {
     ) -> CargoResult<()>;
     fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
     fn update_index(&mut self) -> CargoResult<()>;
-    fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock>;
+    fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock>;
     fn finish_download(
         &mut self,
-        pkg: &PackageId,
+        pkg: PackageId,
         checksum: &str,
         data: &[u8],
     ) -> CargoResult<FileLock>;
 
-    fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
+    fn is_crate_downloaded(&self, _pkg: PackageId) -> bool {
         true
     }
 }
@@ -418,7 +418,7 @@ impl<'cfg> RegistrySource<'cfg> {
     /// compiled.
     ///
     /// No action is taken if the source looks like it's already unpacked.
-    fn unpack_package(&self, pkg: &PackageId, tarball: &FileLock) -> CargoResult<PathBuf> {
+    fn unpack_package(&self, pkg: PackageId, tarball: &FileLock) -> CargoResult<PathBuf> {
         let dst = self
             .src_path
             .join(&format!("{}-{}", pkg.name(), pkg.version()));
@@ -475,7 +475,7 @@ impl<'cfg> RegistrySource<'cfg> {
         Ok(())
     }
 
-    fn get_pkg(&mut self, package: &PackageId, path: &FileLock) -> CargoResult<Package> {
+    fn get_pkg(&mut self, package: PackageId, path: &FileLock) -> CargoResult<Package> {
         let path = self
             .unpack_package(package, path)
             .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
@@ -566,7 +566,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
         Ok(())
     }
 
-    fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, package: PackageId) -> CargoResult<MaybePackage> {
         let hash = self.index.hash(package, &mut *self.ops)?;
         match self.ops.download(package, &hash)? {
             MaybeLock::Ready(file) => self.get_pkg(package, &file).map(MaybePackage::Ready),
@@ -576,7 +576,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
         }
     }
 
-    fn finish_download(&mut self, package: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, package: PackageId, data: Vec<u8>) -> CargoResult<Package> {
         let hash = self.index.hash(package, &mut *self.ops)?;
         let file = self.ops.finish_download(package, &hash, &data)?;
         self.get_pkg(package, &file)
@ -126,7 +126,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
|
||||
Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
|
||||
}
|
||||
|
||||
fn filename(&self, pkg: &PackageId) -> String {
|
||||
fn filename(&self, pkg: PackageId) -> String {
|
||||
format!("{}-{}.crate", pkg.name(), pkg.version())
|
||||
}
|
||||
}
|
||||
@@ -213,7 +213,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         Ok(())
     }

-    fn download(&mut self, pkg: &PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
+    fn download(&mut self, pkg: PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
         let filename = self.filename(pkg);

         // Attempt to open a read-only copy first to avoid an exclusive write
@@ -246,7 +246,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {

     fn finish_download(
         &mut self,
-        pkg: &PackageId,
+        pkg: PackageId,
         checksum: &str,
         data: &[u8],
     ) -> CargoResult<FileLock> {
@@ -269,7 +269,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         Ok(dst)
     }

-    fn is_crate_downloaded(&self, pkg: &PackageId) -> bool {
+    fn is_crate_downloaded(&self, pkg: PackageId) -> bool {
         let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
         let path = Path::new(&filename);
@@ -70,11 +70,11 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
         Ok(())
     }

-    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
         let id = id.with_source_id(self.replace_with);
         let pkg = self
             .inner
-            .download(&id)
+            .download(id)
             .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
         Ok(match pkg {
             MaybePackage::Ready(pkg) => {
@@ -84,11 +84,11 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
         })
     }

-    fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
+    fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
         let id = id.with_source_id(self.replace_with);
         let pkg = self
             .inner
-            .finish_download(&id, data)
+            .finish_download(id, data)
             .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
         Ok(pkg.map_source(self.replace_with, self.to_replace))
     }
@@ -97,9 +97,9 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
         self.inner.fingerprint(id)
     }

-    fn verify(&self, id: &PackageId) -> CargoResult<()> {
+    fn verify(&self, id: PackageId) -> CargoResult<()> {
         let id = id.with_source_id(self.replace_with);
-        self.inner.verify(&id)
+        self.inner.verify(id)
     }

     fn describe(&self) -> String {
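One pattern worth noting in the `ReplacedSource` hunks: `let id = id.with_source_id(...)` shadows the parameter with the remapped id, and because the type is `Copy`, the result can then be forwarded by value with no borrow or clone. A small sketch under the assumption of an illustrative `PkgId`/`SourceId` pair (not cargo's real definitions):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct SourceId(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct PkgId {
        name: u32,
        source: SourceId,
    }

    impl PkgId {
        // Taking `self` by value is free for a Copy type.
        fn with_source_id(self, source: SourceId) -> PkgId {
            PkgId { source, ..self }
        }
    }

    fn main() {
        let id = PkgId { name: 1, source: SourceId(0) };
        let id = id.with_source_id(SourceId(9)); // shadowing, as in the diff
        assert_eq!(id.source, SourceId(9));
    }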
@@ -16,7 +16,7 @@ pub fn emit<T: Message>(t: &T) {

 #[derive(Serialize)]
 pub struct FromCompiler<'a> {
-    pub package_id: &'a PackageId,
+    pub package_id: PackageId,
     pub target: &'a Target,
     pub message: Box<RawValue>,
 }
@@ -29,7 +29,7 @@ impl<'a> Message for FromCompiler<'a> {

 #[derive(Serialize)]
 pub struct Artifact<'a> {
-    pub package_id: &'a PackageId,
+    pub package_id: PackageId,
     pub target: &'a Target,
     pub profile: ArtifactProfile,
     pub features: Vec<String>,
@@ -57,7 +57,7 @@ pub struct ArtifactProfile {

 #[derive(Serialize)]
 pub struct BuildScript<'a> {
-    pub package_id: &'a PackageId,
+    pub package_id: PackageId,
     pub linked_libs: &'a [String],
     pub linked_paths: &'a [String],
     pub cfgs: &'a [String],
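In the three message structs above, replacing `&'a PackageId` with an owned `PackageId` drops one borrow the struct must carry without changing the emitted JSON, since serde serializes `&T` by forwarding to `T`. A minimal sketch of that equivalence, assuming `serde` (with derive) and `serde_json` as dependencies and an illustrative `Id` type:

    use serde::Serialize;

    #[derive(Serialize, Clone, Copy)]
    struct Id {
        n: u32,
    }

    #[derive(Serialize)]
    struct ByRef<'a> {
        id: &'a Id,
    }

    #[derive(Serialize)]
    struct ByVal {
        id: Id,
    }

    fn main() {
        let id = Id { n: 1 };
        let a = serde_json::to_string(&ByRef { id: &id }).unwrap();
        let b = serde_json::to_string(&ByVal { id }).unwrap();
        assert_eq!(a, b); // identical JSON; the by-value form needs no lifetime
    }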
@@ -666,7 +666,7 @@ impl TomlProject {
 }

 struct Context<'a, 'b> {
-    pkgid: Option<&'a PackageId>,
+    pkgid: Option<PackageId>,
     deps: &'a mut Vec<Dependency>,
     source_id: SourceId,
     nested_paths: &'a mut Vec<PathBuf>,
@@ -873,7 +873,7 @@ impl TomlManifest {

         {
             let mut cx = Context {
-                pkgid: Some(&pkgid),
+                pkgid: Some(pkgid),
                 deps: &mut deps,
                 source_id,
                 nested_paths: &mut nested_paths,
@@ -1,7 +1,7 @@
+use cargo::core::resolver::ResolveError;
 use cargo::core::{compiler::CompileMode, Workspace};
 use cargo::ops::{self, CompileOptions};
 use cargo::util::{config::Config, errors::ManifestError};
-use cargo::core::resolver::ResolveError;

 use support::project;
@@ -150,5 +150,5 @@ fn member_manifest_version_error() {
     let resolve_err: &ResolveError = error.downcast_ref().expect("Not a ResolveError");
     let package_path = resolve_err.package_path();
     assert_eq!(package_path.len(), 1, "package_path: {:?}", package_path);
-    assert_eq!(&package_path[0], member_bar.package_id());
+    assert_eq!(package_path[0], member_bar.package_id());
 }
@@ -133,7 +133,7 @@ proptest! {
         let not_selected: Vec<_> = input
             .iter()
             .cloned()
-            .filter(|x| !r.contains(x.package_id()))
+            .filter(|x| !r.contains(&x.package_id()))
             .collect();
         if !not_selected.is_empty() {
             let indexs_to_unpublish: Vec<_> = indexs_to_unpublish.iter().map(|x| x.get(&not_selected)).collect();
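This hunk is the one place the change adds a `&` rather than removing one: once `package_id()` returns the id by value, `Vec::contains` (signature `fn contains(&self, x: &T) -> bool` where `T: PartialEq`) still takes a reference, so the call site borrows the temporary. A minimal sketch with an illustrative `Id` type:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Id(u32);

    // Stand-in for a by-value package_id() accessor.
    fn package_id_of(x: u32) -> Id {
        Id(x)
    }

    fn main() {
        let resolved = vec![Id(1), Id(2), Id(3)];
        // contains() takes &T even for Copy types, hence the added `&`.
        assert!(resolved.contains(&package_id_of(3)));
        assert!(!resolved.contains(&package_id_of(4)));
    }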
@@ -1001,7 +1001,7 @@ fn incomplete_information_skiping() {
         input
             .iter()
             .cloned()
-            .filter(|x| &package_to_yank != x.package_id())
+            .filter(|x| package_to_yank != x.package_id())
             .collect(),
     );
     assert_eq!(input.len(), new_reg.len() + 1);
@@ -1070,7 +1070,7 @@ fn incomplete_information_skiping_2() {
         input
             .iter()
             .cloned()
-            .filter(|x| &package_to_yank != x.package_id())
+            .filter(|x| package_to_yank != x.package_id())
             .collect(),
     );
     assert_eq!(input.len(), new_reg.len() + 1);
@@ -1120,7 +1120,7 @@ fn incomplete_information_skiping_3() {
         input
             .iter()
             .cloned()
-            .filter(|x| &package_to_yank != x.package_id())
+            .filter(|x| package_to_yank != x.package_id())
             .collect(),
     );
     assert_eq!(input.len(), new_reg.len() + 1);
@@ -42,7 +42,7 @@ pub fn resolve_and_validated(
         if p.name().ends_with("-sys") {
             assert!(links.insert(p.name()));
         }
-        stack.extend(resolve.deps(&p).map(|(dp, deps)| {
+        stack.extend(resolve.deps(p).map(|(dp, deps)| {
             for d in deps {
                 assert!(d.matches_id(dp));
             }