Cleanup: Remove unnecessary borrows.

Eric Huss 2019-03-26 17:53:53 -07:00
parent a57b96dc52
commit ef0b47769b
28 changed files with 110 additions and 107 deletions
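
The diff is mechanical: it drops `&` (and `ref` in patterns) where the borrow is redundant, and turns on `#![warn(clippy::needless_borrow)]` in the crate roots so the lint keeps catching new cases. A minimal standalone sketch of the pattern (not taken from the Cargo sources; the helper names are made up for illustration):

    // `clippy::needless_borrow` is meant to flag borrows the compiler would
    // add or strip (via deref coercion) on its own.
    fn print_len(s: &str) {
        println!("{} is {} bytes", s, s.len());
    }

    fn main() {
        let name = String::from("cargo");
        print_len(&&name); // needless: `&name` already deref-coerces to `&str`
        print_len(&name);  // cleaned up, as throughout this diff

        // Same idea in patterns: `value_of`-style APIs already yield `&str`,
        // so `ref` only adds another layer of reference (`&&str`).
        let explain: Option<&str> = Some("E0308");
        if let Some(ref code) = explain {
            println!("with ref: {}", code); // code: &&str
        }
        if let Some(code) = explain {
            println!("without:  {}", code); // code: &str
        }
    }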

@@ -47,7 +47,7 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'"
 return Ok(());
 }
-if let Some(ref code) = args.value_of("explain") {
+if let Some(code) = args.value_of("explain") {
 let mut procss = config.rustc(None)?.process();
 procss.arg("--explain").arg(code).exec()?;
 return Ok(());

@@ -1,6 +1,7 @@
 #![warn(rust_2018_idioms)] // while we're getting used to 2018
 #![allow(clippy::too_many_arguments)] // large project
 #![allow(clippy::redundant_closure)] // there's a false positive
+#![warn(clippy::needless_borrow)]
 use std::collections::BTreeSet;
 use std::env;

@@ -113,7 +113,7 @@ impl BuildPlan {
 let id = self.plan.invocations.len();
 self.invocation_map.insert(unit.buildkey(), id);
 let deps = cx
-.dep_targets(&unit)
+.dep_targets(unit)
 .iter()
 .map(|dep| self.invocation_map[&dep.buildkey()])
 .collect();

@@ -120,7 +120,7 @@ impl<'cfg> Compilation<'cfg> {
 rustc_process: rustc,
 host: bcx.host_triple().to_string(),
 target: bcx.target_triple().to_string(),
-target_runner: target_runner(&bcx)?,
+target_runner: target_runner(bcx)?,
 })
 }

@@ -491,7 +491,7 @@ fn compute_metadata<'a, 'cfg>(
 // settings like debuginfo and whatnot.
 unit.profile.hash(&mut hasher);
 unit.mode.hash(&mut hasher);
-if let Some(ref args) = bcx.extra_args_for(unit) {
+if let Some(args) = bcx.extra_args_for(unit) {
 args.hash(&mut hasher);
 }

@@ -474,11 +474,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 for unit in keys {
 for output in self.outputs(unit)?.iter() {
 if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
-report_collision(unit, &other_unit, &output.path)?;
+report_collision(unit, other_unit, &output.path)?;
 }
 if let Some(hardlink) = output.hardlink.as_ref() {
 if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
-report_collision(unit, &other_unit, hardlink)?;
+report_collision(unit, other_unit, hardlink)?;
 }
 }
 if let Some(ref export_path) = output.export_path {
@@ -488,7 +488,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 {}\
 The exported filenames should be unique.\n\
 {}",
-describe_collision(unit, &other_unit, &export_path),
+describe_collision(unit, other_unit, export_path),
 suggestion
 ))?;
 }

@@ -160,7 +160,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
 .env(
 "TARGET",
 &match unit.kind {
-Kind::Host => &bcx.host_triple(),
+Kind::Host => bcx.host_triple(),
 Kind::Target => bcx.target_triple(),
 },
 )

@@ -682,7 +682,7 @@ fn calculate<'a, 'cfg>(
 local.extend(local_fingerprint_run_custom_build_deps(cx, unit));
 local
 } else {
-let fingerprint = pkg_fingerprint(&cx.bcx, unit.pkg)?;
+let fingerprint = pkg_fingerprint(cx.bcx, unit.pkg)?;
 vec![LocalFingerprint::Precalculated(fingerprint)]
 };
@@ -701,7 +701,7 @@ fn calculate<'a, 'cfg>(
 profile: profile_hash,
 // Note that .0 is hashed here, not .1 which is the cwd. That doesn't
 // actually affect the output artifact so there's no need to hash it.
-path: util::hash_u64(&super::path_args(&cx.bcx, unit).0),
+path: util::hash_u64(&super::path_args(cx.bcx, unit).0),
 features: format!("{:?}", bcx.resolve.features_sorted(unit.pkg.package_id())),
 deps,
 local,
@@ -855,7 +855,7 @@ fn build_script_local_fingerprints<'a, 'cfg>(
 let output = deps.build_script_output.clone();
 if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
 debug!("old local fingerprints deps");
-let s = pkg_fingerprint(&cx.bcx, unit.pkg)?;
+let s = pkg_fingerprint(cx.bcx, unit.pkg)?;
 return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)));
 }

@@ -631,7 +631,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
 add_error_format(bcx, &mut rustdoc);
-if let Some(ref args) = bcx.extra_args_for(unit) {
+if let Some(args) = bcx.extra_args_for(unit) {
 rustdoc.args(args);
 }
@@ -822,7 +822,7 @@ fn build_base_args<'a, 'cfg>(
 cmd.arg("-C").arg(format!("debuginfo={}", debuginfo));
 }
-if let Some(ref args) = bcx.extra_args_for(unit) {
+if let Some(args) = bcx.extra_args_for(unit) {
 cmd.args(args);
 }

@@ -105,7 +105,7 @@ impl ser::Serialize for Package {
 SerializedPackage {
 name: &*package_id.name(),
-version: &package_id.version(),
+version: package_id.version(),
 id: package_id,
 license,
 license_file,
@@ -740,7 +740,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
 self.set.multi.messages(|msg| {
 let token = msg.token().expect("failed to read token");
 let handle = &pending[&token].1;
-if let Some(result) = msg.result_for(&handle) {
+if let Some(result) = msg.result_for(handle) {
 results.push((token, result));
 } else {
 debug!("message without a result (?)");

@@ -368,7 +368,7 @@ impl PartialOrd for SourceId {
 impl Ord for SourceId {
 fn cmp(&self, other: &SourceId) -> Ordering {
-self.inner.cmp(&other.inner)
+self.inner.cmp(other.inner)
 }
 }

@@ -58,7 +58,7 @@ impl Summary {
 )
 }
 }
-let feature_map = build_feature_map(&features, &dependencies, namespaced_features)?;
+let feature_map = build_feature_map(features, &dependencies, namespaced_features)?;
 Ok(Summary {
 inner: Rc::new(Inner {
 package_id: pkg_id,
@@ -170,7 +170,7 @@ where
 // iteration over the list if the dependency is found in the list.
 let mut dependency_found = if namespaced {
 match dep_map.get(feature.borrow()) {
-Some(ref dep_data) => {
+Some(dep_data) => {
 if !dep_data.iter().any(|d| d.is_optional()) {
 failure::bail!(
 "Feature `{}` includes the dependency of the same name, but this is \

@@ -14,6 +14,7 @@
 #![allow(clippy::too_many_arguments)] // large project
 #![allow(clippy::type_complexity)] // there's an exceptionally complex type
 #![allow(clippy::wrong_self_convention)] // perhaps `Rc` should be special-cased in Clippy?
+#![warn(clippy::needless_borrow)]
 use std::fmt;

@@ -372,12 +372,12 @@ pub fn compile_ws<'a>(
 &resolve_with_overrides,
 &packages,
 config,
-&build_config,
+build_config,
 profiles,
 extra_compiler_args,
 )?;
 let cx = Context::new(config, &bcx)?;
-cx.compile(&units, export_dir.clone(), &exec)?
+cx.compile(&units, export_dir.clone(), exec)?
 };
 Ok(ret)

@@ -59,7 +59,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option
 // dirty. This will `bail!` if dirty, unless allow_dirty. Produce json
 // info for any sha1 (HEAD revision) returned.
 let vcs_info = if !opts.allow_dirty {
-check_repo_state(pkg, &src_files, &config, opts.allow_dirty)?
+check_repo_state(pkg, &src_files, config, opts.allow_dirty)?
 .map(|h| json!({"git":{"sha1": h}}))
 } else {
 None
@@ -364,7 +364,7 @@ fn tar(
 }
 }
-if let Some(ref json) = vcs_info {
+if let Some(json) = vcs_info {
 let filename: PathBuf = Path::new(VCS_INFO_FILE).into();
 debug_assert!(check_filename(&filename).is_ok());
 let fnd = filename.display();

@@ -147,7 +147,7 @@ fn uninstall_pkgid(
 installed.remove();
 }
 }
-write_crate_list(&crate_metadata, metadata)?;
+write_crate_list(crate_metadata, metadata)?;
 for bin in to_remove {
 config.shell().status("Removing", bin.display())?;
 paths::remove_file(bin)?;

@@ -238,7 +238,7 @@ fn transmit(
 .map(|(feat, values)| {
 (
 feat.to_string(),
-values.iter().map(|fv| fv.to_string(&summary)).collect(),
+values.iter().map(|fv| fv.to_string(summary)).collect(),
 )
 })
 .collect::<BTreeMap<String, Vec<String>>>();

@@ -434,19 +434,19 @@ about this warning.";
 compile_opts: &CompileOptions<'_>,
 ) -> CargoResult<()> {
 if self.is_present_with_zero_values("example") {
-print_available_examples(&workspace, &compile_opts)?;
+print_available_examples(workspace, compile_opts)?;
 }
 if self.is_present_with_zero_values("bin") {
-print_available_binaries(&workspace, &compile_opts)?;
+print_available_binaries(workspace, compile_opts)?;
 }
 if self.is_present_with_zero_values("bench") {
-print_available_benches(&workspace, &compile_opts)?;
+print_available_benches(workspace, compile_opts)?;
 }
 if self.is_present_with_zero_values("test") {
-print_available_tests(&workspace, &compile_opts)?;
+print_available_tests(workspace, compile_opts)?;
 }
 Ok(())

@@ -124,7 +124,7 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
 results.insert(key.clone(), IN_PROGRESS);
 let depth = 1 + map
-.get(&key)
+.get(key)
 .into_iter()
 .flat_map(|it| it)
 .map(|dep| depth(dep, map, results))

@@ -835,7 +835,7 @@ impl TomlManifest {
 // Parse features first so they will be available when parsing other parts of the TOML.
 let empty = Vec::new();
 let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
-let features = Features::new(&cargo_features, &mut warnings)?;
+let features = Features::new(cargo_features, &mut warnings)?;
 let project = me.project.as_ref().or_else(|| me.package.as_ref());
 let project = project.ok_or_else(|| failure::format_err!("no `package` section found"))?;
@@ -1010,7 +1010,7 @@ impl TomlManifest {
 let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) {
 (Some(config), None) => WorkspaceConfig::Root(WorkspaceRootConfig::new(
-&package_root,
+package_root,
 &config.members,
 &config.default_members,
 &config.exclude,
@@ -1138,7 +1138,7 @@ impl TomlManifest {
 let mut deps = Vec::new();
 let empty = Vec::new();
 let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
-let features = Features::new(&cargo_features, &mut warnings)?;
+let features = Features::new(cargo_features, &mut warnings)?;
 let (replace, patch) = {
 let mut cx = Context {
@@ -1157,7 +1157,7 @@ impl TomlManifest {
 let profiles = Profiles::new(me.profile.as_ref(), config, &features, &mut warnings)?;
 let workspace_config = match me.workspace {
 Some(ref config) => WorkspaceConfig::Root(WorkspaceRootConfig::new(
-&root,
+root,
 &config.members,
 &config.default_members,
 &config.exclude,

@@ -577,7 +577,7 @@ fn git_lock_file_doesnt_change() {
 let root = paths::root();
 t!(fs::create_dir(&root.join(".cargo")));
 t!(t!(File::create(root.join(".cargo/config"))).write_all(
-&format!(
+format!(
 r#"
 [source.my-git-repo]
 git = '{}'

@@ -158,7 +158,7 @@ fn cargo_compile_offline_with_cached_git_dep() {
 File::create(&prj.root().join("Cargo.toml"))
 .unwrap()
 .write_all(
-&format!(
+format!(
 r#"
 [project]
 name = "cache_git_dep"
@@ -220,7 +220,7 @@ fn cargo_compile_offline_with_cached_git_dep() {
 File::create(&p.root().join("Cargo.toml"))
 .unwrap()
 .write_all(
-&format!(
+format!(
 r#"
 [project]
 name = "foo"
@@ -2861,7 +2861,7 @@ fn templatedir_doesnt_cause_problems() {
 File::create(paths::home().join(".gitconfig"))
 .unwrap()
 .write_all(
-&format!(
+format!(
 r#"
 [init]
 templatedir = {}

@@ -2,6 +2,7 @@
 #![cfg_attr(feature = "deny-warnings", deny(warnings))]
 #![allow(clippy::blacklisted_name)]
 #![allow(clippy::explicit_iter_loop)]
+#![warn(clippy::needless_borrow)]
 #[macro_use]
 mod support;

@@ -910,7 +910,7 @@ authors = []
 f,
 "bar-0.1.0.crate",
 &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
-&[("Cargo.toml", &rewritten_toml)],
+&[("Cargo.toml", rewritten_toml)],
 );
 }

@@ -201,7 +201,7 @@ fn profile_override_bad_settings() {
 ),
 ("overrides = {}", "Profile overrides cannot be nested."),
 ];
-for &(ref snippet, ref expected) in bad_values.iter() {
+for &(snippet, expected) in bad_values.iter() {
 let p = project()
 .file(
 "Cargo.toml",

@@ -46,7 +46,7 @@ proptest! {
 // So we try some of the most complicated.
 for this in input.iter().rev().take(20) {
 let _ = resolve_and_validated(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 );
@@ -81,13 +81,13 @@ proptest! {
 // minimal-versions change what order the candidates
 // are tried but not the existence of a solution
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 );
 let mres = resolve_with_config(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 Some(&config),
@@ -127,13 +127,13 @@ proptest! {
 // So we try some of the most complicated.
 for this in input.iter().rev().take(10) {
 if resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 ).is_ok() {
 prop_assert!(
 resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &removed_reg,
 ).is_ok(),
@@ -157,7 +157,7 @@ proptest! {
 // So we try some of the most complicated.
 for this in input.iter().rev().take(10) {
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 );
@@ -183,7 +183,7 @@
 );
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &new_reg,
 );
@@ -217,7 +217,7 @@
 );
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &new_reg,
 );
@@ -244,7 +244,7 @@ fn basic_public_dependency() {
 pkg!("C" => [dep("A"), dep("B")]),
 ]);
-let res = resolve_and_validated(&pkg_id("root"), vec![dep("C")], &reg).unwrap();
+let res = resolve_and_validated(pkg_id("root"), vec![dep("C")], &reg).unwrap();
 assert_same(
 &res,
 &names(&[
@@ -280,7 +280,7 @@ fn public_dependency_filling_in() {
 pkg!("d" => [dep("c"), dep("a"), dep("b")]),
 ]);
-let res = resolve_and_validated(&pkg_id("root"), vec![dep("d")], &reg).unwrap();
+let res = resolve_and_validated(pkg_id("root"), vec![dep("d")], &reg).unwrap();
 assert_same(
 &res,
 &names(&[
@@ -315,7 +315,7 @@ fn public_dependency_filling_in_and_update() {
 pkg!("C" => [dep("A"),dep("B")]),
 pkg!("D" => [dep("B"),dep("C")]),
 ]);
-let res = resolve_and_validated(&pkg_id("root"), vec![dep("D")], &reg).unwrap();
+let res = resolve_and_validated(pkg_id("root"), vec![dep("D")], &reg).unwrap();
 assert_same(
 &res,
 &names(&[
@@ -342,7 +342,7 @@ fn public_dependency_skiping() {
 ];
 let reg = registry(input.clone());
-resolve(&pkg_id("root"), vec![dep("c")], &reg).unwrap();
+resolve(pkg_id("root"), vec![dep("c")], &reg).unwrap();
 }
 #[test]
@@ -362,7 +362,7 @@ fn public_dependency_skiping_in_backtracking() {
 ];
 let reg = registry(input.clone());
-resolve(&pkg_id("root"), vec![dep("C")], &reg).unwrap();
+resolve(pkg_id("root"), vec![dep("C")], &reg).unwrap();
 }
 #[test]
@@ -374,7 +374,7 @@ fn test_dependency_with_empty_name() {
 #[test]
 fn test_resolving_empty_dependency_list() {
-let res = resolve(&pkg_id("root"), Vec::new(), &registry(vec![])).unwrap();
+let res = resolve(pkg_id("root"), Vec::new(), &registry(vec![])).unwrap();
 assert_eq!(res, names(&["root"]));
 }
@@ -382,28 +382,28 @@ fn test_resolving_empty_dependency_list() {
 #[test]
 fn test_resolving_only_package() {
 let reg = registry(vec![pkg!("foo")]);
-let res = resolve(&pkg_id("root"), vec![dep("foo")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("foo")], &reg).unwrap();
 assert_same(&res, &names(&["root", "foo"]));
 }
 #[test]
 fn test_resolving_one_dep() {
 let reg = registry(vec![pkg!("foo"), pkg!("bar")]);
-let res = resolve(&pkg_id("root"), vec![dep("foo")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("foo")], &reg).unwrap();
 assert_same(&res, &names(&["root", "foo"]));
 }
 #[test]
 fn test_resolving_multiple_deps() {
 let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
-let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("baz")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")], &reg).unwrap();
 assert_same(&res, &names(&["root", "foo", "baz"]));
 }
 #[test]
 fn test_resolving_transitive_deps() {
 let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]);
-let res = resolve(&pkg_id("root"), vec![dep("bar")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("bar")], &reg).unwrap();
 assert_same(&res, &names(&["root", "foo", "bar"]));
 }
@@ -411,7 +411,7 @@ fn test_resolving_transitive_deps() {
 #[test]
 fn test_resolving_common_transitive_deps() {
 let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
-let res = resolve(&pkg_id("root"), vec![dep("foo"), dep("bar")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")], &reg).unwrap();
 assert_same(&res, &names(&["root", "foo", "bar"]));
 }
@@ -425,7 +425,7 @@ fn test_resolving_with_same_name() {
 let reg = registry(list);
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![
 dep_loc("foo", "https://first.example.com"),
 dep_loc("bar", "https://second.example.com"),
@@ -453,7 +453,7 @@ fn test_resolving_with_dev_deps() {
 ]);
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep("foo"), dep_kind("baz", Kind::Development)],
 &reg,
 )
@@ -466,7 +466,7 @@ fn test_resolving_with_dev_deps() {
 fn resolving_with_many_versions() {
 let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
-let res = resolve(&pkg_id("root"), vec![dep("foo")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("foo")], &reg).unwrap();
 assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.2")]));
 }
@@ -475,7 +475,7 @@ fn resolving_with_many_versions() {
 fn resolving_with_specific_version() {
 let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "=1.0.1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "=1.0.1")], &reg).unwrap();
 assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.1")]));
 }
@@ -491,7 +491,7 @@ fn test_resolving_maximum_version_with_transitive_deps() {
 ]);
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
 &reg,
 )
@@ -539,7 +539,7 @@ fn test_resolving_minimum_version_with_transitive_deps() {
 .unwrap();
 let res = resolve_with_config(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
 &reg,
 Some(&config),
@@ -600,7 +600,7 @@ fn resolving_incompat_versions() {
 ]);
 assert!(resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("foo", "=1.0.1"), dep("bar")],
 &reg
 )
@@ -615,7 +615,7 @@ fn resolving_wrong_case_from_registry() {
 // This test documents the current behavior.
 let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]);
-assert!(resolve(&pkg_id("root"), vec![dep("bar")], &reg).is_err());
+assert!(resolve(pkg_id("root"), vec![dep("bar")], &reg).is_err());
 }
 #[test]
@@ -626,7 +626,7 @@ fn resolving_mis_hyphenated_from_registry() {
 // This test documents the current behavior.
 let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]);
-assert!(resolve(&pkg_id("root"), vec![dep("bar")], &reg).is_err());
+assert!(resolve(pkg_id("root"), vec![dep("bar")], &reg).is_err());
 }
 #[test]
@@ -638,7 +638,7 @@ fn resolving_backtrack() {
 pkg!("baz"),
 ]);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "^1")], &reg).unwrap();
 assert_contains(
 &res,
@@ -658,7 +658,7 @@ fn resolving_backtrack_features() {
 pkg!("bar"),
 ]);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "^1")], &reg).unwrap();
 assert_contains(
 &res,
@@ -680,7 +680,7 @@ fn resolving_allows_multiple_compatible_versions() {
 pkg!("d4" => [dep_req("foo", "0.2")]),
 ]);
-let res = resolve(&pkg_id("root"), vec![dep("bar")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("bar")], &reg).unwrap();
 assert_same(
 &res,
@@ -713,7 +713,7 @@ fn resolving_with_deep_backtracking() {
 pkg!(("dep_req", "2.0.0")),
 ]);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
 assert_same(
 &res,
@@ -741,7 +741,7 @@ fn resolving_with_sys_crates() {
 ]);
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("d", "1"), dep_req("r", "1")],
 &reg,
 )
@@ -794,7 +794,7 @@ fn resolving_with_constrained_sibling_backtrack_parent() {
 }
 let reg = registry(reglist);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
 assert_contains(
 &res,
@@ -829,7 +829,7 @@ fn resolving_with_many_equivalent_backtracking() {
 let reg = registry(reglist.clone());
-let res = resolve(&pkg_id("root"), vec![dep("level0")], &reg);
+let res = resolve(pkg_id("root"), vec![dep("level0")], &reg);
 assert!(res.is_err());
@@ -839,7 +839,7 @@ fn resolving_with_many_equivalent_backtracking() {
 let reg = registry(reglist.clone());
-let res = resolve(&pkg_id("root"), vec![dep("level0")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("level0")], &reg).unwrap();
 assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")]));
@@ -853,7 +853,7 @@ fn resolving_with_many_equivalent_backtracking() {
 let reg = registry(reglist.clone());
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep("level0"), dep("constrained")],
 &reg,
 )
@@ -871,7 +871,7 @@ fn resolving_with_many_equivalent_backtracking() {
 let reg = registry(reglist.clone());
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("level0", "1.0.1"), dep("constrained")],
 &reg,
 )
@@ -889,7 +889,7 @@ fn resolving_with_many_equivalent_backtracking() {
 let reg = registry(reglist.clone());
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")],
 &reg,
 );
@@ -932,7 +932,7 @@ fn resolving_with_deep_traps() {
 let reg = registry(reglist.clone());
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep("backtrack_trap0"), dep("cloaking")],
 &reg,
 );
@@ -984,7 +984,7 @@ fn resolving_with_constrained_cousins_backtrack() {
 // but `constrained= "2.0.1"` is already picked.
 // Only then to try and solve `constrained= "~1.0.0"` which is incompatible.
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![
 dep("backtrack_trap0"),
 dep_req("constrained", "2.0.1"),
@@ -1014,7 +1014,7 @@ fn resolving_with_constrained_cousins_backtrack() {
 let reg = registry(reglist.clone());
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep("level0"), dep_req("constrained", "2.0.1")],
 &reg,
 );
@@ -1022,7 +1022,7 @@ fn resolving_with_constrained_cousins_backtrack() {
 assert!(res.is_err());
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep("level0"), dep_req("constrained", "2.0.0")],
 &reg,
 )
@@ -1066,7 +1066,7 @@ fn resolving_with_constrained_sibling_backtrack_activation() {
 }
 let reg = registry(reglist);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
 assert_contains(
 &res,
@@ -1112,7 +1112,7 @@ fn resolving_with_constrained_sibling_transitive_dep_effects() {
 pkg!(("D", "1.0.105")),
 ]);
-let res = resolve(&pkg_id("root"), vec![dep_req("A", "1")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep_req("A", "1")], &reg).unwrap();
 assert_same(
 &res,
@@ -1158,7 +1158,7 @@ fn incomplete_information_skiping() {
 ];
 let reg = registry(input.clone());
-let res = resolve(&pkg_id("root"), vec![dep("g")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("g")], &reg).unwrap();
 let package_to_yank = "to_yank".to_pkgid();
 // this package is not used in the resolution.
 assert!(!res.contains(&package_to_yank));
@@ -1172,7 +1172,7 @@ fn incomplete_information_skiping() {
 );
 assert_eq!(input.len(), new_reg.len() + 1);
 // it should still build
-assert!(resolve(&pkg_id("root"), vec![dep("g")], &new_reg).is_ok());
+assert!(resolve(pkg_id("root"), vec![dep("g")], &new_reg).is_ok());
 }
 #[test]
@@ -1227,7 +1227,7 @@ fn incomplete_information_skiping_2() {
 ];
 let reg = registry(input.clone());
-let res = resolve(&pkg_id("root"), vec![dep("i")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("i")], &reg).unwrap();
 let package_to_yank = ("to_yank", "8.8.1").to_pkgid();
 // this package is not used in the resolution.
 assert!(!res.contains(&package_to_yank));
@@ -1241,7 +1241,7 @@ fn incomplete_information_skiping_2() {
 );
 assert_eq!(input.len(), new_reg.len() + 1);
 // it should still build
-assert!(resolve(&pkg_id("root"), vec![dep("i")], &new_reg).is_ok());
+assert!(resolve(pkg_id("root"), vec![dep("i")], &new_reg).is_ok());
 }
 #[test]
@@ -1277,7 +1277,7 @@ fn incomplete_information_skiping_3() {
 ];
 let reg = registry(input.clone());
-let res = resolve(&pkg_id("root"), vec![dep("b")], &reg).unwrap();
+let res = resolve(pkg_id("root"), vec![dep("b")], &reg).unwrap();
 let package_to_yank = ("to_yank", "3.0.3").to_pkgid();
 // this package is not used in the resolution.
 assert!(!res.contains(&package_to_yank));
@@ -1291,14 +1291,14 @@ fn incomplete_information_skiping_3() {
 );
 assert_eq!(input.len(), new_reg.len() + 1);
 // it should still build
-assert!(resolve(&pkg_id("root"), vec![dep("b")], &new_reg).is_ok());
+assert!(resolve(pkg_id("root"), vec![dep("b")], &new_reg).is_ok());
 }
 #[test]
 fn resolving_but_no_exists() {
 let reg = registry(vec![]);
-let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg);
+let res = resolve(pkg_id("root"), vec![dep_req("foo", "1")], &reg);
 assert!(res.is_err());
 assert_eq!(
@@ -1315,7 +1315,7 @@ fn resolving_but_no_exists() {
 fn resolving_cycle() {
 let reg = registry(vec![pkg!("foo" => ["foo"])]);
-let _ = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg);
+let _ = resolve(pkg_id("root"), vec![dep_req("foo", "1")], &reg);
 }
 #[test]
@@ -1327,7 +1327,7 @@ fn hard_equality() {
 ]);
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")],
 &reg,
 )
@@ -1365,7 +1365,7 @@ fn large_conflict_cache() {
 }
 }
 let reg = registry(input);
-let _ = resolve(&pkg_id("root"), root_deps, &reg);
+let _ = resolve(pkg_id("root"), root_deps, &reg);
 }
 #[test]
@@ -1405,5 +1405,5 @@ fn conflict_store_bug() {
 ];
 let reg = registry(input.clone());
-let _ = resolve_and_validated(&pkg_id("root"), vec![dep("j")], &reg);
+let _ = resolve_and_validated(pkg_id("root"), vec![dep("j")], &reg);
 }

@@ -515,7 +515,7 @@ pub fn main_file(println: &str, deps: &[&str]) -> String {
 }
 buf.push_str("fn main() { println!(");
-buf.push_str(&println);
+buf.push_str(println);
 buf.push_str("); }\n");
 buf.to_string()
@@ -926,7 +926,7 @@ impl Execs {
 }
 for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
 self.match_std(
-Some(&expect),
+Some(expect),
 &actual.stdout,
 "stdout",
 &actual.stderr,
@@ -1246,7 +1246,7 @@ impl Execs {
 .enumerate()
 .filter_map(|(i, (a, e))| match (a, e) {
 (Some(a), Some(e)) => {
-if lines_match(&e, &a) {
+if lines_match(e, a) {
 None
 } else {
 Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a))
@@ -1327,7 +1327,7 @@ fn lines_match_works() {
 /// arbitrary nested JSON (useful for parts of object emitted by other programs
 /// (e.g., rustc) rather than Cargo itself). Arrays are sorted before comparison.
 pub fn find_json_mismatch(expected: &Value, actual: &Value) -> Result<(), String> {
-match find_json_mismatch_r(expected, &actual) {
+match find_json_mismatch_r(expected, actual) {
 Some((expected_part, actual_part)) => Err(format!(
 "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
 serde_json::to_string_pretty(expected).unwrap(),
@@ -1368,7 +1368,7 @@ fn find_json_mismatch_r<'a>(
 if !l.is_empty() {
 assert!(!r.is_empty());
-Some((&l[0], &r[0]))
+Some((l[0], r[0]))
 } else {
 assert_eq!(r.len(), 0);
 None

@@ -21,7 +21,7 @@ use proptest::string::string_regex;
 use proptest::test_runner::TestRunner;
 pub fn resolve(
-pkg: &PackageId,
+pkg: PackageId,
 deps: Vec<Dependency>,
 registry: &[Summary],
 ) -> CargoResult<Vec<PackageId>> {
@@ -29,7 +29,7 @@ pub fn resolve(
 }
 pub fn resolve_and_validated(
-pkg: &PackageId,
+pkg: PackageId,
 deps: Vec<Dependency>,
 registry: &[Summary],
 ) -> CargoResult<Vec<PackageId>> {
@@ -58,7 +58,7 @@ pub fn resolve_and_validated(
 }
 pub fn resolve_with_config(
-pkg: &PackageId,
+pkg: PackageId,
 deps: Vec<Dependency>,
 registry: &[Summary],
 config: Option<&Config>,
@@ -68,7 +68,7 @@ pub fn resolve_with_config(
 }
 pub fn resolve_with_config_raw(
-pkg: &PackageId,
+pkg: PackageId,
 deps: Vec<Dependency>,
 registry: &[Summary],
 config: Option<&Config>,
@@ -461,7 +461,7 @@ pub fn registry_strategy(
 let (c, d) = order_index(c, d, s.len());
 dependency_by_pkgid[b].push(dep_req_kind(
-&dep_name,
+dep_name,
 &if c == 0 && d == s_last_index {
 "*".to_string()
 } else if c == 0 {
@@ -525,7 +525,7 @@ fn meta_test_deep_trees_from_strategy() {
 let reg = registry(input.clone());
 for this in input.iter().rev().take(10) {
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 );
@@ -564,7 +564,7 @@ fn meta_test_multiple_versions_strategy() {
 let reg = registry(input.clone());
 for this in input.iter().rev().take(10) {
 let res = resolve(
-&pkg_id("root"),
+pkg_id("root"),
 vec![dep_req(&this.name(), &format!("={}", this.version()))],
 &reg,
 );
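
The last file changes kind rather than degree: the resolver test helpers now take `pkg: PackageId` by value instead of `&PackageId`, which lets every call site drop the `&` from `pkg_id("root")`. That only reads as a pure cleanup if `PackageId` is cheap to pass by value, i.e. a small `Copy` handle, as the by-value signatures suggest. A rough sketch under that assumption (the type below is a made-up stand-in, not Cargo's):

    // Hypothetical stand-in for an interned package id; not Cargo's actual type.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct PackageId(u32);

    fn by_ref(pkg: &PackageId) -> u32 {
        pkg.0
    }

    // For a Copy handle, by-value costs the same as by-ref and removes the
    // `&` noise at every call site, mirroring the hunks above.
    fn by_value(pkg: PackageId) -> u32 {
        pkg.0
    }

    fn main() {
        let root = PackageId(0);
        assert_eq!(by_ref(&root), by_value(root));
        println!("root is still usable because PackageId is Copy: {:?}", root);
    }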