fix a bunch of clippy warnings (invocation: cargo clippy --all-targets --all-features -- --cap-lints warn)

Special thanks to dwijnand for helping me with this! :)
Matthias Krüger 2018-08-09 00:57:20 +02:00
parent 578e2533b8
commit 8798bf0d28
25 changed files with 89 additions and 88 deletions
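Most of the hunks below are mechanical rewrites suggested by clippy: passing borrows instead of moving values, is_empty() instead of length comparisons, tail expressions instead of trailing returns, and lossless From conversions instead of `as` casts. The standalone sketch below is illustrative only (it is not part of this commit, and `sum_bytes` is a made-up name); it shows a few of those patterns in the shape clippy prefers.

// Illustrative sketch only; `sum_bytes` is a hypothetical helper, not cargo code.
fn sum_bytes(data: &[u8]) -> u64 {
    // clippy::len_zero: prefer `is_empty()` over `len() == 0`.
    if data.is_empty() {
        return 0;
    }
    // clippy::cast_lossless: `u64::from(b)` instead of `b as u64`.
    // Returning via the tail expression (no trailing `return ...;`) is what
    // clippy::needless_return asks for in several hunks below.
    data.iter().map(|&b| u64::from(b)).sum()
}

fn main() {
    let bytes = vec![1u8, 2, 3];
    // Pass a borrow instead of moving the value, mirroring the
    // `capture_output(&cmd, ...)` changes below (clippy::needless_pass_by_value).
    println!("{}", sum_bytes(&bytes));
}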

View File

@@ -322,7 +322,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
     } else {
         state.running(&cmd);
         let output = if extra_verbose {
-            state.capture_output(cmd, true)
+            state.capture_output(&cmd, true)
         } else {
             cmd.exec_with_output()
         };

View File

@@ -110,7 +110,7 @@ impl<'a> JobState<'a> {
     pub fn capture_output(
         &self,
-        cmd: ProcessBuilder,
+        cmd: &ProcessBuilder,
         print_output: bool,
     ) -> CargoResult<Output> {
         cmd.exec_with_streaming(

View File

@@ -118,7 +118,7 @@ impl Executor for DefaultExecutor {
         _mode: CompileMode,
         state: &job_queue::JobState<'_>,
     ) -> CargoResult<()> {
-        state.capture_output(cmd, false).map(drop)
+        state.capture_output(&cmd, false).map(drop)
     }
 }
@@ -645,7 +645,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
             false,
         ).map(drop)
     } else if should_capture_output {
-        state.capture_output(rustdoc, false).map(drop)
+        state.capture_output(&rustdoc, false).map(drop)
     } else {
         rustdoc.exec()
     };

View File

@@ -110,7 +110,7 @@ impl fmt::Display for VersionInfo {
         if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
             if channel != "stable" {
                 write!(f, "-{}", channel)?;
-                let empty = String::from("");
+                let empty = String::new();
                 write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?;
             }
         };

View File

@@ -270,7 +270,7 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs)
     rustfix_and_fix(&mut fixes, rustc, filename, args)?;
     let mut progress_yet_to_be_made = false;
     for (path, file) in fixes.files.iter_mut() {
-        if file.errors_applying_fixes.len() == 0 {
+        if file.errors_applying_fixes.is_empty() {
            continue
        }
        // If anything was successfully fixed *and* there's at least one
@@ -523,7 +523,7 @@ impl FixArgs {
            ret.prepare_for_edition = PrepareFor::Next;
        }
        ret.idioms = env::var(IDIOMS_ENV).is_ok();
-        return ret
+        ret
    }
    fn apply(&self, cmd: &mut Command) {
@@ -535,10 +535,7 @@ impl FixArgs {
        if let Some(edition) = &self.enabled_edition {
            cmd.arg("--edition").arg(edition);
            if self.idioms {
-                match &edition[..] {
-                    "2018" => { cmd.arg("-Wrust-2018-idioms"); }
-                    _ => {}
-                }
+                if edition == "2018" { cmd.arg("-Wrust-2018-idioms"); }
            }
        }
        match &self.prepare_for_edition {
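The `match &edition[..]` rewrite above is clippy's single_match lint: a match with one meaningful arm and an empty catch-all reads better as an `if`. A minimal standalone sketch of the same transformation (the `edition` value here is hypothetical, not cargo code):

fn main() {
    let edition = String::from("2018");
    // Before: one interesting arm plus an empty `_ => {}` catch-all.
    match &edition[..] {
        "2018" => println!("-Wrust-2018-idioms"),
        _ => {}
    }
    // After: the single-pattern match collapses to a plain `if` (clippy::single_match).
    if edition == "2018" {
        println!("-Wrust-2018-idioms");
    }
}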

View File

@@ -199,7 +199,7 @@ guide can be found at
         file,
         match edition {
             Some(s) => format!("with the {} edition", s),
-            None => format!("without an edition"),
+            None => "without an edition".to_string(),
         },
     ))?;
     Ok(())

View File

@@ -1024,12 +1024,12 @@ fn cargo_compile_with_downloaded_dependency_with_offline() {
         p2.cargo("build")
             .masquerade_as_nightly_cargo()
             .arg("-Zoffline"),
-        execs().with_stderr(format!(
+        execs().with_stderr(
             "\
 [COMPILING] present_dep v1.2.3
 [COMPILING] bar v0.1.0 ([..])
 [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
-        )),
+        ),
     );
 }
@@ -4439,7 +4439,7 @@ fn target_edition_feature_gated() {
     assert_that(
         p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 error: failed to parse manifest at `[..]`
@@ -4451,7 +4451,7 @@ Caused by:
 consider adding `cargo-features = [\"edition\"]` to the manifest
 "
-        )),
+        ),
     );
 }

View File

@@ -52,7 +52,7 @@ fn multiple_installs() {
 #[test]
 fn concurrent_installs() {
-    const LOCKED_BUILD: &'static str = "waiting for file lock on build directory";
+    const LOCKED_BUILD: &str = "waiting for file lock on build directory";
     pkg("foo", "0.0.1");
     pkg("bar", "0.0.1");

View File

@@ -4,6 +4,7 @@ use cargo::util::toml::{self, VecStringOrBool as VSOB};
 use cargo::CargoError;
 use support::{execs, lines_match, paths, project};
 use support::hamcrest::assert_that;
+use std::borrow::Borrow;
 use std::collections;
 use std::fs;
@@ -68,8 +69,9 @@ fn new_config(env: &[(&str, &str)]) -> Config {
     config
 }
-fn assert_error(error: CargoError, msgs: &str) {
+fn assert_error<E: Borrow<CargoError>>(error: E, msgs: &str) {
     let causes = error
+        .borrow()
         .iter_chain()
         .map(|e| e.to_string())
         .collect::<Vec<_>>()
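The assert_error change above makes the helper generic over Borrow<CargoError>, so callers can hand it either an owned error or a reference without cloning. A minimal standalone sketch of the same pattern, using a hypothetical `describe` helper over String instead of CargoError:

use std::borrow::Borrow;

// Accepts both an owned String and a &String (or anything else that borrows as String).
fn describe<E: Borrow<String>>(err: E) -> usize {
    // `.borrow()` yields a &String regardless of whether `err` was owned or borrowed.
    err.borrow().len()
}

fn main() {
    let owned = String::from("duplicate key");
    assert_eq!(describe(&owned), 13); // pass a reference, keep ownership
    assert_eq!(describe(owned), 13);  // or pass the owned value
}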

View File

@@ -38,7 +38,7 @@ fn deleting_database_files() {
     let mut files = Vec::new();
     find_files(&paths::home().join(".cargo/git/db"), &mut files);
-    assert!(files.len() > 0);
+    assert!(!files.is_empty());
     let log = "cargo::sources::git=trace";
     for file in files {
@@ -120,7 +120,7 @@ fn deleting_checkout_files() {
         .join(".git");
     let mut files = Vec::new();
     find_files(&dir, &mut files);
-    assert!(files.len() > 0);
+    assert!(!files.is_empty());
     let log = "cargo::sources::git=trace";
     for file in files {

View File

@@ -626,7 +626,7 @@ fn doc_same_name() {
 #[test]
 fn doc_target() {
-    const TARGET: &'static str = "arm-unknown-linux-gnueabihf";
+    const TARGET: &str = "arm-unknown-linux-gnueabihf";
     let p = project()
         .file(

View File

@@ -61,7 +61,7 @@ fn cargo_compile_simple_git_dep() {
 [COMPILING] dep1 v0.5.0 ({}#[..])\n\
 [COMPILING] foo v0.5.0 ({})\n\
 [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
-            path2url(git_root.clone()),
+            path2url(&git_root),
             path2url(git_root),
             path2url(root)
         )),
@@ -146,7 +146,7 @@ fn cargo_compile_offline_with_cached_git_dep() {
                 rev = "{}"
             "#,
             git_project.url(),
-            rev1.clone()
+            rev1
         ),
     )
     .file("src/main.rs", "fn main(){}")
@@ -166,7 +166,7 @@ fn cargo_compile_offline_with_cached_git_dep() {
                 rev = "{}"
             "#,
             git_project.url(),
-            rev2.clone()
+            rev2
         ).as_bytes())
         .unwrap();
     assert_that(prj.cargo("build"), execs());
@@ -303,7 +303,7 @@ fn cargo_compile_git_dep_branch() {
 [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\
 [COMPILING] foo v0.5.0 ({})\n\
 [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
-            path2url(git_root.clone()),
+            path2url(&git_root),
             path2url(git_root),
             path2url(root)
         )),
@@ -376,7 +376,7 @@ fn cargo_compile_git_dep_tag() {
 [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\
 [COMPILING] foo v0.5.0 ({})\n\
 [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
-            path2url(git_root.clone()),
+            path2url(&git_root),
             path2url(git_root),
             path2url(root)
         )),
@@ -2674,7 +2674,7 @@ fn invalid_git_dependency_manifest() {
 \n\
 Caused by:\n \
 duplicate key: `categories` for key `project`",
-            path2url(git_root.clone()),
+            path2url(&git_root),
             path2url(git_root),
         )),
     );

View File

@@ -403,7 +403,7 @@ fn gitignore_no_newline_in_new() {
         .unwrap()
         .read_to_string(&mut contents)
         .unwrap();
-    assert!(!contents.starts_with("\n"));
+    assert!(!contents.starts_with('\n'));
 }
 #[test]
@@ -446,7 +446,7 @@ fn mercurial_no_newline_in_new() {
         .unwrap()
         .read_to_string(&mut contents)
         .unwrap();
-    assert!(!contents.starts_with("\n"));
+    assert!(!contents.starts_with('\n'));
 }
 #[test]

View File

@@ -60,7 +60,7 @@ fn check_token(expected_token: &str, registry: Option<&str>) -> bool {
             .get("registry")
             .and_then(|registry_table| registry_table.get("token"))
             .and_then(|v| match v {
-                &toml::Value::String(ref token) => Some(token.as_str().to_string()),
+                toml::Value::String(ref token) => Some(token.as_str().to_string()),
                 _ => None,
             }),
         _ => None,

View File

@@ -1,4 +1,6 @@
 #![deny(warnings)]
+#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
+#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
 extern crate bufstream;
 extern crate cargo;

View File

@@ -1016,7 +1016,7 @@ fn test_edition_malformed() {
     assert_that(
         p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 error: failed to parse manifest at `[..]`
@@ -1025,8 +1025,8 @@ Caused by:
 Caused by:
 supported edition values are `2015` or `2018`, but `chicken` is unknown
-"
-        )),
+".to_string()
+        ),
     );
 }
@@ -1048,7 +1048,7 @@ fn test_edition_nightly() {
     assert_that(
         p.cargo("build").arg("-v").masquerade_as_nightly_cargo(),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 error: failed to parse manifest at `[..]`
@@ -1060,7 +1060,7 @@ Caused by:
 consider adding `cargo-features = [\"edition\"]` to the manifest
 "
-        )),
+        ),
     );
 }

View File

@@ -702,7 +702,7 @@ fn remove_patch() {
     File::create(p.root().join("Cargo.toml"))
         .unwrap()
         .write_all(
-            r#"
+            br#"
                 [package]
                 name = "foo"
                 version = "0.0.1"
@@ -713,7 +713,7 @@ fn remove_patch() {
                 [patch.crates-io]
                 bar = { path = 'bar' }
-            "#.as_bytes(),
+            "#,
         )
         .unwrap();
     assert_that(p.cargo("build"), execs());

View File

@@ -51,9 +51,9 @@ See [..]
     // Skip the metadata payload and the size of the tarball
     let mut sz = [0; 4];
     assert_eq!(f.read(&mut sz).unwrap(), 4);
-    let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
-        | ((sz[3] as u32) << 24);
-    f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
+    let sz = (u32::from(sz[0]) << 0) | (u32::from(sz[1]) << 8) | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
     // Verify the tarball
     let mut rdr = GzDecoder::new(f);
@@ -127,9 +127,9 @@ See [..]
     // Skip the metadata payload and the size of the tarball
     let mut sz = [0; 4];
     assert_eq!(f.read(&mut sz).unwrap(), 4);
-    let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
-        | ((sz[3] as u32) << 24);
-    f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
+    let sz = (u32::from(sz[0]) << 0) | (u32::from(sz[1]) << 8) | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
     // Verify the tarball
     let mut rdr = GzDecoder::new(f);
@@ -205,15 +205,15 @@ See [..]
     // Skip the metadata payload and the size of the tarball
     let mut sz = [0; 4];
     assert_eq!(f.read(&mut sz).unwrap(), 4);
-    let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
-        | ((sz[3] as u32) << 24);
-    f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
+    let sz = (u32::from(sz[0]) << 0) | (u32::from(sz[1]) << 8) | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
     // Verify the tarball
     let mut rdr = GzDecoder::new(f);
     assert_eq!(
         rdr.header().unwrap().filename().unwrap(),
-        "foo-0.0.1.crate".as_bytes()
+        b"foo-0.0.1.crate"
     );
     let mut contents = Vec::new();
     rdr.read_to_end(&mut contents).unwrap();
@@ -285,15 +285,15 @@ See [..]
     // Skip the metadata payload and the size of the tarball
     let mut sz = [0; 4];
     assert_eq!(f.read(&mut sz).unwrap(), 4);
-    let sz = ((sz[0] as u32) << 0) | ((sz[1] as u32) << 8) | ((sz[2] as u32) << 16)
-        | ((sz[3] as u32) << 24);
-    f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap();
+    let sz = (u32::from(sz[0]) << 0) | (u32::from(sz[1]) << 8) | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
     // Verify the tarball
     let mut rdr = GzDecoder::new(f);
     assert_eq!(
         rdr.header().unwrap().filename().unwrap(),
-        "foo-0.0.1.crate".as_bytes()
+        b"foo-0.0.1.crate"
     );
     let mut contents = Vec::new();
     rdr.read_to_end(&mut contents).unwrap();
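The four hunks above decode a little-endian u32 length prefix; u32::from and i64::from make each widening explicit and lossless, which is what clippy's cast_lossless lint suggests over `as` casts. A standalone sketch of the same decode with made-up bytes:

fn main() {
    // A 4-byte little-endian length prefix; 0x0000_0201 == 513.
    let sz = [0x01u8, 0x02, 0x00, 0x00];
    let len = u32::from(sz[0])
        | (u32::from(sz[1]) << 8)
        | (u32::from(sz[2]) << 16)
        | (u32::from(sz[3]) << 24);
    assert_eq!(len, 513);
    // Widening further to i64 for a seek offset is likewise lossless with `i64::from`.
    let offset = i64::from(len) + 4;
    assert_eq!(offset, 517);
}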

View File

@@ -345,20 +345,20 @@ fn test_default_features() {
     assert_that(
         p.cargo("test").arg("--no-default-features"),
         execs()
-            .with_stderr(format!(
+            .with_stderr(
                 "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
-            ))
+            )
             .with_stdout(""),
     );
     assert_that(
         p.cargo("test").arg("--test=foo"),
         execs()
-            .with_stderr(format!(
+            .with_stderr(
                 "\
 [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
 [RUNNING] target/debug/deps/foo-[..][EXE]"
-            ))
+            )
             .with_stdout_contains("test test ... ok"),
     );
@@ -471,9 +471,9 @@ fn test_multiple_required_features() {
     assert_that(
         p.cargo("test").arg("--no-default-features"),
         execs()
-            .with_stderr(format!(
+            .with_stderr(
                 "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
-            ))
+            )
             .with_stdout(""),
     );
 }
@@ -530,18 +530,18 @@ fn bench_default_features() {
     assert_that(
         p.cargo("bench").arg("--no-default-features"),
         execs()
-            .with_stderr(format!("[FINISHED] release [optimized] target(s) in [..]"))
+            .with_stderr("[FINISHED] release [optimized] target(s) in [..]".to_string())
             .with_stdout(""),
     );
     assert_that(
         p.cargo("bench").arg("--bench=foo"),
         execs()
-            .with_stderr(format!(
+            .with_stderr(
                 "\
 [FINISHED] release [optimized] target(s) in [..]
 [RUNNING] target/release/deps/foo-[..][EXE]"
-            ))
+            )
             .with_stdout_contains("test bench ... bench: [..]"),
     );
@@ -689,7 +689,7 @@ fn bench_multiple_required_features() {
     assert_that(
         p.cargo("bench").arg("--no-default-features"),
         execs()
-            .with_stderr(format!("[FINISHED] release [optimized] target(s) in [..]"))
+            .with_stderr("[FINISHED] release [optimized] target(s) in [..]")
             .with_stdout(""),
     );
 }
@@ -728,13 +728,13 @@ fn install_default_features() {
     assert_that(
         p.cargo("install --path .").arg("--no-default-features"),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 [INSTALLING] foo v0.0.1 ([..])
 [FINISHED] release [optimized] target(s) in [..]
 [ERROR] no binaries are available for install using the selected features
 "
-        )),
+        ),
     );
     assert_that(cargo_home(), is_not(has_installed_exe("foo")));
@@ -749,7 +749,7 @@ fn install_default_features() {
         p.cargo("install --path .")
             .arg("--bin=foo")
             .arg("--no-default-features"),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 [INSTALLING] foo v0.0.1 ([..])
 [ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
@@ -759,7 +759,7 @@ Caused by:
 target `foo` in package `foo` requires the features: `a`
 Consider enabling them by passing e.g. `--features=\"a\"`
 "
-        )),
+        ),
     );
     assert_that(cargo_home(), is_not(has_installed_exe("foo")));
@@ -774,7 +774,7 @@ Consider enabling them by passing e.g. `--features=\"a\"`
         p.cargo("install --path .")
             .arg("--example=foo")
             .arg("--no-default-features"),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 [INSTALLING] foo v0.0.1 ([..])
 [ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
@@ -784,7 +784,7 @@ Caused by:
 target `foo` in package `foo` requires the features: `a`
 Consider enabling them by passing e.g. `--features=\"a\"`
 "
-        )),
+        ),
     );
     assert_that(cargo_home(), is_not(has_installed_exe("foo")));
 }
@@ -1092,9 +1092,9 @@ Consider enabling them by passing e.g. `--features=\"bar/a\"`
     assert_that(
         p.cargo("test"),
         execs()
-            .with_stderr(format!(
+            .with_stderr(
                 "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"
-            ))
+            )
             .with_stdout(""),
     );
@@ -1119,7 +1119,7 @@ Consider enabling them by passing e.g. `--features=\"bar/a\"`
     assert_that(
         p.cargo("bench"),
         execs()
-            .with_stderr(format!("[FINISHED] release [optimized] target(s) in [..]"))
+            .with_stderr("[FINISHED] release [optimized] target(s) in [..]")
            .with_stdout(""),
     );
@@ -1144,13 +1144,13 @@ Consider enabling them by passing e.g. `--features=\"bar/a\"`
     // install
     assert_that(
         p.cargo("install --path ."),
-        execs().with_status(101).with_stderr(format!(
+        execs().with_status(101).with_stderr(
             "\
 [INSTALLING] foo v0.0.1 ([..])
 [FINISHED] release [optimized] target(s) in [..]
 [ERROR] no binaries are available for install using the selected features
 "
-        )),
+        ),
     );
     assert_that(cargo_home(), is_not(has_installed_exe("foo")));

View File

@@ -1,7 +1,7 @@
 use support::{basic_manifest, basic_bin_manifest, basic_lib_manifest, execs, project};
 use support::hamcrest::assert_that;
-const CARGO_RUSTC_ERROR: &'static str =
+const CARGO_RUSTC_ERROR: &str =
     "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering
 the package by passing e.g. `--lib` or `--bin NAME` to specify a single target";

View File

@@ -678,7 +678,7 @@ fn build_rustflags_normal_source_with_target() {
     )
     .build();
-    let ref host = rustc_host();
+    let host = &rustc_host();
     // Use RUSTFLAGS to pass an argument that will generate an error
     assert_that(

View File

@@ -130,7 +130,7 @@ pub fn add_submodule<'a>(
     t!(origin.fetch(&[], None, None));
     t!(subrepo.checkout_head(None));
     t!(s.add_finalize());
-    return s;
+    s
 }
 pub fn commit(repo: &git2::Repository) -> git2::Oid {

View File

@@ -351,7 +351,7 @@ impl Project {
     pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder {
         let mut p = ::support::process(program);
         p.cwd(self.root());
-        return p;
+        p
     }
     /// Create a `ProcessBuilder` to run cargo.
@@ -361,7 +361,7 @@ impl Project {
     pub fn cargo(&self, cmd: &str) -> ProcessBuilder {
         let mut p = self.process(&cargo_exe());
         split_and_add_args(&mut p, cmd);
-        return p;
+        p
     }
     /// Returns the contents of `Cargo.lock`.
@@ -779,7 +779,7 @@ impl Execs {
             .map_err(|_| "stdout was not utf8 encoded".to_owned())?;
         let lines = stdout
             .lines()
-            .filter(|line| line.starts_with("{"))
+            .filter(|line| line.starts_with('{'))
             .collect::<Vec<_>>();
         if lines.len() != objects.len() {
             return Err(format!(
@@ -935,7 +935,7 @@ impl Execs {
             }
         };
     }
-        if a.len() > 0 {
+        if !a.is_empty() {
            Err(format!(
                "Output included extra lines:\n\
                {}\n",
@@ -1073,8 +1073,8 @@ fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Valu
            },
        );
-        if l.len() > 0 {
-            assert!(r.len() > 0);
+        if !l.is_empty() {
+            assert!(!r.is_empty());
             Some((&l[0], &r[0]))
         } else {
             assert_eq!(r.len(), 0);
@@ -1255,8 +1255,8 @@ pub fn basic_lib_manifest(name: &str) -> String {
     )
 }
-pub fn path2url(p: PathBuf) -> Url {
-    Url::from_file_path(&*p).ok().unwrap()
+pub fn path2url<P: AsRef<Path>>(p: P) -> Url {
+    Url::from_file_path(p).ok().unwrap()
 }
 fn substitute_macros(input: &str) -> String {
@@ -1287,10 +1287,10 @@ fn substitute_macros(input: &str) -> String {
         ("[EXE]", if cfg!(windows) { ".exe" } else { "" }),
     ];
     let mut result = input.to_owned();
-    for &(pat, subst) in macros.iter() {
+    for &(pat, subst) in &macros {
         result = result.replace(pat, subst)
     }
-    return result;
+    result
 }
 pub mod install;
@@ -1357,7 +1357,7 @@ fn _process(t: &OsStr) -> cargo::util::ProcessBuilder {
         .env_remove("GIT_COMMITTER_EMAIL")
         .env_remove("CARGO_TARGET_DIR") // we assume 'target'
         .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows
-    return p;
+    p
 }
 pub trait ChannelChanger: Sized {
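One of the hunks above generalizes path2url from taking a PathBuf by value to any P: AsRef<Path>, which is why call sites in the other test files can drop their `.clone()` calls and pass `&foo_path` instead. A minimal sketch of the same signature change, using a hypothetical `file_len` helper:

use std::fs;
use std::path::Path;

// Before: `fn file_len(p: PathBuf) -> u64` forced callers to clone or give up ownership.
// After: any &Path, PathBuf, &str, or String can be passed without cloning.
fn file_len<P: AsRef<Path>>(p: P) -> u64 {
    fs::metadata(p).map(|m| m.len()).unwrap_or(0)
}

fn main() {
    let path = std::env::temp_dir();
    // Pass a borrow; `path` stays usable afterwards and no `.clone()` is needed.
    let _ = file_len(&path);
    println!("{}", path.display());
}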

View File

@@ -289,7 +289,7 @@ impl Package {
         ));
     }
-    return cksum;
+    cksum
 }
 fn make_archive(&self) {

View File

@@ -124,7 +124,7 @@ fn relative_tools() {
     .build();
     let foo_path = p.root().join("bar");
-    let foo_url = path2url(foo_path.clone());
+    let foo_url = path2url(&foo_path);
     let prefix = p.root().into_os_string().into_string().unwrap();
     let output = if cfg!(windows) {
         (