Mirror of https://github.com/rust-lang/cargo.git
Some clippy fixes.

parent 00e4c69f2b
commit ec21e12d8a
@@ -219,7 +219,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
             self.compilation
                 .rustdocflags
                 .entry(unit.pkg.package_id())
-                .or_insert(rustdocflags.to_vec());
+                .or_insert_with(|| rustdocflags.to_vec());
         }

         super::output_depinfo(&mut self, unit)?;
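Note: a minimal standalone sketch (the map and flag names here are illustrative, not Cargo's) of why this is the change clippy's or_fun_call lint suggests: with or_insert_with the closure only runs when the entry is vacant, so the Vec is not allocated just to be discarded.

    use std::collections::HashMap;

    fn main() {
        let flags = vec!["--cfg".to_string(), "docsrs".to_string()];
        let mut per_package: HashMap<&str, Vec<String>> = HashMap::new();

        // Eager: `flags.to_vec()` allocates even when the key is already
        // occupied and the freshly built value is thrown away.
        per_package.entry("foo").or_insert(flags.to_vec());

        // Lazy: the closure runs only for a vacant entry.
        per_package.entry("bar").or_insert_with(|| flags.to_vec());

        assert_eq!(per_package["bar"], flags);
    }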
@@ -216,7 +216,7 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {

        self.queue.queue(*unit, job, queue_deps);
        *self.counts.entry(unit.pkg.package_id()).or_insert(0) += 1;
-       return Ok(());
+       Ok(())
    }

    /// Executes all jobs necessary to build the dependency graph.
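Note: several hunks in this commit drop a trailing `return`, which is what clippy's needless_return lint flags; the last expression of a function body is already its value. A tiny sketch with a hypothetical helper, not Cargo code:

    fn enqueue(count: &mut u32) -> Result<(), String> {
        *count += 1;
        // The final expression is the return value; no `return ...;` needed.
        Ok(())
    }

    fn main() {
        let mut count = 0;
        enqueue(&mut count).unwrap();
        assert_eq!(count, 1);
    }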
@@ -166,6 +166,6 @@ impl<'a> UnitInterner<'a> {
            }
            me.cache.insert(Box::new(item.clone()));
            let item = me.cache.get(item).unwrap();
-           return unsafe { &*(&**item as *const UnitInner<'a>) };
+           unsafe { &*(&**item as *const UnitInner<'a>) }
        }
    }
@@ -657,7 +657,7 @@ fn activate(

    let candidate = match registry.replacement_summary(candidate_pid) {
        Some(replace) => {
-           if cx.flag_activated(&replace, &method)? && activated {
+           if cx.flag_activated(replace, &method)? && activated {
                return Ok(None);
            }
            trace!(
@@ -55,7 +55,7 @@ impl Resolve {
                    .find(|d| d.kind() == Kind::Normal)
                    .and_then(|d| {
                        if d.is_public() {
-                           Some(dep_package.clone())
+                           Some(*dep_package)
                        } else {
                            None
                        }
@@ -64,7 +64,7 @@ impl Resolve {
                    })
                    .collect::<HashSet<PackageId>>();

-               (p.clone(), public_deps)
+               (*p, public_deps)
            })
            .collect();

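Note: PackageId is a Copy type, so dereferencing replaces the .clone() calls; this is the shape clippy's clone_on_copy lint points at. A self-contained sketch with an illustrative Copy id type standing in for PackageId:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Id(u32);

    fn pick(id: &Id, public: bool) -> Option<Id> {
        if public {
            Some(*id) // a Copy value is dereferenced, not cloned
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(pick(&Id(7), true), Some(Id(7)));
        assert_eq!(pick(&Id(7), false), None);
    }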
@@ -1,8 +1,8 @@
 #![cfg_attr(test, deny(warnings))]
-#![warn(rust_2018_idioms)]
 // While we're getting used to 2018:
+#![warn(rust_2018_idioms)]
 // Clippy isn't enforced by CI (@alexcrichton isn't a fan).
-#![allow(clippy::boxed_local)] // bug rust-lang-nursery/rust-clippy#1123
+#![allow(clippy::blacklisted_name)] // frequently used in tests
 #![allow(clippy::cyclomatic_complexity)] // large project
 #![allow(clippy::derive_hash_xor_eq)] // there's an intentional incoherence
 #![allow(clippy::explicit_into_iter_loop)] // explicit loops are clearer
@@ -10,6 +10,7 @@
 #![allow(clippy::identity_op)] // used for vertical alignment
 #![allow(clippy::implicit_hasher)] // large project
 #![allow(clippy::large_enum_variant)] // large project
+#![allow(clippy::new_without_default)] // explicit is maybe clearer
 #![allow(clippy::redundant_closure)] // closures can be less verbose
 #![allow(clippy::redundant_closure_call)] // closures over try catch blocks
 #![allow(clippy::too_many_arguments)] // large project
@@ -17,6 +18,12 @@
 #![allow(clippy::wrong_self_convention)] // perhaps `Rc` should be special-cased in Clippy?
 #![warn(clippy::needless_borrow)]
 #![warn(clippy::redundant_clone)]
+// Unit is now interned, and would probably be better as pass-by-copy, but
+// doing so causes a lot of & and * shenanigans that makes the code arguably
+// less clear and harder to read.
+#![allow(clippy::trivially_copy_pass_by_ref)]
+// exhaustively destructuring ensures future fields are handled
+#![allow(clippy::unneeded_field_pattern)]

 use std::fmt;

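Note: a rough illustration (mine, not Cargo's code) of the pattern the newly allowed trivially_copy_pass_by_ref lint complains about: clippy would suggest taking a small Copy type by value, but a codebase built around shared/interned values may prefer &T for uniformity, which is the trade-off the new comment describes.

    #![allow(clippy::trivially_copy_pass_by_ref)]

    // A small Copy type; clippy would normally suggest `unit: Unit` here.
    #[derive(Clone, Copy)]
    struct Unit {
        id: u32,
    }

    fn describe(unit: &Unit) -> String {
        format!("unit #{}", unit.id)
    }

    fn main() {
        let unit = Unit { id: 1 };
        println!("{}", describe(&unit));
    }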
@@ -440,7 +440,7 @@ fn tar(
    }

    if pkg.include_lockfile() {
-       let new_lock = build_lock(&ws)?;
+       let new_lock = build_lock(ws)?;

        config
            .shell()
@@ -609,7 +609,7 @@ fn run_verify(ws: &Workspace<'_>, tar: &FileLock, opts: &PackageOpts<'_>) -> Car
    {
        // FIXME: Turn this on at some point in the future
        //Some(vec!["-D exported_private_dependencies".to_string()])
-       None
+       Some(vec![])
    } else {
        None
    };
@@ -211,7 +211,7 @@ impl InstallTracker {
            // `cargo install --path ...` is always rebuilt.
            return Ok((Freshness::Dirty, duplicates));
        }
-       if matching_duplicates.iter().all(|dupe_pkg_id| {
+       let is_up_to_date = |dupe_pkg_id| {
            let info = self
                .v2
                .installs
@@ -229,7 +229,8 @@ impl InstallTracker {
                && dupe_pkg_id.source_id() == source_id
                && precise_equal
                && info.is_up_to_date(opts, target, &exes)
-       }) {
+       };
+       if matching_duplicates.iter().all(is_up_to_date) {
            Ok((Freshness::Fresh, duplicates))
        } else {
            Ok((Freshness::Dirty, duplicates))
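Note: these two hunks pull the multi-line predicate out of the `if ... .all(...)` condition into a named closure, which is the kind of block-in-condition shape clippy warns about. A minimal sketch of the same refactoring on plain data, not Cargo's InstallTracker:

    fn main() {
        let installed_versions = vec![2, 3, 5];
        let required = 2;

        // Name the predicate instead of inlining a multi-line block in the `if`.
        let is_up_to_date = |v: &i32| *v >= required;

        if installed_versions.iter().all(is_up_to_date) {
            println!("fresh");
        } else {
            println!("dirty");
        }
    }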
@@ -145,7 +145,7 @@ impl<'cfg> PathSource<'cfg> {
            .exclude()
            .iter()
            .chain(pkg.manifest().include().iter())
-           .any(|p| p.starts_with("!"));
+           .any(|p| p.starts_with('!'));
        // Don't warn about glob mismatch if it doesn't parse.
        let glob_is_valid = glob_exclude.is_ok() && glob_include.is_ok() && !has_negate;
        let glob_exclude = glob_exclude.unwrap_or_else(|_| Vec::new());
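Note: switching starts_with("!") to starts_with('!') is what clippy's single_char_pattern lint suggests; a char pattern is cheaper and clearer than a one-byte string literal. A standalone sketch with made-up glob patterns:

    fn main() {
        let patterns = ["src/**", "!target/**"];

        // A single char pattern instead of a one-character &str.
        let has_negate = patterns.iter().any(|p| p.starts_with('!'));
        assert!(has_negate);
    }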
@@ -479,12 +479,9 @@ impl<'cfg> PathSource<'cfg> {
            if name.map(|s| s.starts_with('.')) == Some(true) {
                continue;
            }
-           if is_root {
+           if is_root && name == Some("target") {
                // Skip Cargo artifacts.
-               match name {
-                   Some("target") => continue,
-                   _ => {}
-               }
+               continue;
            }
            PathSource::walk(&path, ret, false, filter)?;
        }
@@ -268,7 +268,7 @@ impl<'cfg> RegistryIndex<'cfg> {
    where
        'a: 'b,
    {
-       let source_id = self.source_id.clone();
+       let source_id = self.source_id;

        // First up actually parse what summaries we have available. If Cargo
        // has run previously this will parse a Cargo-specific cache file rather
@@ -337,7 +337,7 @@ impl<'cfg> RegistryIndex<'cfg> {
        for path in UncanonicalizedIter::new(&raw_path).take(1024) {
            let summaries = Summaries::parse(
                index_version.as_ref().map(|s| &**s),
-               &root,
+               root,
                &cache_root,
                path.as_ref(),
                self.source_id,
@@ -671,7 +671,7 @@ impl<'a> SummariesCache<'a> {
            contents.extend_from_slice(data);
            contents.push(0);
        }
-       return contents;
+       contents
    }
}

@@ -704,7 +704,7 @@ impl Config {
    fn resolve_registry_index(&self, index: Value<String>) -> CargoResult<Url> {
        let base = index
            .definition
-           .root(&self)
+           .root(self)
            .join("truncated-by-url_with_base");
        // Parse val to check it is a URL, not a relative path without a protocol.
        let _parsed = index.val.to_url()?;
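Note: inside a &self method, self is already a reference, so .root(&self) passes an extra level of borrow; dropping it is what the crate's own #![warn(clippy::needless_borrow)] asks for. A hedged sketch with hypothetical types (not Cargo's Config or Definition API):

    use std::path::PathBuf;

    struct Config {
        home: PathBuf,
    }

    struct Definition;

    impl Definition {
        fn root(&self, config: &Config) -> PathBuf {
            config.home.join("registry")
        }
    }

    impl Config {
        fn index_base(&self, def: &Definition) -> PathBuf {
            // `def.root(&self)` would pass `&&Config`; `self` is already `&Config`.
            def.root(self)
        }
    }

    fn main() {
        let config = Config { home: PathBuf::from("/home/user/.cargo") };
        println!("{}", config.index_base(&Definition).display());
    }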
@@ -857,7 +857,7 @@ impl Config {
            `acquire_package_cache_lock` before we got to this stack frame",
        );
        assert!(ret.starts_with(self.home_path.as_path_unlocked()));
-       return ret;
+       ret
    }

    /// Acquires an exclusive lock on the global "package cache"
@@ -1,5 +1,6 @@
 use filetime::FileTime;
 use std::fs::{self, File, OpenOptions};
+use std::io;
 use std::io::prelude::*;
 use std::net::TcpListener;
 use std::path::{Path, PathBuf};
@@ -1269,10 +1270,11 @@ fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) {
    for fing in fs::read_dir(&dir).unwrap() {
        let fing = fing.unwrap();

-       if fs::read_dir(fing.path()).unwrap().all(|f| {
+       let outdated = |f: io::Result<fs::DirEntry>| {
            filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap())
                <= timestamp
-       }) {
+       };
+       if fs::read_dir(fing.path()).unwrap().all(outdated) {
            fs::remove_dir_all(fing.path()).unwrap();
            println!("remove: {:?}", fing.path());
            // a real cleaner would remove the big files in deps and build as well
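Note: once the closure is bound to a name instead of being written inline in .all(), its argument type is no longer inferred from the call site, which is why the hunk above adds the explicit io::Result<fs::DirEntry> annotation (and the new use std::io; import). A small sketch of the same shape, not the test suite's code:

    use std::fs;
    use std::io;

    fn main() {
        // Named closures need the argument type spelled out.
        let is_file = |entry: io::Result<fs::DirEntry>| {
            entry.map(|e| e.path().is_file()).unwrap_or(false)
        };

        if let Ok(mut entries) = fs::read_dir(".") {
            println!("all files: {}", entries.all(is_file));
        }
    }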
@@ -884,8 +884,14 @@ impl Execs {
                panic!("`.stream()` is for local debugging")
            }
            process.exec_with_streaming(
-               &mut |out| Ok(println!("{}", out)),
-               &mut |err| Ok(eprintln!("{}", err)),
+               &mut |out| {
+                   println!("{}", out);
+                   Ok(())
+               },
+               &mut |err| {
+                   eprintln!("{}", err);
+                   Ok(())
+               },
                true,
            )
        } else {
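Note: Ok(println!(...)) wraps the () returned by println! directly in Ok, which clippy's unit_arg lint flags; the fix expands each callback into a block that prints and then returns Ok(()). A minimal sketch of the callback shape with a hypothetical helper, not Cargo's exec_with_streaming API:

    fn for_each_line(
        lines: &[&str],
        on_line: &mut dyn FnMut(&str) -> Result<(), String>,
    ) -> Result<(), String> {
        for line in lines {
            on_line(*line)?;
        }
        Ok(())
    }

    fn main() -> Result<(), String> {
        // Print first, then return Ok(()), instead of `|out| Ok(println!("{}", out))`.
        for_each_line(&["one", "two"], &mut |out| {
            println!("{}", out);
            Ok(())
        })
    }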
@@ -1166,7 +1172,7 @@ impl Execs {
        let e = out.lines();

        let mut diffs = self.diff_lines(a.clone(), e.clone(), true);
-       while let Some(..) = a.next() {
+       while a.next().is_some() {
            let a = self.diff_lines(a.clone(), e.clone(), true);
            if a.len() < diffs.len() {
                diffs = a;
@@ -1214,7 +1220,7 @@ impl Execs {
        let e = out.lines();

        let mut diffs = self.diff_lines(a.clone(), e.clone(), true);
-       while let Some(..) = a.next() {
+       while a.next().is_some() {
            let a = self.diff_lines(a.clone(), e.clone(), true);
            if a.len() < diffs.len() {
                diffs = a;
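Note: the last two hunks replace a while let that ignores its binding with an explicit .is_some() check, which states "advance until exhausted" directly. A tiny standalone sketch:

    fn main() {
        let mut letters = "abc".chars();

        // Advance the iterator without using the yielded items.
        let mut steps = 0;
        while letters.next().is_some() {
            steps += 1;
        }
        assert_eq!(steps, 3);
    }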