mirror of
https://github.com/rust-lang/cargo.git
synced 2025-09-25 11:14:46 +00:00
refactor: clean up clippy::perf
lint warnings (#15631)
### What does this PR try to resolve? The `clippy::perf` lint group is fairly useful for catching bad practices that might hurt performance marginally. This PR fixes most of them except `clippy::large_enum_variant`, which doesn't feel right at this moment and needs more research. That is why I didn't enable the lint group. ### How to test and review this PR?
This commit is contained in:
commit
a7be79bca8
@ -66,7 +66,7 @@ pub fn global_root() -> PathBuf {
|
||||
// [*] It does set the thread name, but only when running concurrently. If not
|
||||
// running concurrently, all tests are run on the main thread.
|
||||
thread_local! {
|
||||
static TEST_ID: RefCell<Option<usize>> = RefCell::new(None);
|
||||
static TEST_ID: RefCell<Option<usize>> = const { RefCell::new(None) };
|
||||
}
|
||||
|
||||
/// See [`init_root`]
|
||||
|
@ -322,7 +322,7 @@ fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option<Feature
|
||||
.iter()
|
||||
.any(|value| value.starts_with("dep:") || value.contains("?/"))
|
||||
{
|
||||
let new_values = values.drain(..).collect();
|
||||
let new_values = std::mem::take(values);
|
||||
features2.insert(feat.clone(), new_values);
|
||||
}
|
||||
}
|
||||
|
@ -646,15 +646,15 @@ pub struct HttpServerHandle {
|
||||
|
||||
impl HttpServerHandle {
|
||||
pub fn index_url(&self) -> Url {
|
||||
Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap()
|
||||
Url::parse(&format!("sparse+http://{}/index/", self.addr)).unwrap()
|
||||
}
|
||||
|
||||
pub fn api_url(&self) -> Url {
|
||||
Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap()
|
||||
Url::parse(&format!("http://{}/", self.addr)).unwrap()
|
||||
}
|
||||
|
||||
pub fn dl_url(&self) -> Url {
|
||||
Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap()
|
||||
Url::parse(&format!("http://{}/dl", self.addr)).unwrap()
|
||||
}
|
||||
|
||||
fn stop(&self) {
|
||||
@ -892,7 +892,7 @@ impl HttpServer {
|
||||
|
||||
// - The URL matches the registry base URL
|
||||
if footer.url != "https://github.com/rust-lang/crates.io-index"
|
||||
&& footer.url != &format!("sparse+http://{}/index/", self.addr.to_string())
|
||||
&& footer.url != &format!("sparse+http://{}/index/", self.addr)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -427,6 +427,7 @@ impl<'e> ManRenderer<'e> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::collapsible_str_replace)]
|
||||
fn escape(s: &str) -> Result<String, Error> {
|
||||
// Note: Possible source on output escape sequences: https://man7.org/linux/man-pages/man7/groff_char.7.html.
|
||||
// Otherwise, use generic escaping in the form `\[u1EE7]` or `\[u1F994]`.
|
||||
|
@ -205,9 +205,7 @@ proptest! {
|
||||
// If resolution was successful, then unpublishing a version of a crate
|
||||
// that was not selected should not change that.
|
||||
let not_selected: Vec<_> = input
|
||||
.iter()
|
||||
.cloned()
|
||||
.filter(|x| !r.contains(&x.package_id()))
|
||||
.iter().filter(|&x| !r.contains(&x.package_id())).cloned()
|
||||
.collect();
|
||||
|
||||
if !not_selected.is_empty() {
|
||||
@ -215,9 +213,7 @@ proptest! {
|
||||
|
||||
let new_reg = registry(
|
||||
input
|
||||
.iter()
|
||||
.cloned()
|
||||
.filter(|x| !indexes_to_unpublish.contains(&x))
|
||||
.iter().filter(|&x| !indexes_to_unpublish.contains(&x)).cloned()
|
||||
.collect(),
|
||||
);
|
||||
|
||||
@ -248,9 +244,7 @@ proptest! {
|
||||
|
||||
let new_reg = registry(
|
||||
input
|
||||
.iter()
|
||||
.cloned()
|
||||
.filter(|x| !indexes_to_unpublish.contains(&x))
|
||||
.iter().filter(|&x| !indexes_to_unpublish.contains(&x)).cloned()
|
||||
.collect(),
|
||||
);
|
||||
|
||||
|
@ -741,8 +741,8 @@ fn incomplete_information_skipping() {
|
||||
let new_reg = registry(
|
||||
input
|
||||
.iter()
|
||||
.filter(|&x| package_to_yank != x.package_id())
|
||||
.cloned()
|
||||
.filter(|x| package_to_yank != x.package_id())
|
||||
.collect(),
|
||||
);
|
||||
assert_eq!(input.len(), new_reg.len() + 1);
|
||||
@ -810,8 +810,8 @@ fn incomplete_information_skipping_2() {
|
||||
let new_reg = registry(
|
||||
input
|
||||
.iter()
|
||||
.filter(|&x| package_to_yank != x.package_id())
|
||||
.cloned()
|
||||
.filter(|x| package_to_yank != x.package_id())
|
||||
.collect(),
|
||||
);
|
||||
assert_eq!(input.len(), new_reg.len() + 1);
|
||||
@ -860,8 +860,8 @@ fn incomplete_information_skipping_3() {
|
||||
let new_reg = registry(
|
||||
input
|
||||
.iter()
|
||||
.filter(|&x| package_to_yank != x.package_id())
|
||||
.cloned()
|
||||
.filter(|x| package_to_yank != x.package_id())
|
||||
.collect(),
|
||||
);
|
||||
assert_eq!(input.len(), new_reg.len() + 1);
|
||||
|
@ -975,7 +975,7 @@ impl GitFeatures {
|
||||
}
|
||||
|
||||
fn expecting() -> String {
|
||||
let fields = vec!["`shallow-index`", "`shallow-deps`"];
|
||||
let fields = ["`shallow-index`", "`shallow-deps`"];
|
||||
format!(
|
||||
"unstable 'git' only takes {} as valid inputs",
|
||||
fields.join(" and ")
|
||||
@ -1087,7 +1087,7 @@ impl GitoxideFeatures {
|
||||
}
|
||||
|
||||
fn expecting() -> String {
|
||||
let fields = vec!["`fetch`", "`checkout`", "`internal-use-git2`"];
|
||||
let fields = ["`fetch`", "`checkout`", "`internal-use-git2`"];
|
||||
format!(
|
||||
"unstable 'gitoxide' only takes {} as valid inputs, for shallow fetches see `-Zgit=shallow-index,shallow-deps`",
|
||||
fields.join(" and ")
|
||||
|
@ -151,7 +151,7 @@ impl ManifestMetadata {
|
||||
/// Whether the given env var should be tracked by Cargo's dep-info.
|
||||
pub fn should_track(env_key: &str) -> bool {
|
||||
let keys = MetadataEnvs::keys();
|
||||
keys.iter().any(|k| *k == env_key)
|
||||
keys.contains(&env_key)
|
||||
}
|
||||
|
||||
pub fn env_var<'a>(&'a self, env_key: &str) -> Option<Cow<'a, str>> {
|
||||
@ -1016,21 +1016,21 @@ impl Target {
|
||||
|
||||
pub fn is_dylib(&self) -> bool {
|
||||
match self.kind() {
|
||||
TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib),
|
||||
TargetKind::Lib(libs) => libs.contains(&CrateType::Dylib),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_cdylib(&self) -> bool {
|
||||
match self.kind() {
|
||||
TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib),
|
||||
TargetKind::Lib(libs) => libs.contains(&CrateType::Cdylib),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_staticlib(&self) -> bool {
|
||||
match self.kind() {
|
||||
TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Staticlib),
|
||||
TargetKind::Lib(libs) => libs.contains(&CrateType::Staticlib),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -1182,7 +1182,7 @@ mod tls {
|
||||
|
||||
use super::Downloads;
|
||||
|
||||
thread_local!(static PTR: Cell<usize> = Cell::new(0));
|
||||
thread_local!(static PTR: Cell<usize> = const { Cell::new(0) });
|
||||
|
||||
pub(crate) fn with<R>(f: impl FnOnce(Option<&Downloads<'_, '_>>) -> R) -> R {
|
||||
let ptr = PTR.with(|p| p.get());
|
||||
|
@ -698,7 +698,7 @@ impl ser::Serialize for Resolve {
|
||||
None => "<none>",
|
||||
};
|
||||
let id = encodable_package_id(id, &state, self.version());
|
||||
metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
|
||||
metadata.insert(format!("checksum {}", id), checksum.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -813,7 +813,7 @@ fn latest_compatible<'s>(
|
||||
.unwrap_or(true)
|
||||
})
|
||||
.map(|(s, _)| s)
|
||||
.last()
|
||||
.next_back()
|
||||
.copied()
|
||||
}
|
||||
|
||||
|
@ -202,10 +202,8 @@ impl<'gctx> InstallablePackage<'gctx> {
|
||||
// If we're installing in --locked mode and there's no `Cargo.lock` published
|
||||
// ie. the bin was published before https://github.com/rust-lang/cargo/pull/7026
|
||||
} else if !ws.root().join("Cargo.lock").exists() {
|
||||
gctx.shell().warn(format!(
|
||||
"no Cargo.lock file published in {}",
|
||||
pkg.to_string()
|
||||
))?;
|
||||
gctx.shell()
|
||||
.warn(format!("no Cargo.lock file published in {}", pkg))?;
|
||||
}
|
||||
}
|
||||
let pkg = if source_id.is_git() {
|
||||
|
@ -206,8 +206,7 @@ fn run_doc_tests(
|
||||
p.arg("--test");
|
||||
|
||||
add_path_args(ws, unit, &mut p);
|
||||
p.arg("--test-run-directory")
|
||||
.arg(unit.pkg.root().to_path_buf());
|
||||
p.arg("--test-run-directory").arg(unit.pkg.root());
|
||||
|
||||
if let CompileKind::Target(target) = unit.kind {
|
||||
// use `rustc_target()` to properly handle JSON target paths
|
||||
|
@ -136,22 +136,19 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
|
||||
// Mirror `--workspace` and never avoid workspace members.
|
||||
// Filtering them out here so the above processes them normally
|
||||
// so their dependencies can be updated as requested
|
||||
to_avoid = to_avoid
|
||||
.into_iter()
|
||||
.filter(|id| {
|
||||
for package in ws.members() {
|
||||
let member_id = package.package_id();
|
||||
// Skip checking the `version` because `previous_resolve` might have a stale
|
||||
// value.
|
||||
// When dealing with workspace members, the other fields should be a
|
||||
// sufficiently unique match.
|
||||
if id.name() == member_id.name() && id.source_id() == member_id.source_id() {
|
||||
return false;
|
||||
}
|
||||
to_avoid.retain(|id| {
|
||||
for package in ws.members() {
|
||||
let member_id = package.package_id();
|
||||
// Skip checking the `version` because `previous_resolve` might have a stale
|
||||
// value.
|
||||
// When dealing with workspace members, the other fields should be a
|
||||
// sufficiently unique match.
|
||||
if id.name() == member_id.name() && id.source_id() == member_id.source_id() {
|
||||
return false;
|
||||
}
|
||||
true
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
true
|
||||
});
|
||||
|
||||
registry.add_sources(sources)?;
|
||||
}
|
||||
|
@ -546,7 +546,7 @@ fn check_resolver_change<'gctx>(
|
||||
}
|
||||
// Only trigger if updating the root package from 2018.
|
||||
let pkgs = opts.compile_opts.spec.get_packages(ws)?;
|
||||
if !pkgs.iter().any(|&pkg| pkg == root_pkg) {
|
||||
if !pkgs.contains(&root_pkg) {
|
||||
// The root is not being migrated.
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -88,10 +88,7 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
|
||||
let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?;
|
||||
// In `members_with_features_old`, it will add "current" package (determined by the cwd)
|
||||
// So we need filter
|
||||
pkgs = pkgs
|
||||
.into_iter()
|
||||
.filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())))
|
||||
.collect();
|
||||
pkgs.retain(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())));
|
||||
|
||||
let (unpublishable, pkgs): (Vec<_>, Vec<_>) = pkgs
|
||||
.into_iter()
|
||||
|
@ -1126,7 +1126,7 @@ fn fetch_with_gitoxide(
|
||||
let config_overrides = cargo_config_to_gitoxide_overrides(gctx)?;
|
||||
let repo_reinitialized = AtomicBool::default();
|
||||
let res = oxide::with_retry_and_progress(
|
||||
&git2_repo.path().to_owned(),
|
||||
git2_repo.path(),
|
||||
gctx,
|
||||
&|repo_path,
|
||||
should_interrupt,
|
||||
|
@ -72,13 +72,7 @@ pub(super) fn download(
|
||||
&& !url.contains(CHECKSUM_TEMPLATE)
|
||||
{
|
||||
// Original format before customizing the download URL was supported.
|
||||
write!(
|
||||
url,
|
||||
"/{}/{}/download",
|
||||
pkg.name(),
|
||||
pkg.version().to_string()
|
||||
)
|
||||
.unwrap();
|
||||
write!(url, "/{}/{}/download", pkg.name(), pkg.version()).unwrap();
|
||||
} else {
|
||||
let prefix = make_dep_path(&pkg.name(), true);
|
||||
url = url
|
||||
|
@ -869,7 +869,7 @@ mod tls {
|
||||
use super::Downloads;
|
||||
use std::cell::Cell;
|
||||
|
||||
thread_local!(static PTR: Cell<usize> = Cell::new(0));
|
||||
thread_local!(static PTR: Cell<usize> = const { Cell::new(0) });
|
||||
|
||||
pub(super) fn with<R>(f: impl FnOnce(Option<&Downloads<'_>>) -> R) -> R {
|
||||
let ptr = PTR.with(|p| p.get());
|
||||
|
@ -152,9 +152,9 @@ fn verify_feature_enabled(
|
||||
&["workspace", "lints", "cargo", lint_name],
|
||||
false,
|
||||
)
|
||||
.expect(&format!(
|
||||
"could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` "
|
||||
));
|
||||
.unwrap_or_else(|| {
|
||||
panic!("could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` ")
|
||||
});
|
||||
|
||||
let inherited_note = if let (Some(inherit_span_key), Some(inherit_span_value)) = (
|
||||
get_span(manifest.document(), &["lints", "workspace"], false),
|
||||
@ -556,9 +556,9 @@ fn output_unknown_lints(
|
||||
&["workspace", "lints", "cargo", lint_name],
|
||||
false,
|
||||
)
|
||||
.expect(&format!(
|
||||
"could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` "
|
||||
));
|
||||
.unwrap_or_else(|| {
|
||||
panic!("could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` ")
|
||||
});
|
||||
|
||||
let inherited_note = if let (Some(inherit_span_key), Some(inherit_span_value)) = (
|
||||
get_span(manifest.document(), &["lints", "workspace"], false),
|
||||
|
@ -258,9 +258,7 @@ impl Cache {
|
||||
extra_fingerprint: u64,
|
||||
) -> CargoResult<(String, String)> {
|
||||
let key = process_fingerprint(cmd, extra_fingerprint);
|
||||
if self.data.outputs.contains_key(&key) {
|
||||
debug!("rustc info cache hit");
|
||||
} else {
|
||||
if let std::collections::hash_map::Entry::Vacant(e) = self.data.outputs.entry(key) {
|
||||
debug!("rustc info cache miss");
|
||||
debug!("running {}", cmd);
|
||||
let output = cmd.output()?;
|
||||
@ -270,21 +268,20 @@ impl Cache {
|
||||
let stderr = String::from_utf8(output.stderr)
|
||||
.map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes()))
|
||||
.with_context(|| format!("`{}` didn't return utf8 output", cmd))?;
|
||||
self.data.outputs.insert(
|
||||
key,
|
||||
Output {
|
||||
success: output.status.success(),
|
||||
status: if output.status.success() {
|
||||
String::new()
|
||||
} else {
|
||||
cargo_util::exit_status_to_string(output.status)
|
||||
},
|
||||
code: output.status.code(),
|
||||
stdout,
|
||||
stderr,
|
||||
e.insert(Output {
|
||||
success: output.status.success(),
|
||||
status: if output.status.success() {
|
||||
String::new()
|
||||
} else {
|
||||
cargo_util::exit_status_to_string(output.status)
|
||||
},
|
||||
);
|
||||
code: output.status.code(),
|
||||
stdout,
|
||||
stderr,
|
||||
});
|
||||
self.dirty = true;
|
||||
} else {
|
||||
debug!("rustc info cache hit");
|
||||
}
|
||||
let output = &self.data.outputs[&key];
|
||||
if output.success {
|
||||
|
@ -1482,7 +1482,7 @@ pub fn to_real_manifest(
|
||||
warnings.push(format!(
|
||||
"file `{}` found to be present in multiple \
|
||||
build targets:\n{}",
|
||||
target_path.display().to_string(),
|
||||
target_path.display(),
|
||||
conflicts
|
||||
.iter()
|
||||
.map(|t| format!(" * `{}` target `{}`", t.kind().description(), t.name(),))
|
||||
@ -1986,7 +1986,7 @@ fn validate_dependencies(
|
||||
None => "dependencies",
|
||||
};
|
||||
let table_in_toml = if let Some(platform) = platform {
|
||||
format!("target.{}.{kind_name}", platform.to_string())
|
||||
format!("target.{platform}.{kind_name}")
|
||||
} else {
|
||||
kind_name.to_string()
|
||||
};
|
||||
|
@ -1269,7 +1269,7 @@ mod tests {
|
||||
.unwrap();
|
||||
let dep = dep.set_source(WorkspaceSource::new());
|
||||
local
|
||||
.insert_into_table(&vec![], &dep, &gctx, &crate_root, &Features::default())
|
||||
.insert_into_table(&[], &dep, &gctx, &crate_root, &Features::default())
|
||||
.unwrap();
|
||||
assert_eq!(local.data.to_string(), "dep.workspace = true\n");
|
||||
}
|
||||
|
@ -1475,7 +1475,7 @@ fn dep_with_changed_submodule() {
|
||||
});
|
||||
|
||||
let repo = git2::Repository::open(&git_project.root()).unwrap();
|
||||
let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src"));
|
||||
let mut sub = git::add_submodule(&repo, git_project2.url().as_ref(), Path::new("src"));
|
||||
git::commit(&repo);
|
||||
|
||||
let p = project
|
||||
@ -1537,7 +1537,7 @@ project2
|
||||
.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")
|
||||
.unwrap();
|
||||
subrepo
|
||||
.remote_set_url("origin", &git_project3.url().to_string())
|
||||
.remote_set_url("origin", git_project3.url().as_ref())
|
||||
.unwrap();
|
||||
let mut origin = subrepo.find_remote("origin").unwrap();
|
||||
origin.fetch(&Vec::<String>::new(), None, None).unwrap();
|
||||
|
@ -466,7 +466,7 @@ fn inherit_own_detailed_dependencies() {
|
||||
.build();
|
||||
|
||||
Package::new("dep", "0.1.2")
|
||||
.feature("testing", &vec![])
|
||||
.feature("testing", &[])
|
||||
.publish();
|
||||
|
||||
p.cargo("check")
|
||||
|
@ -766,7 +766,7 @@ fn multiple_crates_git_all() {
|
||||
)
|
||||
.build();
|
||||
|
||||
cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run();
|
||||
cargo_process(&format!("install --git {} bin1 bin2", p.url())).run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
@ -2148,7 +2148,7 @@ fn git_install_reads_workspace_manifest() {
|
||||
)
|
||||
.build();
|
||||
|
||||
cargo_process(&format!("install --git {}", p.url().to_string()))
|
||||
cargo_process(&format!("install --git {}", p.url()))
|
||||
.with_status(101)
|
||||
.with_stderr_data(str![[r#"
|
||||
[UPDATING] git repository `[ROOTURL]/foo`
|
||||
|
@ -1457,7 +1457,7 @@ fn dirty_file_outside_pkg_root_inside_submodule() {
|
||||
});
|
||||
git::add_submodule(
|
||||
&repo,
|
||||
&submodule.root().to_url().to_string(),
|
||||
submodule.root().to_url().as_ref(),
|
||||
Path::new("submodule"),
|
||||
);
|
||||
p.symlink("submodule/file.txt", "isengard/src/file.txt");
|
||||
@ -3595,7 +3595,7 @@ fn larger_filesizes() {
|
||||
description = "foo"
|
||||
documentation = "https://example.com/"
|
||||
"#;
|
||||
let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
|
||||
let lots_of_crabs = "🦀".repeat(1337);
|
||||
let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
|
||||
let bar_txt_contents = "This file is relatively uncompressible, to increase the compressed
|
||||
package size beyond 1KiB.
|
||||
@ -3711,7 +3711,7 @@ fn symlink_filesizes() {
|
||||
description = "foo"
|
||||
homepage = "https://example.com/"
|
||||
"#;
|
||||
let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
|
||||
let lots_of_crabs = "🦀".repeat(1337);
|
||||
let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
|
||||
let bar_txt_contents = "This file is relatively uncompressible, to increase the compressed
|
||||
package size beyond 1KiB.
|
||||
|
@ -1391,7 +1391,7 @@ fn update_precise_git_revisions() {
|
||||
|
||||
// Now make a tag looks like an oid.
|
||||
// It requires a git fetch, as the oid cannot be found in preexisting git db.
|
||||
let arbitrary_tag: String = std::iter::repeat('a').take(head_id.len()).collect();
|
||||
let arbitrary_tag: String = "a".repeat(head_id.len());
|
||||
git::tag(&git_repo, &arbitrary_tag);
|
||||
|
||||
p.cargo("update git --precise")
|
||||
|
Loading…
x
Reference in New Issue
Block a user