refactor: clean up clippy::perf lint warnings

The `clippy::perf` lint group is useful for catching patterns that carry
small but avoidable performance costs, such as unnecessary allocations.

This PR fixes most of them, except `clippy::large_enum_variant`,
which doesn't feel right to address at this moment and needs more research.

Overall, this PR should be good to go.
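
Most of the changes are mechanical. The single most common pattern fixed below is `clippy::to_string_in_format_args`: calling `.to_string()` on a value that is about to be formatted anyway. The group can be enabled with `cargo clippy -- -W clippy::perf`. A minimal standalone sketch of the pattern (names are illustrative, not taken from this PR):

```rust
use std::net::SocketAddr;

fn main() {
    let addr: SocketAddr = "127.0.0.1:8080".parse().unwrap();
    // `format!` already calls `Display::fmt` on its arguments, so the
    // `.to_string()` here only adds a throwaway heap allocation.
    let slow = format!("http://{}/", addr.to_string());
    // Preferred: pass the `Display` value directly.
    let fast = format!("http://{}/", addr);
    assert_eq!(slow, fast);
}
```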
Weihang Lo committed 2025-06-04 11:30:42 -04:00
commit c3409d5425 (parent 762fbf9bcb)
28 changed files with 73 additions and 96 deletions


@@ -66,7 +66,7 @@ pub fn global_root() -> PathBuf {
 // [*] It does set the thread name, but only when running concurrently. If not
 // running concurrently, all tests are run on the main thread.
 thread_local! {
-    static TEST_ID: RefCell<Option<usize>> = RefCell::new(None);
+    static TEST_ID: RefCell<Option<usize>> = const { RefCell::new(None) };
 }
 /// See [`init_root`]
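
For reference, the `const { ... }` initializer is what `clippy::missing_const_for_thread_local` suggests: when the initial value is const-evaluable, each thread's slot can be set up without the runtime lazy-initialization check. A minimal sketch, independent of the Cargo code above:

```rust
use std::cell::RefCell;

thread_local! {
    // Lazily initialized on first access in each thread.
    static LAZY: RefCell<Option<usize>> = RefCell::new(None);
    // Const-initialized: no "is it initialized yet?" branch on access.
    static CONST_INIT: RefCell<Option<usize>> = const { RefCell::new(None) };
}

fn main() {
    CONST_INIT.with(|v| *v.borrow_mut() = Some(42));
    CONST_INIT.with(|v| assert_eq!(*v.borrow(), Some(42)));
}
```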


@@ -322,7 +322,7 @@ fn split_index_features(mut features: FeatureMap) -> (FeatureMap, Option<Feature
             .iter()
             .any(|value| value.starts_with("dep:") || value.contains("?/"))
         {
-            let new_values = values.drain(..).collect();
+            let new_values = std::mem::take(values);
             features2.insert(feat.clone(), new_values);
         }
     }
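
`std::mem::take` is the `clippy::drain_collect` suggestion: rather than draining and re-collecting element by element, the whole buffer is swapped out in O(1) and an empty one left behind. Illustrative sketch:

```rust
fn main() {
    let mut values = vec![1, 2, 3];
    // `values.drain(..).collect::<Vec<_>>()` moves every element one by one;
    // `std::mem::take` swaps in an empty Vec and returns the original buffer.
    let taken: Vec<i32> = std::mem::take(&mut values);
    assert_eq!(taken, [1, 2, 3]);
    assert!(values.is_empty());
}
```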


@@ -646,15 +646,15 @@ pub struct HttpServerHandle {
 impl HttpServerHandle {
     pub fn index_url(&self) -> Url {
-        Url::parse(&format!("sparse+http://{}/index/", self.addr.to_string())).unwrap()
+        Url::parse(&format!("sparse+http://{}/index/", self.addr)).unwrap()
     }
     pub fn api_url(&self) -> Url {
-        Url::parse(&format!("http://{}/", self.addr.to_string())).unwrap()
+        Url::parse(&format!("http://{}/", self.addr)).unwrap()
     }
     pub fn dl_url(&self) -> Url {
-        Url::parse(&format!("http://{}/dl", self.addr.to_string())).unwrap()
+        Url::parse(&format!("http://{}/dl", self.addr)).unwrap()
     }
     fn stop(&self) {
@@ -892,7 +892,7 @@ impl HttpServer {
         // - The URL matches the registry base URL
         if footer.url != "https://github.com/rust-lang/crates.io-index"
-            && footer.url != &format!("sparse+http://{}/index/", self.addr.to_string())
+            && footer.url != &format!("sparse+http://{}/index/", self.addr)
         {
             return false;
         }


@@ -427,6 +427,7 @@ impl<'e> ManRenderer<'e> {
     }
 }
+#[allow(clippy::collapsible_str_replace)]
 fn escape(s: &str) -> Result<String, Error> {
     // Note: Possible source on output escape sequences: https://man7.org/linux/man-pages/man7/groff_char.7.html.
     // Otherwise, use generic escaping in the form `\[u1EE7]` or `\[u1F994]`.
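
`clippy::collapsible_str_replace` is also in the perf group; it fires on chained `str::replace` calls that could be merged into a single pass. Here the chain is allowed to stand, presumably because `escape` reads more clearly that way. What the lint normally asks for, as a hedged sketch:

```rust
fn main() {
    let s = "a-b_c";
    // Two passes over the string, with an intermediate allocation...
    let chained = s.replace('-', " ").replace('_', " ");
    // ...versus one pass using a multi-pattern matcher.
    let collapsed = s.replace(['-', '_'], " ");
    assert_eq!(chained, collapsed);
}
```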


@@ -205,9 +205,7 @@ proptest! {
         // If resolution was successful, then unpublishing a version of a crate
         // that was not selected should not change that.
         let not_selected: Vec<_> = input
-            .iter()
-            .cloned()
-            .filter(|x| !r.contains(&x.package_id()))
+            .iter().filter(|&x| !r.contains(&x.package_id())).cloned()
             .collect();
         if !not_selected.is_empty() {
@@ -215,9 +213,7 @@ proptest! {
         let new_reg = registry(
             input
-                .iter()
-                .cloned()
-                .filter(|x| !indexes_to_unpublish.contains(&x))
+                .iter().filter(|&x| !indexes_to_unpublish.contains(&x)).cloned()
                 .collect(),
         );
@@ -248,9 +244,7 @@ proptest! {
         let new_reg = registry(
             input
-                .iter()
-                .cloned()
-                .filter(|x| !indexes_to_unpublish.contains(&x))
+                .iter().filter(|&x| !indexes_to_unpublish.contains(&x)).cloned()
                 .collect(),
         );
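
The reordering above (and in the next file) is `clippy::iter_overeager_cloned`: with `filter` placed before `cloned`, items rejected by the predicate are never cloned at all. A standalone sketch with illustrative data:

```rust
fn main() {
    let input = vec![String::from("a"), String::from("bb"), String::from("ccc")];
    // Overeager: clones every String, then discards most of the clones.
    let _slow: Vec<String> = input.iter().cloned().filter(|s| s.len() > 2).collect();
    // Fixed: filter on references first, clone only the survivors.
    let fast: Vec<String> = input.iter().filter(|s| s.len() > 2).cloned().collect();
    assert_eq!(fast, ["ccc"]);
}
```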


@@ -741,8 +741,8 @@ fn incomplete_information_skipping() {
     let new_reg = registry(
         input
             .iter()
+            .filter(|&x| package_to_yank != x.package_id())
             .cloned()
-            .filter(|x| package_to_yank != x.package_id())
             .collect(),
     );
     assert_eq!(input.len(), new_reg.len() + 1);
@@ -810,8 +810,8 @@ fn incomplete_information_skipping_2() {
     let new_reg = registry(
         input
             .iter()
+            .filter(|&x| package_to_yank != x.package_id())
             .cloned()
-            .filter(|x| package_to_yank != x.package_id())
             .collect(),
     );
     assert_eq!(input.len(), new_reg.len() + 1);
@@ -860,8 +860,8 @@ fn incomplete_information_skipping_3() {
     let new_reg = registry(
         input
             .iter()
+            .filter(|&x| package_to_yank != x.package_id())
             .cloned()
-            .filter(|x| package_to_yank != x.package_id())
             .collect(),
     );
     assert_eq!(input.len(), new_reg.len() + 1);


@@ -975,7 +975,7 @@ impl GitFeatures {
     }
     fn expecting() -> String {
-        let fields = vec!["`shallow-index`", "`shallow-deps`"];
+        let fields = ["`shallow-index`", "`shallow-deps`"];
         format!(
             "unstable 'git' only takes {} as valid inputs",
             fields.join(" and ")
@@ -1087,7 +1087,7 @@ impl GitoxideFeatures {
     }
     fn expecting() -> String {
-        let fields = vec!["`fetch`", "`checkout`", "`internal-use-git2`"];
+        let fields = ["`fetch`", "`checkout`", "`internal-use-git2`"];
         format!(
             "unstable 'gitoxide' only takes {} as valid inputs, for shallow fetches see `-Zgit=shallow-index,shallow-deps`",
             fields.join(" and ")
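
Replacing `vec![...]` with a plain array is the `clippy::useless_vec` fix: a fixed set of literals needs no heap allocation, and an array coerces to a slice wherever one is expected. Sketch:

```rust
fn main() {
    // No heap allocation; `join` works on the array via its slice.
    let fields = ["`shallow-index`", "`shallow-deps`"];
    println!("valid inputs: {}", fields.join(" and "));
}
```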


@@ -151,7 +151,7 @@ impl ManifestMetadata {
     /// Whether the given env var should be tracked by Cargo's dep-info.
     pub fn should_track(env_key: &str) -> bool {
         let keys = MetadataEnvs::keys();
-        keys.iter().any(|k| *k == env_key)
+        keys.contains(&env_key)
     }
     pub fn env_var<'a>(&'a self, env_key: &str) -> Option<Cow<'a, str>> {
@@ -1016,21 +1016,21 @@ impl Target {
     pub fn is_dylib(&self) -> bool {
         match self.kind() {
-            TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib),
+            TargetKind::Lib(libs) => libs.contains(&CrateType::Dylib),
             _ => false,
         }
     }
     pub fn is_cdylib(&self) -> bool {
         match self.kind() {
-            TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib),
+            TargetKind::Lib(libs) => libs.contains(&CrateType::Cdylib),
             _ => false,
         }
     }
     pub fn is_staticlib(&self) -> bool {
         match self.kind() {
-            TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Staticlib),
+            TargetKind::Lib(libs) => libs.contains(&CrateType::Staticlib),
             _ => false,
         }
     }
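
The `iter().any(|x| *x == y)` to `contains(&y)` rewrites look like Clippy's `manual_contains` lint (the lint name is my inference, not stated in the PR). `contains` states the intent directly and lets the standard library use a specialized search where one exists. Sketch:

```rust
fn main() {
    let keys = ["CARGO_PKG_NAME", "CARGO_PKG_VERSION"];
    let needle = "CARGO_PKG_VERSION";
    // Equivalent checks; the second avoids spelling out the closure.
    assert!(keys.iter().any(|k| *k == needle));
    assert!(keys.contains(&needle));
}
```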


@@ -1182,7 +1182,7 @@ mod tls {
     use super::Downloads;
-    thread_local!(static PTR: Cell<usize> = Cell::new(0));
+    thread_local!(static PTR: Cell<usize> = const { Cell::new(0) });
     pub(crate) fn with<R>(f: impl FnOnce(Option<&Downloads<'_, '_>>) -> R) -> R {
         let ptr = PTR.with(|p| p.get());


@@ -698,7 +698,7 @@ impl ser::Serialize for Resolve {
                 None => "<none>",
             };
             let id = encodable_package_id(id, &state, self.version());
-            metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
+            metadata.insert(format!("checksum {}", id), checksum.to_string());
         }
     }


@@ -813,7 +813,7 @@ fn latest_compatible<'s>(
                 .unwrap_or(true)
         })
         .map(|(s, _)| s)
-        .last()
+        .next_back()
         .copied()
 }
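
Swapping `.last()` for `.next_back()` matches `clippy::double_ended_iterator_last` (again, the lint name is inferred from the change): `last()` drives a generic iterator forward through every element, while `next_back()` uses the `DoubleEndedIterator` impl to approach from the end. Sketch:

```rust
fn main() {
    let v = [1, 2, 3, 4, 5];
    let mut evens = v.iter().filter(|&&x| x % 2 == 0);
    // Walks in from the back instead of consuming the whole iterator.
    assert_eq!(evens.next_back(), Some(&4));
}
```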


@@ -202,10 +202,8 @@ impl<'gctx> InstallablePackage<'gctx> {
             // If we're installing in --locked mode and there's no `Cargo.lock` published
             // ie. the bin was published before https://github.com/rust-lang/cargo/pull/7026
             } else if !ws.root().join("Cargo.lock").exists() {
-                gctx.shell().warn(format!(
-                    "no Cargo.lock file published in {}",
-                    pkg.to_string()
-                ))?;
+                gctx.shell()
+                    .warn(format!("no Cargo.lock file published in {}", pkg))?;
             }
         }
         let pkg = if source_id.is_git() {


@@ -206,8 +206,7 @@ fn run_doc_tests(
     p.arg("--test");
     add_path_args(ws, unit, &mut p);
-    p.arg("--test-run-directory")
-        .arg(unit.pkg.root().to_path_buf());
+    p.arg("--test-run-directory").arg(unit.pkg.root());
     if let CompileKind::Target(target) = unit.kind {
         // use `rustc_target()` to properly handle JSON target paths
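
Dropping the `.to_path_buf()` here, like the `.to_owned()` and `.to_string()` removals in several later hunks, is `clippy::unnecessary_to_owned`: the callee only needs a borrow, so materializing an owned copy is pure overhead. Sketch:

```rust
use std::path::Path;

fn print_path(p: &Path) {
    println!("{}", p.display());
}

fn main() {
    let root = Path::new("/tmp/project");
    // Allocates a PathBuf only to immediately re-borrow it.
    print_path(&root.to_path_buf());
    // The original borrow can be passed straight through.
    print_path(root);
}
```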


@@ -136,22 +136,19 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
         // Mirror `--workspace` and never avoid workspace members.
         // Filtering them out here so the above processes them normally
         // so their dependencies can be updated as requested
-        to_avoid = to_avoid
-            .into_iter()
-            .filter(|id| {
-                for package in ws.members() {
-                    let member_id = package.package_id();
-                    // Skip checking the `version` because `previous_resolve` might have a stale
-                    // value.
-                    // When dealing with workspace members, the other fields should be a
-                    // sufficiently unique match.
-                    if id.name() == member_id.name() && id.source_id() == member_id.source_id() {
-                        return false;
-                    }
-                }
-                true
-            })
-            .collect();
+        to_avoid.retain(|id| {
+            for package in ws.members() {
+                let member_id = package.package_id();
+                // Skip checking the `version` because `previous_resolve` might have a stale
+                // value.
+                // When dealing with workspace members, the other fields should be a
+                // sufficiently unique match.
+                if id.name() == member_id.name() && id.source_id() == member_id.source_id() {
+                    return false;
+                }
+            }
+            true
+        });
         registry.add_sources(sources)?;
     }
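
The rewrite above is `clippy::manual_retain`: `retain` removes rejected elements in place instead of moving every survivor into a freshly collected vector. Reduced sketch:

```rust
fn main() {
    let mut ids = vec![1, 2, 3, 4, 5];
    // Instead of `ids = ids.into_iter().filter(...).collect()`.
    ids.retain(|&id| id % 2 == 1);
    assert_eq!(ids, [1, 3, 5]);
}
```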


@@ -546,7 +546,7 @@ fn check_resolver_change<'gctx>(
     }
     // Only trigger if updating the root package from 2018.
     let pkgs = opts.compile_opts.spec.get_packages(ws)?;
-    if !pkgs.iter().any(|&pkg| pkg == root_pkg) {
+    if !pkgs.contains(&root_pkg) {
         // The root is not being migrated.
         return Ok(());
     }


@@ -88,10 +88,7 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
     let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?;
     // In `members_with_features_old`, it will add "current" package (determined by the cwd)
     // So we need filter
-    pkgs = pkgs
-        .into_iter()
-        .filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())))
-        .collect();
+    pkgs.retain(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id())));
     let (unpublishable, pkgs): (Vec<_>, Vec<_>) = pkgs
         .into_iter()


@@ -1126,7 +1126,7 @@ fn fetch_with_gitoxide(
     let config_overrides = cargo_config_to_gitoxide_overrides(gctx)?;
     let repo_reinitialized = AtomicBool::default();
     let res = oxide::with_retry_and_progress(
-        &git2_repo.path().to_owned(),
+        git2_repo.path(),
         gctx,
         &|repo_path,
           should_interrupt,


@@ -72,13 +72,7 @@ pub(super) fn download(
         && !url.contains(CHECKSUM_TEMPLATE)
     {
         // Original format before customizing the download URL was supported.
-        write!(
-            url,
-            "/{}/{}/download",
-            pkg.name(),
-            pkg.version().to_string()
-        )
-        .unwrap();
+        write!(url, "/{}/{}/download", pkg.name(), pkg.version()).unwrap();
     } else {
         let prefix = make_dep_path(&pkg.name(), true);
         url = url


@@ -869,7 +869,7 @@ mod tls {
     use super::Downloads;
     use std::cell::Cell;
-    thread_local!(static PTR: Cell<usize> = Cell::new(0));
+    thread_local!(static PTR: Cell<usize> = const { Cell::new(0) });
     pub(super) fn with<R>(f: impl FnOnce(Option<&Downloads<'_>>) -> R) -> R {
         let ptr = PTR.with(|p| p.get());


@@ -152,9 +152,9 @@ fn verify_feature_enabled(
         &["workspace", "lints", "cargo", lint_name],
         false,
     )
-    .expect(&format!(
-        "could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` "
-    ));
+    .unwrap_or_else(|| {
+        panic!("could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` ")
+    });
     let inherited_note = if let (Some(inherit_span_key), Some(inherit_span_value)) = (
         get_span(manifest.document(), &["lints", "workspace"], false),
@@ -556,9 +556,9 @@ fn output_unknown_lints(
         &["workspace", "lints", "cargo", lint_name],
         false,
     )
-    .expect(&format!(
-        "could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` "
-    ));
+    .unwrap_or_else(|| {
+        panic!("could not find `cargo::{lint_name}` in `[lints]`, or `[workspace.lints]` ")
+    });
     let inherited_note = if let (Some(inherit_span_key), Some(inherit_span_value)) = (
         get_span(manifest.document(), &["lints", "workspace"], false),
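
This is the `clippy::expect_fun_call` fix: `.expect(&format!(...))` builds the panic message eagerly on every call, while `unwrap_or_else(|| panic!(...))` defers the formatting to the failure path. Sketch:

```rust
fn find(name: &str) -> Option<usize> {
    ["a", "b"].iter().position(|&n| n == name)
}

fn main() {
    // The message is only formatted if `find` returns `None`.
    let idx = find("b").unwrap_or_else(|| panic!("could not find `{}`", "b"));
    assert_eq!(idx, 1);
}
```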


@@ -258,9 +258,7 @@ impl Cache {
         extra_fingerprint: u64,
     ) -> CargoResult<(String, String)> {
         let key = process_fingerprint(cmd, extra_fingerprint);
-        if self.data.outputs.contains_key(&key) {
-            debug!("rustc info cache hit");
-        } else {
+        if let std::collections::hash_map::Entry::Vacant(e) = self.data.outputs.entry(key) {
             debug!("rustc info cache miss");
             debug!("running {}", cmd);
             let output = cmd.output()?;
@@ -270,21 +268,20 @@ impl Cache {
             let stderr = String::from_utf8(output.stderr)
                 .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes()))
                 .with_context(|| format!("`{}` didn't return utf8 output", cmd))?;
-            self.data.outputs.insert(
-                key,
-                Output {
-                    success: output.status.success(),
-                    status: if output.status.success() {
-                        String::new()
-                    } else {
-                        cargo_util::exit_status_to_string(output.status)
-                    },
-                    code: output.status.code(),
-                    stdout,
-                    stderr,
-                },
-            );
+            e.insert(Output {
+                success: output.status.success(),
+                status: if output.status.success() {
+                    String::new()
+                } else {
+                    cargo_util::exit_status_to_string(output.status)
+                },
+                code: output.status.code(),
+                stdout,
+                stderr,
+            });
             self.dirty = true;
+        } else {
+            debug!("rustc info cache hit");
         }
         let output = &self.data.outputs[&key];
         if output.success {
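
The restructuring switches to the `HashMap` entry API, as `clippy::map_entry` suggests: `contains_key` followed by `insert` hashes and probes the key twice, while `entry` performs one lookup and inserts through the returned slot. Reduced sketch:

```rust
use std::collections::hash_map::{Entry, HashMap};

fn main() {
    let mut cache: HashMap<u64, String> = HashMap::new();
    // One hash lookup covers both the miss check and the insertion.
    if let Entry::Vacant(e) = cache.entry(42) {
        e.insert(String::from("computed"));
    }
    assert_eq!(cache[&42], "computed");
}
```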


@@ -1482,7 +1482,7 @@ pub fn to_real_manifest(
             warnings.push(format!(
                 "file `{}` found to be present in multiple \
                  build targets:\n{}",
-                target_path.display().to_string(),
+                target_path.display(),
                 conflicts
                     .iter()
                     .map(|t| format!(" * `{}` target `{}`", t.kind().description(), t.name(),))
@@ -1986,7 +1986,7 @@ fn validate_dependencies(
         None => "dependencies",
     };
     let table_in_toml = if let Some(platform) = platform {
-        format!("target.{}.{kind_name}", platform.to_string())
+        format!("target.{platform}.{kind_name}")
     } else {
         kind_name.to_string()
     };


@@ -1269,7 +1269,7 @@ mod tests {
             .unwrap();
         let dep = dep.set_source(WorkspaceSource::new());
         local
-            .insert_into_table(&vec![], &dep, &gctx, &crate_root, &Features::default())
+            .insert_into_table(&[], &dep, &gctx, &crate_root, &Features::default())
             .unwrap();
         assert_eq!(local.data.to_string(), "dep.workspace = true\n");
     }


@@ -1475,7 +1475,7 @@ fn dep_with_changed_submodule() {
     });
     let repo = git2::Repository::open(&git_project.root()).unwrap();
-    let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src"));
+    let mut sub = git::add_submodule(&repo, git_project2.url().as_ref(), Path::new("src"));
     git::commit(&repo);
     let p = project
@@ -1537,7 +1537,7 @@ project2
         .remote_add_fetch("origin", "refs/heads/*:refs/heads/*")
         .unwrap();
     subrepo
-        .remote_set_url("origin", &git_project3.url().to_string())
+        .remote_set_url("origin", git_project3.url().as_ref())
         .unwrap();
     let mut origin = subrepo.find_remote("origin").unwrap();
     origin.fetch(&Vec::<String>::new(), None, None).unwrap();


@@ -466,7 +466,7 @@ fn inherit_own_detailed_dependencies() {
         .build();
     Package::new("dep", "0.1.2")
-        .feature("testing", &vec![])
+        .feature("testing", &[])
         .publish();
     p.cargo("check")


@@ -766,7 +766,7 @@ fn multiple_crates_git_all() {
     )
     .build();
-    cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run();
+    cargo_process(&format!("install --git {} bin1 bin2", p.url())).run();
 }
 #[cargo_test]
@@ -2148,7 +2148,7 @@ fn git_install_reads_workspace_manifest() {
     )
     .build();
-    cargo_process(&format!("install --git {}", p.url().to_string()))
+    cargo_process(&format!("install --git {}", p.url()))
         .with_status(101)
         .with_stderr_data(str![[r#"
 [UPDATING] git repository `[ROOTURL]/foo`


@@ -1457,7 +1457,7 @@ fn dirty_file_outside_pkg_root_inside_submodule() {
     });
     git::add_submodule(
         &repo,
-        &submodule.root().to_url().to_string(),
+        submodule.root().to_url().as_ref(),
         Path::new("submodule"),
     );
     p.symlink("submodule/file.txt", "isengard/src/file.txt");
@@ -3595,7 +3595,7 @@ fn larger_filesizes() {
         description = "foo"
         documentation = "https://example.com/"
     "#;
-    let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
+    let lots_of_crabs = "🦀".repeat(1337);
     let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
     let bar_txt_contents = "This file is relatively uncompressible, to increase the compressed
     package size beyond 1KiB.
@@ -3711,7 +3711,7 @@ fn symlink_filesizes() {
         description = "foo"
        homepage = "https://example.com/"
     "#;
-    let lots_of_crabs = std::iter::repeat("🦀").take(1337).collect::<String>();
+    let lots_of_crabs = "🦀".repeat(1337);
     let main_rs_contents = format!(r#"fn main() {{ println!("{}"); }}"#, lots_of_crabs);
     let bar_txt_contents = "This file is relatively uncompressible, to increase the compressed
     package size beyond 1KiB.
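
The two `lots_of_crabs` hunks, and the tag construction in the next file, are `clippy::manual_str_repeat`: `str::repeat` sizes its output buffer in one allocation instead of growing a `String` while collecting an iterator. Sketch:

```rust
fn main() {
    // Single allocation of the exact final size.
    let crabs = "🦀".repeat(3);
    assert_eq!(crabs, "🦀🦀🦀");
}
```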


@@ -1391,7 +1391,7 @@ fn update_precise_git_revisions() {
     // Now make a tag looks like an oid.
     // It requires a git fetch, as the oid cannot be found in preexisting git db.
-    let arbitrary_tag: String = std::iter::repeat('a').take(head_id.len()).collect();
+    let arbitrary_tag: String = "a".repeat(head_id.len());
     git::tag(&git_repo, &arbitrary_tag);
     p.cargo("update git --precise")