docs: Surround identifiers in backticks

This was mostly done by clippy via `clippy::doc_markdown`.
I then reviewed it to fix words that shouldn't have them or where `--fix`
put the backtick in the wrong location.
This commit is contained in:
Ed Page 2024-11-14 19:42:33 -06:00
parent 0b281678c3
commit 878caf7447
59 changed files with 128 additions and 128 deletions

View File

@ -1,7 +1,7 @@
//! Utility for capturing a global cache last-use database based on the files //! Utility for capturing a global cache last-use database based on the files
//! on a real-world system. //! on a real-world system.
//! //!
//! This will look in the CARGO_HOME of the current system and record last-use //! This will look in the `CARGO_HOME` of the current system and record last-use
//! data for all files in the cache. This is intended to provide a real-world //! data for all files in the cache. This is intended to provide a real-world
//! example for a benchmark that should be close to what a real set of data //! example for a benchmark that should be close to what a real set of data
//! should look like. //! should look like.

View File

@ -621,7 +621,7 @@ macro_rules! impl_from_tuple_for_inmemorydir {
}; };
} }
/// Extend `impl_from_tuple_for_inmemorydir`` to generate for the specified tuple and all smaller /// Extend `impl_from_tuple_for_inmemorydir` to generate for the specified tuple and all smaller
/// tuples /// tuples
macro_rules! impl_from_tuples_for_inmemorydir { macro_rules! impl_from_tuples_for_inmemorydir {
($var1:ident $path1:ident $data1:ident, $($var:ident $path:ident $data:ident),+) => { ($var1:ident $path1:ident $data1:ident, $($var:ident $path:ident $data:ident),+) => {

View File

@ -5,7 +5,7 @@
//! with the running container. //! with the running container.
//! //!
//! Tests using containers must use `#[cargo_test(container_test)]` to disable //! Tests using containers must use `#[cargo_test(container_test)]` to disable
//! them unless the CARGO_CONTAINER_TESTS environment variable is set. //! them unless the `CARGO_CONTAINER_TESTS` environment variable is set.
use cargo_util::ProcessBuilder; use cargo_util::ProcessBuilder;
use std::collections::HashMap; use std::collections::HashMap;
@ -36,7 +36,7 @@ pub struct ContainerHandle {
/// This can only be used on Linux. macOS and Windows docker doesn't allow /// This can only be used on Linux. macOS and Windows docker doesn't allow
/// direct connection to the container. /// direct connection to the container.
pub ip_address: String, pub ip_address: String,
/// Port mappings of container_port to host_port for ports exposed via EXPOSE. /// Port mappings of `container_port` to `host_port` for ports exposed via EXPOSE.
pub port_mappings: HashMap<u16, u16>, pub port_mappings: HashMap<u16, u16>,
} }

View File

@ -3,7 +3,7 @@
//! Note that cross-testing is very limited. You need to install the //! Note that cross-testing is very limited. You need to install the
//! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa). //! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa).
//! //!
//! Set CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these tests //! Set `CFG_DISABLE_CROSS_TESTS=1` environment variable to disable these tests
//! if you are unable to use the alternate target. Unfortunately 32-bit //! if you are unable to use the alternate target. Unfortunately 32-bit
//! support on macOS is going away, so macOS users are out of luck. //! support on macOS is going away, so macOS users are out of luck.
//! //!

View File

@ -890,7 +890,7 @@ impl Execs {
/// ///
/// Prefer [`Execs::with_stdout_data`] where possible. /// Prefer [`Execs::with_stdout_data`] where possible.
/// - `with` cannot be snapshotted /// - `with` cannot be snapshotted
/// - The absence of `without`` can either mean success or that the string being looked for /// - The absence of `without` can either mean success or that the string being looked for
/// changed. /// changed.
/// ///
/// </div> /// </div>

View File

@ -17,7 +17,7 @@ static CARGO_INTEGRATION_TEST_DIR: &str = "cit";
static GLOBAL_ROOT: OnceLock<Mutex<Option<PathBuf>>> = OnceLock::new(); static GLOBAL_ROOT: OnceLock<Mutex<Option<PathBuf>>> = OnceLock::new();
/// This is used when running cargo is pre-CARGO_TARGET_TMPDIR /// This is used when running cargo is pre-CARGO_TARGET_TMPDIR
/// TODO: Remove when CARGO_TARGET_TMPDIR grows old enough. /// TODO: Remove when `CARGO_TARGET_TMPDIR` grows old enough.
fn global_root_legacy() -> PathBuf { fn global_root_legacy() -> PathBuf {
let mut path = t!(env::current_exe()); let mut path = t!(env::current_exe());
path.pop(); // chop off exe name path.pop(); // chop off exe name

View File

@ -166,7 +166,7 @@ fn read_new_post(new_path: &Path) -> (Vec<u8>, Vec<u8>) {
/// ///
/// - `expected_crate_name` should be something like `foo-0.0.1.crate`. /// - `expected_crate_name` should be something like `foo-0.0.1.crate`.
/// - `expected_files` should be a complete list of files in the crate /// - `expected_files` should be a complete list of files in the crate
/// (relative to expected_crate_name). /// (relative to `expected_crate_name`).
/// - `expected_contents` should be a list of `(file_name, contents)` tuples /// - `expected_contents` should be a list of `(file_name, contents)` tuples
/// to validate the contents of the given file. Only the listed files will /// to validate the contents of the given file. Only the listed files will
/// be checked (others will be ignored). /// be checked (others will be ignored).

View File

@ -1680,7 +1680,7 @@ impl<'de> de::Deserialize<'de> for InvalidCargoFeatures {
} }
} }
/// A StringOrVec can be parsed from either a TOML string or array, /// This can be parsed from either a TOML string or array,
/// but is always stored as a vector. /// but is always stored as a vector.
#[derive(Clone, Debug, Serialize, Eq, PartialEq, PartialOrd, Ord)] #[derive(Clone, Debug, Serialize, Eq, PartialEq, PartialOrd, Ord)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))] #[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]

View File

@ -189,7 +189,7 @@ pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()>
/// Writes a file to disk atomically. /// Writes a file to disk atomically.
/// ///
/// write_atomic uses tempfile::persist to accomplish atomic writes. /// This uses `tempfile::persist` to accomplish atomic writes.
pub fn write_atomic<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { pub fn write_atomic<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
let path = path.as_ref(); let path = path.as_ref();

View File

@ -10,11 +10,11 @@ use std::{
/// in-process unit testing by rustup. /// in-process unit testing by rustup.
pub trait Env { pub trait Env {
/// Return the path to the user's home dir, or None if any error occurs: /// Return the path to the user's home dir, or None if any error occurs:
/// see home_inner. /// see `home_inner`.
fn home_dir(&self) -> Option<PathBuf>; fn home_dir(&self) -> Option<PathBuf>;
/// Return the current working directory. /// Return the current working directory.
fn current_dir(&self) -> io::Result<PathBuf>; fn current_dir(&self) -> io::Result<PathBuf>;
/// Get an environment variable, as per std::env::var_os. /// Get an environment variable, as per `std::env::var_os`.
fn var_os(&self, key: &str) -> Option<OsString>; fn var_os(&self, key: &str) -> Option<OsString>;
} }
@ -47,7 +47,7 @@ pub fn home_dir_with_env(env: &dyn Env) -> Option<PathBuf> {
env.home_dir() env.home_dir()
} }
/// Variant of cargo_home where the environment source is parameterized. This is /// Variant of `cargo_home` where the environment source is parameterized. This is
/// specifically to support in-process testing scenarios as environment /// specifically to support in-process testing scenarios as environment
/// variables and user home metadata are normally process global state. See the /// variables and user home metadata are normally process global state. See the
/// [`Env`] trait. /// [`Env`] trait.
@ -56,10 +56,10 @@ pub fn cargo_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
cargo_home_with_cwd_env(env, &cwd) cargo_home_with_cwd_env(env, &cwd)
} }
/// Variant of cargo_home_with_cwd where the environment source is /// Variant of `cargo_home_with_cwd` where the environment source is
/// parameterized. This is specifically to support in-process testing scenarios /// parameterized. This is specifically to support in-process testing scenarios
/// as environment variables and user home metadata are normally process global /// as environment variables and user home metadata are normally process global
/// state. See the OsEnv trait. /// state. See the `OsEnv` trait.
pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> { pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
match env.var_os("CARGO_HOME").filter(|h| !h.is_empty()) { match env.var_os("CARGO_HOME").filter(|h| !h.is_empty()) {
Some(home) => { Some(home) => {
@ -76,19 +76,19 @@ pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf>
} }
} }
/// Variant of cargo_home_with_cwd where the environment source is /// Variant of `cargo_home_with_cwd` where the environment source is
/// parameterized. This is specifically to support in-process testing scenarios /// parameterized. This is specifically to support in-process testing scenarios
/// as environment variables and user home metadata are normally process global /// as environment variables and user home metadata are normally process global
/// state. See the OsEnv trait. /// state. See the `OsEnv` trait.
pub fn rustup_home_with_env(env: &dyn Env) -> io::Result<PathBuf> { pub fn rustup_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
let cwd = env.current_dir()?; let cwd = env.current_dir()?;
rustup_home_with_cwd_env(env, &cwd) rustup_home_with_cwd_env(env, &cwd)
} }
/// Variant of cargo_home_with_cwd where the environment source is /// Variant of `cargo_home_with_cwd` where the environment source is
/// parameterized. This is specifically to support in-process testing scenarios /// parameterized. This is specifically to support in-process testing scenarios
/// as environment variables and user home metadata are normally process global /// as environment variables and user home metadata are normally process global
/// state. See the OsEnv trait. /// state. See the `OsEnv` trait.
pub fn rustup_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> { pub fn rustup_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
match env.var_os("RUSTUP_HOME").filter(|h| !h.is_empty()) { match env.var_os("RUSTUP_HOME").filter(|h| !h.is_empty()) {
Some(home) => { Some(home) => {

View File

@ -24,7 +24,7 @@ pub fn parse_name_and_section(text: &str) -> Result<(&str, u8), Error> {
Ok((name, section)) Ok((name, section))
} }
/// Extracts the text from a header after Tag::Heading has been received. /// Extracts the text from a header after `Tag::Heading` has been received.
pub fn header_text<'e>(parser: &mut EventIter<'e>) -> Result<CowStr<'e>, Error> { pub fn header_text<'e>(parser: &mut EventIter<'e>) -> Result<CowStr<'e>, Error> {
let text = match parser.next() { let text = match parser.next() {
Some((Event::Text(t), _range)) => t, Some((Event::Text(t), _range)) => t,

View File

@ -36,7 +36,7 @@ pub struct DiagnosticSpan {
/// Is this a "primary" span -- meaning the point, or one of the points, /// Is this a "primary" span -- meaning the point, or one of the points,
/// where the error occurred? /// where the error occurred?
pub is_primary: bool, pub is_primary: bool,
/// Source text from the start of line_start to the end of line_end. /// Source text from the start of `line_start` to the end of `line_end`.
pub text: Vec<DiagnosticSpanLine>, pub text: Vec<DiagnosticSpanLine>,
/// Label that should be placed at this location (if any) /// Label that should be placed at this location (if any)
label: Option<String>, label: Option<String>,

View File

@ -121,10 +121,10 @@ impl<'a, 'gctx> BuildContext<'a, 'gctx> {
/// Gets the host architecture triple. /// Gets the host architecture triple.
/// ///
/// For example, x86_64-unknown-linux-gnu, would be /// For example, `x86_64-unknown-linux-gnu`, would be
/// - machine: x86_64, /// - machine: `x86_64`,
/// - hardware-platform: unknown, /// - hardware-platform: `unknown`,
/// - operating system: linux-gnu. /// - operating system: `linux-gnu`.
pub fn host_triple(&self) -> InternedString { pub fn host_triple(&self) -> InternedString {
self.target_data.rustc.host self.target_data.rustc.host
} }

View File

@ -93,7 +93,7 @@ pub struct FileType {
} }
impl FileType { impl FileType {
/// The filename for this FileType created by rustc. /// The filename for this `FileType` created by rustc.
pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String { pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String {
match metadata { match metadata {
Some(metadata) => format!( Some(metadata) => format!(
@ -107,7 +107,7 @@ impl FileType {
} }
} }
/// The filename for this FileType that Cargo should use when "uplifting" /// The filename for this `FileType` that Cargo should use when "uplifting"
/// it to the destination directory. /// it to the destination directory.
pub fn uplift_filename(&self, target: &Target) -> String { pub fn uplift_filename(&self, target: &Target) -> String {
let name = match target.binary_filename() { let name = match target.binary_filename() {
@ -513,10 +513,10 @@ impl TargetInfo {
parse_crate_type(crate_type, &process, output, error, &mut output.lines()) parse_crate_type(crate_type, &process, output, error, &mut output.lines())
} }
/// Returns all the file types generated by rustc for the given mode/target_kind. /// Returns all the file types generated by rustc for the given `mode`/`target_kind`.
/// ///
/// The first value is a Vec of file types generated, the second value is /// The first value is a Vec of file types generated, the second value is
/// a list of CrateTypes that are not supported by the given target. /// a list of `CrateTypes` that are not supported by the given target.
pub fn rustc_outputs( pub fn rustc_outputs(
&self, &self,
mode: CompileMode, mode: CompileMode,

View File

@ -1,8 +1,8 @@
//! A graph-like structure used to represent the rustc commands to build the package and the //! A graph-like structure used to represent the rustc commands to build the package and the
//! interdependencies between them. //! interdependencies between them.
//! //!
//! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be //! The `BuildPlan` structure is used to store the dependency graph of a dry run so that it can be
//! shared with an external build system. Each Invocation in the BuildPlan comprises a single //! shared with an external build system. Each Invocation in the `BuildPlan` comprises a single
//! subprocess and defines the build environment, the outputs produced by the subprocess, and the //! subprocess and defines the build environment, the outputs produced by the subprocess, and the
//! dependencies on other Invocations. //! dependencies on other Invocations.

View File

@ -328,7 +328,7 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
self.layout(unit.kind).build().join(dir) self.layout(unit.kind).build().join(dir)
} }
/// Returns the "OUT_DIR" directory for running a build script. /// Returns the "`OUT_DIR`" directory for running a build script.
/// `/path/to/target/{debug,release}/build/PKG-HASH/out` /// `/path/to/target/{debug,release}/build/PKG-HASH/out`
pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf { pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf {
self.build_script_run_dir(unit).join("out") self.build_script_run_dir(unit).join("out")
@ -375,7 +375,7 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
.map(Arc::clone) .map(Arc::clone)
} }
/// Returns the path where the output for the given unit and FileType /// Returns the path where the output for the given unit and `FileType`
/// should be uplifted to. /// should be uplifted to.
/// ///
/// Returns `None` if the unit shouldn't be uplifted (for example, a /// Returns `None` if the unit shouldn't be uplifted (for example, a

View File

@ -80,7 +80,7 @@ pub struct BuildRunner<'a, 'gctx> {
pub lto: HashMap<Unit, Lto>, pub lto: HashMap<Unit, Lto>,
/// Map of Doc/Docscrape units to metadata for their -Cmetadata flag. /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
/// See Context::find_metadata_units for more details. /// See `Context::find_metadata_units` for more details.
pub metadata_for_doc_units: HashMap<Unit, Metadata>, pub metadata_for_doc_units: HashMap<Unit, Metadata>,
/// Set of metadata of Docscrape units that fail before completion, e.g. /// Set of metadata of Docscrape units that fail before completion, e.g.
@ -415,7 +415,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
&self.bcx.unit_graph[unit] &self.bcx.unit_graph[unit]
} }
/// Returns the RunCustomBuild Unit associated with the given Unit. /// Returns the `RunCustomBuild` Unit associated with the given Unit.
/// ///
/// If the package does not have a build script, this returns None. /// If the package does not have a build script, this returns None.
pub fn find_build_script_unit(&self, unit: &Unit) -> Option<Unit> { pub fn find_build_script_unit(&self, unit: &Unit) -> Option<Unit> {
@ -431,7 +431,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
.map(|unit_dep| unit_dep.unit.clone()) .map(|unit_dep| unit_dep.unit.clone())
} }
/// Returns the metadata hash for the RunCustomBuild Unit associated with /// Returns the metadata hash for the `RunCustomBuild` Unit associated with
/// the given unit. /// the given unit.
/// ///
/// If the package does not have a build script, this returns None. /// If the package does not have a build script, this returns None.
@ -440,7 +440,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
Some(self.get_run_build_script_metadata(&script_unit)) Some(self.get_run_build_script_metadata(&script_unit))
} }
/// Returns the metadata hash for a RunCustomBuild unit. /// Returns the metadata hash for a `RunCustomBuild` unit.
pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata {
assert!(unit.mode.is_run_custom_build()); assert!(unit.mode.is_run_custom_build());
self.files().metadata(unit) self.files().metadata(unit)

View File

@ -81,7 +81,7 @@ pub struct Compilation<'gctx> {
/// All directories for the output of native build commands. /// All directories for the output of native build commands.
/// ///
/// This is currently used to drive some entries which are added to the /// This is currently used to drive some entries which are added to the
/// LD_LIBRARY_PATH as appropriate. /// `LD_LIBRARY_PATH` as appropriate.
/// ///
/// The order should be deterministic. /// The order should be deterministic.
pub native_dirs: BTreeSet<PathBuf>, pub native_dirs: BTreeSet<PathBuf>,
@ -113,10 +113,10 @@ pub struct Compilation<'gctx> {
/// Rustc process to be used by default /// Rustc process to be used by default
rustc_process: ProcessBuilder, rustc_process: ProcessBuilder,
/// Rustc process to be used for workspace crates instead of rustc_process /// Rustc process to be used for workspace crates instead of `rustc_process`
rustc_workspace_wrapper_process: ProcessBuilder, rustc_workspace_wrapper_process: ProcessBuilder,
/// Optional rustc process to be used for primary crates instead of either rustc_process or /// Optional rustc process to be used for primary crates instead of either `rustc_process` or
/// rustc_workspace_wrapper_process /// `rustc_workspace_wrapper_process`
primary_rustc_process: Option<ProcessBuilder>, primary_rustc_process: Option<ProcessBuilder>,
target_runners: HashMap<CompileKind, Option<(PathBuf, Vec<String>)>>, target_runners: HashMap<CompileKind, Option<(PathBuf, Vec<String>)>>,
@ -403,7 +403,7 @@ impl<'gctx> Compilation<'gctx> {
} }
} }
/// Prepares a rustc_tool process with additional environment variables /// Prepares a `rustc_tool` process with additional environment variables
/// that are only relevant in a context that has a unit /// that are only relevant in a context that has a unit
fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder { fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder {
if unit.target.is_executable() { if unit.target.is_executable() {

View File

@ -111,7 +111,7 @@ pub struct BuildOutput {
/// inserted during `build_map`. The rest of the entries are added /// inserted during `build_map`. The rest of the entries are added
/// immediately after each build script runs. /// immediately after each build script runs.
/// ///
/// The `Metadata` is the unique metadata hash for the RunCustomBuild Unit of /// The `Metadata` is the unique metadata hash for the `RunCustomBuild` Unit of
/// the package. It needs a unique key, since the build script can be run /// the package. It needs a unique key, since the build script can be run
/// multiple times with different profiles or features. We can't embed a /// multiple times with different profiles or features. We can't embed a
/// `Unit` because this structure needs to be shareable between threads. /// `Unit` because this structure needs to be shareable between threads.
@ -150,7 +150,7 @@ pub struct BuildScripts {
/// This is the set of transitive dependencies that are host-only /// This is the set of transitive dependencies that are host-only
/// (proc-macro, plugin, build-dependency) that contain a build script. /// (proc-macro, plugin, build-dependency) that contain a build script.
/// Any `BuildOutput::library_paths` path relative to `target` will be /// Any `BuildOutput::library_paths` path relative to `target` will be
/// added to LD_LIBRARY_PATH so that the compiler can find any dynamic /// added to `LD_LIBRARY_PATH` so that the compiler can find any dynamic
/// libraries a build script may have generated. /// libraries a build script may have generated.
pub plugins: BTreeSet<(PackageId, Metadata)>, pub plugins: BTreeSet<(PackageId, Metadata)>,
} }

View File

@ -69,13 +69,13 @@
//! `cargo rustc` extra args | ✓ | ✓ //! `cargo rustc` extra args | ✓ | ✓
//! [`CompileMode`] | ✓ | ✓ //! [`CompileMode`] | ✓ | ✓
//! Target Name | ✓ | ✓ //! Target Name | ✓ | ✓
//! TargetKind (bin/lib/etc.) | ✓ | ✓ //! `TargetKind` (bin/lib/etc.) | ✓ | ✓
//! Enabled Features | ✓ | ✓ //! Enabled Features | ✓ | ✓
//! Declared Features | ✓ | //! Declared Features | ✓ |
//! Immediate dependencies' hashes | ✓[^1] | ✓ //! Immediate dependencies' hashes | ✓[^1] | ✓
//! [`CompileKind`] (host/target) | ✓ | ✓ //! [`CompileKind`] (host/target) | ✓ | ✓
//! __CARGO_DEFAULT_LIB_METADATA[^4] | | ✓ //! `__CARGO_DEFAULT_LIB_METADATA`[^4] | | ✓
//! package_id | | ✓ //! `package_id` | | ✓
//! authors, description, homepage, repo | ✓ | //! authors, description, homepage, repo | ✓ |
//! Target src path relative to ws | ✓ | //! Target src path relative to ws | ✓ |
//! Target flags (test/bench/for_host/edition) | ✓ | //! Target flags (test/bench/for_host/edition) | ✓ |
@ -84,7 +84,7 @@
//! RUSTFLAGS/RUSTDOCFLAGS | ✓ | //! RUSTFLAGS/RUSTDOCFLAGS | ✓ |
//! [`Lto`] flags | ✓ | ✓ //! [`Lto`] flags | ✓ | ✓
//! config settings[^5] | ✓ | //! config settings[^5] | ✓ |
//! is_std | | ✓ //! `is_std` | | ✓
//! `[lints]` table[^6] | ✓ | //! `[lints]` table[^6] | ✓ |
//! `[lints.rust.unexpected_cfgs.check-cfg]` | ✓ | //! `[lints.rust.unexpected_cfgs.check-cfg]` | ✓ |
//! //!
@ -738,7 +738,7 @@ enum LocalFingerprint {
/// for us to look at. If any of those files are newer than this file then /// for us to look at. If any of those files are newer than this file then
/// we need to recompile. /// we need to recompile.
/// ///
/// If the `checksum` bool is true then the dep_info file is expected to /// If the `checksum` bool is true then the `dep_info` file is expected to
/// contain file checksums instead of file mtimes. /// contain file checksums instead of file mtimes.
CheckDepInfo { dep_info: PathBuf, checksum: bool }, CheckDepInfo { dep_info: PathBuf, checksum: bool },

View File

@ -151,7 +151,7 @@ use crate::util::{DependencyQueue, GlobalContext, Progress, ProgressStyle, Queue
/// This structure is backed by the `DependencyQueue` type and manages the /// This structure is backed by the `DependencyQueue` type and manages the
/// queueing of compilation steps for each package. Packages enqueue units of /// queueing of compilation steps for each package. Packages enqueue units of
/// work and then later on the entire graph is converted to DrainState and /// work and then later on the entire graph is converted to `DrainState` and
/// executed. /// executed.
pub struct JobQueue<'gctx> { pub struct JobQueue<'gctx> {
queue: DependencyQueue<Unit, Artifact, Job>, queue: DependencyQueue<Unit, Artifact, Job>,
@ -163,7 +163,7 @@ pub struct JobQueue<'gctx> {
/// actual compilation step of each package. Packages enqueue units of work and /// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled. /// then later on the entire graph is processed and compiled.
/// ///
/// It is created from JobQueue when we have fully assembled the crate graph /// It is created from `JobQueue` when we have fully assembled the crate graph
/// (i.e., all package dependencies are known). /// (i.e., all package dependencies are known).
struct DrainState<'gctx> { struct DrainState<'gctx> {
// This is the length of the DependencyQueue when starting out // This is the length of the DependencyQueue when starting out
@ -265,9 +265,9 @@ struct ErrorToHandle {
/// care about individually reporting every thread that it broke; just the /// care about individually reporting every thread that it broke; just the
/// first is enough. /// first is enough.
/// ///
/// The exception where print_always is true is that we do report every /// The exception where `print_always` is true is that we do report every
/// instance of a rustc invocation that failed with diagnostics. This /// instance of a rustc invocation that failed with diagnostics. This
/// corresponds to errors from Message::Finish. /// corresponds to errors from `Message::Finish`.
print_always: bool, print_always: bool,
} }

View File

@ -567,7 +567,7 @@ impl<'gctx> Timings<'gctx> {
} }
impl UnitTime { impl UnitTime {
/// Returns the codegen time as (rmeta_time, codegen_time, percent of total) /// Returns the codegen time as (`rmeta_time`, `codegen_time`, percent of total)
fn codegen_time(&self) -> Option<(f64, f64, f64)> { fn codegen_time(&self) -> Option<(f64, f64, f64)> {
self.rmeta_time.map(|rmeta_time| { self.rmeta_time.map(|rmeta_time| {
let ctime = self.duration - rmeta_time; let ctime = self.duration - rmeta_time;

View File

@ -199,7 +199,7 @@ fn attach_std_deps(
} }
/// Compute all the dependencies of the given root units. /// Compute all the dependencies of the given root units.
/// The result is stored in state.unit_dependencies. /// The result is stored in `state.unit_dependencies`.
fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> { fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> {
for unit in roots.iter() { for unit in roots.iter() {
// Dependencies of tests/benches should not have `panic` set. // Dependencies of tests/benches should not have `panic` set.

View File

@ -137,7 +137,7 @@ pub const SEE_CHANNELS: &str =
"See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \
about Rust release channels."; about Rust release channels.";
/// Value of [`allow-features`](CliUnstable::allow_features] /// Value of [`allow-features`](CliUnstable::allow_features)
pub type AllowFeatures = BTreeSet<String>; pub type AllowFeatures = BTreeSet<String>;
/// The edition of the compiler ([RFC 2052]) /// The edition of the compiler ([RFC 2052])

View File

@ -1020,7 +1020,7 @@ impl GlobalCacheTracker {
Ok(()) Ok(())
} }
/// Adds paths to delete from either registry_crate or registry_src whose /// Adds paths to delete from either `registry_crate` or `registry_src` whose
/// last use is older than the given timestamp. /// last use is older than the given timestamp.
fn get_registry_items_to_clean_age( fn get_registry_items_to_clean_age(
conn: &Connection, conn: &Connection,

View File

@ -290,7 +290,7 @@ pub struct PackageSet<'gctx> {
sources: RefCell<SourceMap<'gctx>>, sources: RefCell<SourceMap<'gctx>>,
gctx: &'gctx GlobalContext, gctx: &'gctx GlobalContext,
multi: Multi, multi: Multi,
/// Used to prevent reusing the PackageSet to download twice. /// Used to prevent reusing the `PackageSet` to download twice.
downloading: Cell<bool>, downloading: Cell<bool>,
/// Whether or not to use curl HTTP/2 multiplexing. /// Whether or not to use curl HTTP/2 multiplexing.
multiplexing: bool, multiplexing: bool,

View File

@ -1003,16 +1003,16 @@ pub struct UnitFor {
/// └── shared_dep build.rs /// └── shared_dep build.rs
/// ``` /// ```
/// ///
/// In this example, `foo build.rs` is HOST=true, HOST_FEATURES=false. /// In this example, `foo build.rs` is `HOST=true`, `HOST_FEATURES=false`.
/// This is so that `foo build.rs` gets the profile settings for build /// This is so that `foo build.rs` gets the profile settings for build
/// scripts (HOST=true) and features of foo (HOST_FEATURES=false) because /// scripts (`HOST=true`) and features of foo (`HOST_FEATURES=false`) because
/// build scripts need to know which features their package is being built /// build scripts need to know which features their package is being built
/// with. /// with.
/// ///
/// But in the case of `shared_dep`, when built as a build dependency, /// But in the case of `shared_dep`, when built as a build dependency,
/// both flags are true (it only wants the build-dependency features). /// both flags are true (it only wants the build-dependency features).
/// When `shared_dep` is built as a normal dependency, then `shared_dep /// When `shared_dep` is built as a normal dependency, then `shared_dep
/// build.rs` is HOST=true, HOST_FEATURES=false for the same reasons that /// build.rs` is `HOST=true`, `HOST_FEATURES=false` for the same reasons that
/// foo's build script is set that way. /// foo's build script is set that way.
host_features: bool, host_features: bool,
/// How Cargo processes the `panic` setting or profiles. /// How Cargo processes the `panic` setting or profiles.
@ -1139,7 +1139,7 @@ impl UnitFor {
/// Returns a new copy updated based on the target dependency. /// Returns a new copy updated based on the target dependency.
/// ///
/// This is where the magic happens that the host/host_features settings /// This is where the magic happens that the `host`/`host_features` settings
/// transition in a sticky fashion. As the dependency graph is being /// transition in a sticky fashion. As the dependency graph is being
/// built, once those flags are set, they stay set for the duration of /// built, once those flags are set, they stay set for the duration of
/// that portion of tree. /// that portion of tree.

View File

@ -210,7 +210,7 @@ impl FeatureOpts {
Ok(opts) Ok(opts)
} }
/// Creates a new FeatureOpts for the given behavior. /// Creates a new `FeatureOpts` for the given behavior.
pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts { pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts {
match behavior { match behavior {
ResolveBehavior::V1 => FeatureOpts::default(), ResolveBehavior::V1 => FeatureOpts::default(),
@ -259,7 +259,7 @@ pub struct CliFeatures {
} }
impl CliFeatures { impl CliFeatures {
/// Creates a new CliFeatures from the given command-line flags. /// Creates a new `CliFeatures` from the given command-line flags.
pub fn from_command_line( pub fn from_command_line(
features: &[String], features: &[String],
all_features: bool, all_features: bool,
@ -291,7 +291,7 @@ impl CliFeatures {
}) })
} }
/// Creates a new CliFeatures with the given `all_features` setting. /// Creates a new `CliFeatures` with the given `all_features` setting.
pub fn new_all(all_features: bool) -> CliFeatures { pub fn new_all(all_features: bool) -> CliFeatures {
CliFeatures { CliFeatures {
features: Rc::new(BTreeSet::new()), features: Rc::new(BTreeSet::new()),
@ -330,7 +330,7 @@ impl ResolvedFeatures {
} }
/// Variant of `activated_features` that returns `None` if this is /// Variant of `activated_features` that returns `None` if this is
/// not a valid pkg_id/is_build combination. Used in places which do /// not a valid `pkg_id/is_build` combination. Used in places which do
/// not know which packages are activated (like `cargo clean`). /// not know which packages are activated (like `cargo clean`).
pub fn activated_features_unverified( pub fn activated_features_unverified(
&self, &self,
@ -559,7 +559,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> {
Ok(()) Ok(())
} }
/// Activate a single FeatureValue for a package. /// Activate a single `FeatureValue` for a package.
fn activate_fv( fn activate_fv(
&mut self, &mut self,
pkg_id: PackageId, pkg_id: PackageId,
@ -734,7 +734,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> {
Ok(()) Ok(())
} }
/// Returns Vec of FeatureValues from a Dependency definition. /// Returns Vec of `FeatureValues` from a Dependency definition.
fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec<FeatureValue> { fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec<FeatureValue> {
let summary = self.resolve.summary(dep_id); let summary = self.resolve.summary(dep_id);
let feature_map = summary.features(); let feature_map = summary.features();
@ -749,7 +749,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> {
result result
} }
/// Returns Vec of FeatureValues from a set of command-line features. /// Returns Vec of `FeatureValues` from a set of command-line features.
fn fvs_from_requested( fn fvs_from_requested(
&self, &self,
pkg_id: PackageId, pkg_id: PackageId,

View File

@ -86,7 +86,7 @@ pub enum ResolveVersion {
/// * Introduced in 2020 in version 1.47. /// * Introduced in 2020 in version 1.47.
/// * New lockfiles use V3 by default from in 1.53 to 1.82. /// * New lockfiles use V3 by default from in 1.53 to 1.82.
V3, V3,
/// SourceId URL serialization is aware of URL encoding. For example, /// `SourceId` URL serialization is aware of URL encoding. For example,
/// `?branch=foo bar` is now encoded as `?branch=foo+bar` and can be decoded /// `?branch=foo bar` is now encoded as `?branch=foo+bar` and can be decoded
/// back and forth correctly. /// back and forth correctly.
/// ///

View File

@ -150,7 +150,7 @@ pub struct ResolveOpts {
} }
impl ResolveOpts { impl ResolveOpts {
/// Creates a ResolveOpts that resolves everything. /// Creates a `ResolveOpts` that resolves everything.
pub fn everything() -> ResolveOpts { pub fn everything() -> ResolveOpts {
ResolveOpts { ResolveOpts {
dev_deps: true, dev_deps: true,
@ -227,7 +227,7 @@ pub struct RemainingDeps {
time: u32, time: u32,
/// the data is augmented by the insertion time. /// the data is augmented by the insertion time.
/// This insures that no two items will cmp eq. /// This insures that no two items will cmp eq.
/// Forcing the OrdSet into a multi set. /// Forcing the `OrdSet` into a multi set.
data: im_rc::OrdSet<(DepsFrame, u32)>, data: im_rc::OrdSet<(DepsFrame, u32)>,
} }

View File

@ -209,7 +209,7 @@ impl SourceId {
SourceId::new(SourceKind::Git(reference), url.clone(), None) SourceId::new(SourceKind::Git(reference), url.clone(), None)
} }
/// Creates a SourceId from a remote registry URL when the registry name /// Creates a `SourceId` from a remote registry URL when the registry name
/// cannot be determined, e.g. a user passes `--index` directly from CLI. /// cannot be determined, e.g. a user passes `--index` directly from CLI.
/// ///
/// Use [`SourceId::for_alt_registry`] if a name can provided, which /// Use [`SourceId::for_alt_registry`] if a name can provided, which
@ -446,7 +446,7 @@ impl SourceId {
} }
/// Check if the precise data field stores information for this `name` /// Check if the precise data field stores information for this `name`
/// from a call to [SourceId::with_precise_registry_version]. /// from a call to [`SourceId::with_precise_registry_version`].
/// ///
/// If so return the version currently in the lock file and the version to be updated to. /// If so return the version currently in the lock file and the version to be updated to.
pub fn precise_registry_version( pub fn precise_registry_version(
@ -502,7 +502,7 @@ impl SourceId {
/// On a registry dependency we also need to keep track of the package that /// On a registry dependency we also need to keep track of the package that
/// should be updated and even which of the versions should be updated. /// should be updated and even which of the versions should be updated.
/// All of this gets encoded in the precise field using this method. /// All of this gets encoded in the precise field using this method.
/// The data can be read with [SourceId::precise_registry_version] /// The data can be read with [`SourceId::precise_registry_version`]
pub fn with_precise_registry_version( pub fn with_precise_registry_version(
self, self,
name: InternedString, name: InternedString,
@ -665,7 +665,7 @@ impl fmt::Display for SourceId {
} }
} }
/// The hash of SourceId is used in the name of some Cargo folders, so shouldn't /// The hash of `SourceId` is used in the name of some Cargo folders, so shouldn't
/// vary. `as_str` gives the serialisation of a url (which has a spec) and so /// vary. `as_str` gives the serialisation of a url (which has a spec) and so
/// insulates against possible changes in how the url crate does hashing. /// insulates against possible changes in how the url crate does hashing.
impl Hash for SourceId { impl Hash for SourceId {

View File

@ -185,7 +185,7 @@ const _: fn() = || {
}; };
/// Checks features for errors, bailing out a CargoResult:Err if invalid, /// Checks features for errors, bailing out a CargoResult:Err if invalid,
/// and creates FeatureValues for each feature. /// and creates `FeatureValues` for each feature.
fn build_feature_map( fn build_feature_map(
features: &BTreeMap<InternedString, Vec<InternedString>>, features: &BTreeMap<InternedString, Vec<InternedString>>,
dependencies: &[Dependency], dependencies: &[Dependency],
@ -351,7 +351,7 @@ fn build_feature_map(
Ok(map) Ok(map)
} }
/// FeatureValue represents the types of dependencies a feature can have. /// `FeatureValue` represents the types of dependencies a feature can have.
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum FeatureValue { pub enum FeatureValue {
/// A feature enabling another feature. /// A feature enabling another feature.

View File

@ -396,7 +396,7 @@ impl<'gctx> Workspace<'gctx> {
.unwrap_or(&self.current_manifest) .unwrap_or(&self.current_manifest)
} }
/// Returns the root Package or VirtualManifest. /// Returns the root Package or `VirtualManifest`.
pub fn root_maybe(&self) -> &MaybePackage { pub fn root_maybe(&self) -> &MaybePackage {
self.packages.get(self.root_manifest()) self.packages.get(self.root_manifest())
} }

View File

@ -265,7 +265,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<(
pub struct DepOp { pub struct DepOp {
/// Describes the crate /// Describes the crate
pub crate_spec: Option<String>, pub crate_spec: Option<String>,
/// Dependency key, overriding the package name in crate_spec /// Dependency key, overriding the package name in `crate_spec`
pub rename: Option<String>, pub rename: Option<String>,
/// Feature flags to activate /// Feature flags to activate

View File

@ -747,7 +747,7 @@ fn traverse_and_share(
new_unit new_unit
} }
/// Removes duplicate CompileMode::Doc units that would cause problems with /// Removes duplicate `CompileMode::Doc` units that would cause problems with
/// filename collisions. /// filename collisions.
/// ///
/// Rustdoc only separates units by crate name in the file directory /// Rustdoc only separates units by crate name in the file directory

View File

@ -572,7 +572,7 @@ pub fn init(opts: &NewOptions, gctx: &GlobalContext) -> CargoResult<NewProjectKi
Ok(kind) Ok(kind)
} }
/// IgnoreList /// `IgnoreList`
struct IgnoreList { struct IgnoreList {
/// git like formatted entries /// git like formatted entries
ignore: Vec<String>, ignore: Vec<String>,
@ -613,7 +613,7 @@ impl IgnoreList {
ignore_items.join("\n") + "\n" ignore_items.join("\n") + "\n"
} }
/// format_existing is used to format the IgnoreList when the ignore file /// `format_existing` is used to format the `IgnoreList` when the ignore file
/// already exists. It reads the contents of the given `BufRead` and /// already exists. It reads the contents of the given `BufRead` and
/// checks if the contents of the ignore list are already existing in the /// checks if the contents of the ignore list are already existing in the
/// file. /// file.

View File

@ -380,7 +380,7 @@ fn upgrade_dependency(
/// cargo-edit. Returns true if any file has changed. /// cargo-edit. Returns true if any file has changed.
/// ///
/// Some of the checks here are duplicating checks already done in /// Some of the checks here are duplicating checks already done in
/// upgrade_manifests/upgrade_dependency. Why? Let's say upgrade_dependency has /// `upgrade_manifests/upgrade_dependency`. Why? Let's say `upgrade_dependency` has
/// found that dependency foo was eligible for an upgrade. But foo can occur in /// found that dependency foo was eligible for an upgrade. But foo can occur in
/// multiple manifest files, and even multiple times in the same manifest file, /// multiple manifest files, and even multiple times in the same manifest file,
/// and may be pinned, renamed, etc. in some of the instances. So we still need /// and may be pinned, renamed, etc. in some of the instances. So we still need

View File

@ -98,7 +98,7 @@ pub struct CrateListingV1 {
} }
impl InstallTracker { impl InstallTracker {
/// Create an InstallTracker from information on disk. /// Create an `InstallTracker` from information on disk.
pub fn load(gctx: &GlobalContext, root: &Filesystem) -> CargoResult<InstallTracker> { pub fn load(gctx: &GlobalContext, root: &Filesystem) -> CargoResult<InstallTracker> {
let v1_lock = let v1_lock =
root.open_rw_exclusive_create(Path::new(".crates.toml"), gctx, "crate metadata")?; root.open_rw_exclusive_create(Path::new(".crates.toml"), gctx, "crate metadata")?;
@ -153,7 +153,7 @@ impl InstallTracker {
/// Returns a tuple `(freshness, map)`. `freshness` indicates if the /// Returns a tuple `(freshness, map)`. `freshness` indicates if the
/// package should be built (`Dirty`) or if it is already up-to-date /// package should be built (`Dirty`) or if it is already up-to-date
/// (`Fresh`) and should be skipped. The map maps binary names to the /// (`Fresh`) and should be skipped. The map maps binary names to the
/// PackageId that installed it (which is None if not known). /// `PackageId` that installed it (which is `None` if not known).
/// ///
/// If there are no duplicates, then it will be considered `Dirty` (i.e., /// If there are no duplicates, then it will be considered `Dirty` (i.e.,
/// it is OK to build/install). /// it is OK to build/install).
@ -250,7 +250,7 @@ impl InstallTracker {
/// Check if any executables are already installed. /// Check if any executables are already installed.
/// ///
/// Returns a map of duplicates, the key is the executable name and the /// Returns a map of duplicates, the key is the executable name and the
/// value is the PackageId that is already installed. The PackageId is /// value is the `PackageId` that is already installed. The `PackageId` is
/// None if it is an untracked executable. /// None if it is an untracked executable.
fn find_duplicates( fn find_duplicates(
&self, &self,
@ -762,7 +762,7 @@ where
} }
} }
/// Helper to convert features to a BTreeSet. /// Helper to convert features to a `BTreeSet`.
fn feature_set(features: &Rc<BTreeSet<FeatureValue>>) -> BTreeSet<String> { fn feature_set(features: &Rc<BTreeSet<FeatureValue>>) -> BTreeSet<String> {
features.iter().map(|s| s.to_string()).collect() features.iter().map(|s| s.to_string()).collect()
} }

View File

@ -176,7 +176,7 @@ fn registry<'gctx>(
)) ))
} }
/// Gets the SourceId for an index or registry setting. /// Gets the `SourceId` for an index or registry setting.
/// ///
/// The `index` and `reg` values are from the command-line or config settings. /// The `index` and `reg` values are from the command-line or config settings.
/// If both are None, and no source-replacement is configured, returns the source for crates.io. /// If both are None, and no source-replacement is configured, returns the source for crates.io.
@ -314,7 +314,7 @@ pub(crate) struct RegistrySourceIds {
/// Use when interacting with the source (querying / publishing , etc) /// Use when interacting with the source (querying / publishing , etc)
/// ///
/// The source for crates.io may be replaced by a built-in source for accessing crates.io with /// The source for crates.io may be replaced by a built-in source for accessing crates.io with
/// the sparse protocol, or a source for the testing framework (when the replace_crates_io /// the sparse protocol, or a source for the testing framework (when the `replace_crates_io`
/// function is used) /// function is used)
/// ///
/// User-defined source replacement is not applied. /// User-defined source replacement is not applied.

View File

@ -74,9 +74,9 @@ pub struct Graph<'a> {
/// For example `--features foo` will mark the "foo" node here. /// For example `--features foo` will mark the "foo" node here.
cli_features: HashSet<usize>, cli_features: HashSet<usize>,
/// Map of dependency names, used for building internal feature map for /// Map of dependency names, used for building internal feature map for
/// dep_name/feat_name syntax. /// `dep_name/feat_name` syntax.
/// ///
/// Key is the index of a package node, value is a map of dep_name to a /// Key is the index of a package node, value is a map of `dep_name` to a
/// set of `(pkg_node_index, is_optional)`. /// set of `(pkg_node_index, is_optional)`.
dep_name_map: HashMap<usize, HashMap<InternedString, HashSet<(usize, bool)>>>, dep_name_map: HashMap<usize, HashMap<InternedString, HashSet<(usize, bool)>>>,
} }
@ -126,7 +126,7 @@ impl<'a> Graph<'a> {
&self.nodes[index] &self.nodes[index]
} }
/// Given a slice of PackageIds, returns the indexes of all nodes that match. /// Given a slice of `PackageIds`, returns the indexes of all nodes that match.
pub fn indexes_from_ids(&self, package_ids: &[PackageId]) -> Vec<usize> { pub fn indexes_from_ids(&self, package_ids: &[PackageId]) -> Vec<usize> {
let mut result: Vec<(&Node, usize)> = self let mut result: Vec<(&Node, usize)> = self
.nodes .nodes

View File

@ -7,9 +7,9 @@
//! messages, guiding them to understand the issue and how to resolve it. //! messages, guiding them to understand the issue and how to resolve it.
//! //!
//! Note that there are a lot of limitations here. This reads OpenSSH //! Note that there are a lot of limitations here. This reads OpenSSH
//! known_hosts files from well-known locations, but it does not read OpenSSH //! `known_hosts` files from well-known locations, but it does not read OpenSSH
//! config files. The config file can change the behavior of how OpenSSH //! config files. The config file can change the behavior of how OpenSSH
//! handles known_hosts files. For example, some things we don't handle: //! handles `known_hosts` files. For example, some things we don't handle:
//! //!
//! - `GlobalKnownHostsFile` — Changes the location of the global host file. //! - `GlobalKnownHostsFile` — Changes the location of the global host file.
//! - `UserKnownHostsFile` — Changes the location of the user's host file. //! - `UserKnownHostsFile` — Changes the location of the user's host file.
@ -516,7 +516,7 @@ fn known_host_files(gctx: &GlobalContext) -> Vec<PathBuf> {
result result
} }
/// The location of the user's known_hosts file. /// The location of the user's `known_hosts` file.
fn user_known_host_location() -> Option<PathBuf> { fn user_known_host_location() -> Option<PathBuf> {
// NOTE: This is a potentially inaccurate prediction of what the user // NOTE: This is a potentially inaccurate prediction of what the user
// actually wants. The actual location depends on several factors: // actually wants. The actual location depends on several factors:
@ -619,7 +619,7 @@ fn hashed_hostname_matches(host: &str, hashed: &str) -> bool {
hashed_host == &result[..] hashed_host == &result[..]
} }
/// Loads an OpenSSH known_hosts file. /// Loads an OpenSSH `known_hosts` file.
fn load_hostfile(path: &Path) -> Result<Vec<KnownHost>, anyhow::Error> { fn load_hostfile(path: &Path) -> Result<Vec<KnownHost>, anyhow::Error> {
let contents = cargo_util::paths::read(path)?; let contents = cargo_util::paths::read(path)?;
Ok(load_hostfile_contents(path, &contents)) Ok(load_hostfile_contents(path, &contents))

View File

@ -1,4 +1,4 @@
//! See [GitSource]. //! See [`GitSource`].
use crate::core::global_cache_tracker; use crate::core::global_cache_tracker;
use crate::core::GitReference; use crate::core::GitReference;

View File

@ -531,7 +531,7 @@ impl Summaries {
/// ///
/// * `root` --- this is the root argument passed to `load` /// * `root` --- this is the root argument passed to `load`
/// * `name` --- the name of the package. /// * `name` --- the name of the package.
/// * `source_id` --- the registry's SourceId used when parsing JSON blobs /// * `source_id` --- the registry's `SourceId` used when parsing JSON blobs
/// to create summaries. /// to create summaries.
/// * `load` --- the actual index implementation which may be very slow to /// * `load` --- the actual index implementation which may be very slow to
/// call. We avoid this if we can. /// call. We avoid this if we can.

View File

@ -422,7 +422,7 @@ pub trait RegistryData {
/// Returns the [`Path`] to the [`Filesystem`]. /// Returns the [`Path`] to the [`Filesystem`].
fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path; fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path;
/// Block until all outstanding Poll::Pending requests are Poll::Ready. /// Block until all outstanding `Poll::Pending` requests are `Poll::Ready`.
fn block_until_ready(&mut self) -> CargoResult<()>; fn block_until_ready(&mut self) -> CargoResult<()>;
} }

View File

@ -573,7 +573,7 @@ fn credential_action(
/// Returns the token to use for the given registry. /// Returns the token to use for the given registry.
/// If a `login_url` is provided and a token is not available, the /// If a `login_url` is provided and a token is not available, the
/// login_url will be included in the returned error. /// `login_url` will be included in the returned error.
pub fn auth_token( pub fn auth_token(
gctx: &GlobalContext, gctx: &GlobalContext,
sid: &SourceId, sid: &SourceId,

View File

@ -62,7 +62,7 @@ pub trait CommandExt: Sized {
) )
} }
/// Variant of arg_package_spec that does not include the `--all` flag /// Variant of `arg_package_spec` that does not include the `--all` flag
/// (but does include `--workspace`). Used to avoid confusion with /// (but does include `--workspace`). Used to avoid confusion with
/// historical uses of `--all`. /// historical uses of `--all`.
fn arg_package_spec_no_all( fn arg_package_spec_no_all(

View File

@ -16,9 +16,9 @@ pub(super) struct Deserializer<'gctx> {
pub(super) key: ConfigKey, pub(super) key: ConfigKey,
/// Whether or not this key part is allowed to be an inner table. For /// Whether or not this key part is allowed to be an inner table. For
/// example, `profile.dev.build-override` needs to check if /// example, `profile.dev.build-override` needs to check if
/// CARGO_PROFILE_DEV_BUILD_OVERRIDE_ prefixes exist. But /// `CARGO_PROFILE_DEV_BUILD_OVERRIDE_` prefixes exist. But
/// CARGO_BUILD_TARGET should not check for prefixes because it would /// `CARGO_BUILD_TARGET` should not check for prefixes because it would
/// collide with CARGO_BUILD_TARGET_DIR. See `ConfigMapAccess` for /// collide with `CARGO_BUILD_TARGET_DIR`. See `ConfigMapAccess` for
/// details. /// details.
pub(super) env_prefix_ok: bool, pub(super) env_prefix_ok: bool,
} }

View File

@ -116,7 +116,7 @@ use super::auth::RegistryConfig;
// Helper macro for creating typed access methods. // Helper macro for creating typed access methods.
macro_rules! get_value_typed { macro_rules! get_value_typed {
($name:ident, $ty:ty, $variant:ident, $expected:expr) => { ($name:ident, $ty:ty, $variant:ident, $expected:expr) => {
/// Low-level private method for getting a config value as an OptValue. /// Low-level private method for getting a config value as an [`OptValue`].
fn $name(&self, key: &ConfigKey) -> Result<OptValue<$ty>, ConfigError> { fn $name(&self, key: &ConfigKey) -> Result<OptValue<$ty>, ConfigError> {
let cv = self.get_cv(key)?; let cv = self.get_cv(key)?;
let env = self.get_config_env::<$ty>(key)?; let env = self.get_config_env::<$ty>(key)?;
@ -244,7 +244,7 @@ pub struct GlobalContext {
/// NOTE: this should be set before `configure()`. If calling this from an integration test, /// NOTE: this should be set before `configure()`. If calling this from an integration test,
/// consider using `ConfigBuilder::enable_nightly_features` instead. /// consider using `ConfigBuilder::enable_nightly_features` instead.
pub nightly_features_allowed: bool, pub nightly_features_allowed: bool,
/// WorkspaceRootConfigs that have been found /// `WorkspaceRootConfigs` that have been found
pub ws_roots: RefCell<HashMap<PathBuf, WorkspaceRootConfig>>, pub ws_roots: RefCell<HashMap<PathBuf, WorkspaceRootConfig>>,
/// The global cache tracker is a database used to track disk cache usage. /// The global cache tracker is a database used to track disk cache usage.
global_cache_tracker: LazyCell<RefCell<GlobalCacheTracker>>, global_cache_tracker: LazyCell<RefCell<GlobalCacheTracker>>,
@ -909,7 +909,7 @@ impl GlobalContext {
} }
} }
/// Helper for StringList type to get something that is a string or list. /// Helper for `StringList` type to get something that is a string or list.
fn get_list_or_string( fn get_list_or_string(
&self, &self,
key: &ConfigKey, key: &ConfigKey,
@ -1888,7 +1888,7 @@ impl GlobalContext {
Ok(()) Ok(())
} }
/// Returns a list of [target.'cfg()'] tables. /// Returns a list of [target.'`cfg()`'] tables.
/// ///
/// The list is sorted by the table name. /// The list is sorted by the table name.
pub fn target_cfgs(&self) -> CargoResult<&Vec<(String, TargetCfgConfig)>> { pub fn target_cfgs(&self) -> CargoResult<&Vec<(String, TargetCfgConfig)>> {
@ -2916,7 +2916,7 @@ impl StringList {
} }
} }
/// StringList automatically merges config values with environment values, /// `StringList` automatically merges config values with environment values,
/// this instead follows the precedence rules, so that eg. a string list found /// this instead follows the precedence rules, so that eg. a string list found
/// in the environment will be used instead of one in a config file. /// in the environment will be used instead of one in a config file.
/// ///

View File

@ -82,7 +82,7 @@ impl<'de> serde::Deserialize<'de> for PathAndArgs {
} }
impl PathAndArgs { impl PathAndArgs {
/// Construct a PathAndArgs from a string. The string will be split on ascii whitespace, /// Construct a `PathAndArgs` from a string. The string will be split on ascii whitespace,
/// with the first item being treated as a `ConfigRelativePath` to the executable, and subsequent /// with the first item being treated as a `ConfigRelativePath` to the executable, and subsequent
/// items as arguments. /// items as arguments.
pub fn from_whitespace_separated_string(p: &Value<String>) -> PathAndArgs { pub fn from_whitespace_separated_string(p: &Value<String>) -> PathAndArgs {

View File

@ -25,7 +25,7 @@ pub struct DependencyQueue<N: Hash + Eq, E: Hash + Eq, V> {
/// package. /// package.
/// ///
/// This map is statically known and does not get updated throughout the /// This map is statically known and does not get updated throughout the
/// lifecycle of the DependencyQueue. /// lifecycle of the `DependencyQueue`.
/// ///
/// This is sort of like a `HashMap<(N, E), HashSet<N>>` map, but more /// This is sort of like a `HashMap<(N, E), HashSet<N>>` map, but more
/// easily indexable with just an `N` /// easily indexable with just an `N`
@ -113,7 +113,7 @@ impl<N: Hash + Eq + Clone, E: Eq + Hash + Clone, V> DependencyQueue<N, E, V> {
/// Creates a flattened reverse dependency list. For a given key, finds the /// Creates a flattened reverse dependency list. For a given key, finds the
/// set of nodes which depend on it, including transitively. This is different /// set of nodes which depend on it, including transitively. This is different
/// from self.reverse_dep_map because self.reverse_dep_map only maps one level /// from `self.reverse_dep_map` because `self.reverse_dep_map` only maps one level
/// of reverse dependencies. /// of reverse dependencies.
fn depth<'a, N: Hash + Eq + Clone, E: Hash + Eq + Clone>( fn depth<'a, N: Hash + Eq + Clone, E: Hash + Eq + Clone>(
key: &N, key: &N,

View File

@ -26,9 +26,9 @@ pub struct Rustc {
pub workspace_wrapper: Option<PathBuf>, pub workspace_wrapper: Option<PathBuf>,
/// Verbose version information (the output of `rustc -vV`) /// Verbose version information (the output of `rustc -vV`)
pub verbose_version: String, pub verbose_version: String,
/// The rustc version (`1.23.4-beta.2`), this comes from verbose_version. /// The rustc version (`1.23.4-beta.2`), this comes from `verbose_version`.
pub version: semver::Version, pub version: semver::Version,
/// The host triple (arch-platform-OS), this comes from verbose_version. /// The host triple (arch-platform-OS), this comes from `verbose_version`.
pub host: InternedString, pub host: InternedString,
/// The rustc full commit hash, this comes from `verbose_version`. /// The rustc full commit hash, this comes from `verbose_version`.
pub commit_hash: Option<String>, pub commit_hash: Option<String>,

View File

@ -130,7 +130,7 @@ impl OptVersionReq {
} }
/// Allows to match pre-release in SemVer-Compatible way. /// Allows to match pre-release in SemVer-Compatible way.
/// See [`semver_eval_ext`] for matches_prerelease semantics. /// See [`semver_eval_ext`] for `matches_prerelease` semantics.
pub fn matches_prerelease(&self, version: &Version) -> bool { pub fn matches_prerelease(&self, version: &Version) -> bool {
if let OptVersionReq::Req(req) = self { if let OptVersionReq::Req(req) = self {
return req.matches_prerelease(version); return req.matches_prerelease(version);

View File

@ -176,7 +176,7 @@ impl Dependency {
self.public self.public
} }
/// Get the SourceID for this dependency. /// Get the `SourceID` for this dependency.
pub fn source_id(&self, gctx: &GlobalContext) -> CargoResult<MaybeWorkspace<SourceId>> { pub fn source_id(&self, gctx: &GlobalContext) -> CargoResult<MaybeWorkspace<SourceId>> {
match &self.source.as_ref() { match &self.source.as_ref() {
Some(Source::Registry(_)) | None => { Some(Source::Registry(_)) | None => {
@ -903,7 +903,7 @@ impl PathSource {
self self
} }
/// Get the SourceID for this dependency. /// Get the `SourceID` for this dependency.
pub fn source_id(&self) -> CargoResult<SourceId> { pub fn source_id(&self) -> CargoResult<SourceId> {
SourceId::for_path(&self.path) SourceId::for_path(&self.path)
} }
@ -967,7 +967,7 @@ impl GitSource {
self self
} }
/// Get the SourceID for this dependency. /// Get the `SourceID` for this dependency.
pub fn source_id(&self) -> CargoResult<SourceId> { pub fn source_id(&self) -> CargoResult<SourceId> {
let git_url = self.git.parse::<url::Url>()?; let git_url = self.git.parse::<url::Url>()?;
let git_ref = self.git_ref(); let git_ref = self.git_ref();

View File

@ -1,4 +1,4 @@
//! Tests for cfg() expressions. //! Tests for `cfg()` expressions.
use cargo_test_support::prelude::*; use cargo_test_support::prelude::*;
use cargo_test_support::registry::Package; use cargo_test_support::registry::Package;

View File

@ -1,5 +1,5 @@
//! Tests for when multiple artifacts have the same output filename. //! Tests for when multiple artifacts have the same output filename.
//! See https://github.com/rust-lang/cargo/issues/6313 for more details. //! See <https://github.com/rust-lang/cargo/issues/6313> for more details.
//! Ideally these should never happen, but I don't think we'll ever be able to //! Ideally these should never happen, but I don't think we'll ever be able to
//! prevent all collisions. //! prevent all collisions.

View File

@ -3,7 +3,7 @@
//! //!
//! These tests use a replacement of rustc ("rustc-fix-shim") which emits JSON //! These tests use a replacement of rustc ("rustc-fix-shim") which emits JSON
//! messages based on what the test is exercising. It uses an environment //! messages based on what the test is exercising. It uses an environment
//! variable RUSTC_FIX_SHIM_SEQUENCE which determines how it should behave //! variable `RUSTC_FIX_SHIM_SEQUENCE` which determines how it should behave
//! based on how many times `rustc` has run. It keeps track of how many times //! based on how many times `rustc` has run. It keeps track of how many times
//! rustc has run in a local file. //! rustc has run in a local file.
//! //!

View File

@ -93,7 +93,7 @@ fn now() -> SystemTime {
} }
/// Helper for simulating running cargo in the past. Use with the /// Helper for simulating running cargo in the past. Use with the
/// __CARGO_TEST_LAST_USE_NOW environment variable. /// `__CARGO_TEST_LAST_USE_NOW` environment variable.
fn days_ago_unix(n: u64) -> String { fn days_ago_unix(n: u64) -> String {
days_ago(n) days_ago(n)
.duration_since(SystemTime::UNIX_EPOCH) .duration_since(SystemTime::UNIX_EPOCH)
@ -103,7 +103,7 @@ fn days_ago_unix(n: u64) -> String {
} }
/// Helper for simulating running cargo in the past. Use with the /// Helper for simulating running cargo in the past. Use with the
/// __CARGO_TEST_LAST_USE_NOW environment variable. /// `__CARGO_TEST_LAST_USE_NOW` environment variable.
fn months_ago_unix(n: u64) -> String { fn months_ago_unix(n: u64) -> String {
days_ago_unix(n * 30) days_ago_unix(n * 30)
} }

View File

@ -1,7 +1,7 @@
//! Network tests for https transport. //! Network tests for https transport.
//! //!
//! Note that these tests will generally require setting CARGO_CONTAINER_TESTS //! Note that these tests will generally require setting `CARGO_CONTAINER_TESTS`
//! or CARGO_PUBLIC_NETWORK_TESTS. //! or `CARGO_PUBLIC_NETWORK_TESTS`.
use cargo_test_support::containers::Container; use cargo_test_support::containers::Container;
use cargo_test_support::prelude::*; use cargo_test_support::prelude::*;

View File

@ -1,7 +1,7 @@
//! Network tests for SSH connections. //! Network tests for SSH connections.
//! //!
//! Note that these tests will generally require setting CARGO_CONTAINER_TESTS //! Note that these tests will generally require setting `CARGO_CONTAINER_TESTS`
//! or CARGO_PUBLIC_NETWORK_TESTS. //! or `CARGO_PUBLIC_NETWORK_TESTS`.
//! //!
//! NOTE: The container tests almost certainly won't work on Windows. //! NOTE: The container tests almost certainly won't work on Windows.