Mirror of https://github.com/rust-lang/cargo.git (synced 2025-09-25 11:14:46 +00:00)
docs: Surround identifiers in backticks
This was mostly done by clippy via `clippy::doc_markdown`. I then reviewed it to fix words that shouldn't have backticks, or places where `--fix` put the backtick in the wrong location.
Parent: 0b281678c3
Commit: 878caf7447
@@ -1,7 +1,7 @@
//! Utility for capturing a global cache last-use database based on the files
//! on a real-world system.
//!
-//! This will look in the CARGO_HOME of the current system and record last-use
+//! This will look in the `CARGO_HOME` of the current system and record last-use
//! data for all files in the cache. This is intended to provide a real-world
//! example for a benchmark that should be close to what a real set of data
//! should look like.
@@ -621,7 +621,7 @@ macro_rules! impl_from_tuple_for_inmemorydir {
};
}

-/// Extend `impl_from_tuple_for_inmemorydir`` to generate for the specified tuple and all smaller
+/// Extend `impl_from_tuple_for_inmemorydir` to generate for the specified tuple and all smaller
/// tuples
macro_rules! impl_from_tuples_for_inmemorydir {
($var1:ident $path1:ident $data1:ident, $($var:ident $path:ident $data:ident),+) => {
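The doc comment in this hunk describes a macro that expands an impl for the given tuple and then recurses on the tail to cover every smaller tuple. A standalone sketch of that peel-one-off pattern, using a toy trait rather than cargo's actual `InMemoryDir` macros:

```rust
trait Describe {
    fn describe() -> String;
}

// Implement `Describe` for one specific tuple arity.
macro_rules! impl_describe_for_tuple {
    ($($t:ident),+) => {
        impl<$($t),+> Describe for ($($t,)+) {
            fn describe() -> String {
                format!("tuple of {} elements", [$(stringify!($t)),+].len())
            }
        }
    };
}

// Expand the impl for the given tuple and all smaller tuples by peeling one
// element off the front and recursing, as the hunk above describes.
macro_rules! impl_describe_for_tuples {
    ($head:ident, $($tail:ident),+) => {
        impl_describe_for_tuple!($head, $($tail),+);
        impl_describe_for_tuples!($($tail),+);
    };
    ($head:ident) => {
        impl_describe_for_tuple!($head);
    };
}

impl_describe_for_tuples!(A, B, C, D);

fn main() {
    // The 2-arity impl exists because the 4-arity invocation recursed down to it.
    assert_eq!(<(u8, bool)>::describe(), "tuple of 2 elements");
}
```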
@@ -5,7 +5,7 @@
//! with the running container.
//!
//! Tests using containers must use `#[cargo_test(container_test)]` to disable
-//! them unless the CARGO_CONTAINER_TESTS environment variable is set.
+//! them unless the `CARGO_CONTAINER_TESTS` environment variable is set.

use cargo_util::ProcessBuilder;
use std::collections::HashMap;
@@ -36,7 +36,7 @@ pub struct ContainerHandle {
/// This can only be used on Linux. macOS and Windows docker doesn't allow
/// direct connection to the container.
pub ip_address: String,
-/// Port mappings of container_port to host_port for ports exposed via EXPOSE.
+/// Port mappings of `container_port` to `host_port` for ports exposed via EXPOSE.
pub port_mappings: HashMap<u16, u16>,
}

@@ -3,7 +3,7 @@
//! Note that cross-testing is very limited. You need to install the
//! "alternate" target to the host (32-bit for 64-bit hosts or vice-versa).
//!
-//! Set CFG_DISABLE_CROSS_TESTS=1 environment variable to disable these tests
+//! Set `CFG_DISABLE_CROSS_TESTS=1` environment variable to disable these tests
//! if you are unable to use the alternate target. Unfortunately 32-bit
//! support on macOS is going away, so macOS users are out of luck.
//!
@@ -890,7 +890,7 @@ impl Execs {
///
/// Prefer [`Execs::with_stdout_data`] where possible.
/// - `with` cannot be snapshotted
-/// - The absence of `without`` can either mean success or that the string being looked for
+/// - The absence of `without` can either mean success or that the string being looked for
/// changed.
///
/// </div>
@@ -17,7 +17,7 @@ static CARGO_INTEGRATION_TEST_DIR: &str = "cit";
static GLOBAL_ROOT: OnceLock<Mutex<Option<PathBuf>>> = OnceLock::new();

/// This is used when running cargo is pre-CARGO_TARGET_TMPDIR
-/// TODO: Remove when CARGO_TARGET_TMPDIR grows old enough.
+/// TODO: Remove when `CARGO_TARGET_TMPDIR` grows old enough.
fn global_root_legacy() -> PathBuf {
let mut path = t!(env::current_exe());
path.pop(); // chop off exe name
@@ -166,7 +166,7 @@ fn read_new_post(new_path: &Path) -> (Vec<u8>, Vec<u8>) {
///
/// - `expected_crate_name` should be something like `foo-0.0.1.crate`.
/// - `expected_files` should be a complete list of files in the crate
-/// (relative to expected_crate_name).
+/// (relative to `expected_crate_name`).
/// - `expected_contents` should be a list of `(file_name, contents)` tuples
/// to validate the contents of the given file. Only the listed files will
/// be checked (others will be ignored).
@@ -1680,7 +1680,7 @@ impl<'de> de::Deserialize<'de> for InvalidCargoFeatures {
}
}

-/// A StringOrVec can be parsed from either a TOML string or array,
+/// This can be parsed from either a TOML string or array,
/// but is always stored as a vector.
#[derive(Clone, Debug, Serialize, Eq, PartialEq, PartialOrd, Ord)]
#[cfg_attr(feature = "unstable-schema", derive(schemars::JsonSchema))]
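The type documented in this hunk accepts either a TOML string or an array but always stores a vector. A minimal standalone sketch of that shape using a serde untagged enum (assumes the `serde` and `toml` crates; not cargo's actual implementation):

```rust
use serde::Deserialize;

/// Accepts `key = "a"` or `key = ["a", "b"]`, always yielding a `Vec<String>`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum StringOrVec {
    String(String),
    Vec(Vec<String>),
}

impl StringOrVec {
    fn into_vec(self) -> Vec<String> {
        match self {
            StringOrVec::String(s) => vec![s],
            StringOrVec::Vec(v) => v,
        }
    }
}

#[derive(Debug, Deserialize)]
struct Example {
    rustflags: StringOrVec,
}

fn main() {
    let a: Example = toml::from_str(r#"rustflags = "-Cdebuginfo=2""#).unwrap();
    let b: Example = toml::from_str(r#"rustflags = ["-C", "debuginfo=2"]"#).unwrap();
    assert_eq!(a.rustflags.into_vec(), vec!["-Cdebuginfo=2"]);
    assert_eq!(b.rustflags.into_vec(), vec!["-C", "debuginfo=2"]);
}
```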
@@ -189,7 +189,7 @@ pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()>

/// Writes a file to disk atomically.
///
-/// write_atomic uses tempfile::persist to accomplish atomic writes.
+/// This uses `tempfile::persist` to accomplish atomic writes.
pub fn write_atomic<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
let path = path.as_ref();

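The atomic write described in this hunk is the usual write-to-a-temp-file-then-rename pattern. A rough standalone sketch using the `tempfile` crate's `persist` (error handling simplified; not cargo's actual helper):

```rust
use std::io::{self, Write};
use std::path::Path;

use tempfile::NamedTempFile;

/// Write `contents` to a temp file next to `path`, then atomically rename it
/// into place with `persist`.
fn write_atomic(path: &Path, contents: &[u8]) -> io::Result<()> {
    let dir = path.parent().expect("path must have a parent directory");
    let mut tmp = NamedTempFile::new_in(dir)?;
    tmp.write_all(contents)?;
    // `persist` renames the temp file over the destination.
    tmp.persist(path).map_err(|e| e.error)?;
    Ok(())
}

fn main() -> io::Result<()> {
    let target = std::env::temp_dir().join("write_atomic_example.txt");
    write_atomic(&target, b"example contents\n")?;
    println!("wrote {}", target.display());
    Ok(())
}
```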
@@ -10,11 +10,11 @@ use std::{
/// in-process unit testing by rustup.
pub trait Env {
/// Return the path to the users home dir, or None if any error occurs:
-/// see home_inner.
+/// see `home_inner`.
fn home_dir(&self) -> Option<PathBuf>;
/// Return the current working directory.
fn current_dir(&self) -> io::Result<PathBuf>;
-/// Get an environment variable, as per std::env::var_os.
+/// Get an environment variable, as per `std::env::var_os`.
fn var_os(&self, key: &str) -> Option<OsString>;
}

@@ -47,7 +47,7 @@ pub fn home_dir_with_env(env: &dyn Env) -> Option<PathBuf> {
env.home_dir()
}

-/// Variant of cargo_home where the environment source is parameterized. This is
+/// Variant of `cargo_home` where the environment source is parameterized. This is
/// specifically to support in-process testing scenarios as environment
/// variables and user home metadata are normally process global state. See the
/// [`Env`] trait.
@@ -56,10 +56,10 @@ pub fn cargo_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
cargo_home_with_cwd_env(env, &cwd)
}

-/// Variant of cargo_home_with_cwd where the environment source is
+/// Variant of `cargo_home_with_cwd` where the environment source is
/// parameterized. This is specifically to support in-process testing scenarios
/// as environment variables and user home metadata are normally process global
-/// state. See the OsEnv trait.
+/// state. See the `OsEnv` trait.
pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
match env.var_os("CARGO_HOME").filter(|h| !h.is_empty()) {
Some(home) => {
@@ -76,19 +76,19 @@ pub fn cargo_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf>
}
}

-/// Variant of cargo_home_with_cwd where the environment source is
+/// Variant of `cargo_home_with_cwd` where the environment source is
/// parameterized. This is specifically to support in-process testing scenarios
/// as environment variables and user home metadata are normally process global
-/// state. See the OsEnv trait.
+/// state. See the `OsEnv` trait.
pub fn rustup_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
let cwd = env.current_dir()?;
rustup_home_with_cwd_env(env, &cwd)
}

-/// Variant of cargo_home_with_cwd where the environment source is
+/// Variant of `cargo_home_with_cwd` where the environment source is
/// parameterized. This is specifically to support in-process testing scenarios
/// as environment variables and user home metadata are normally process global
-/// state. See the OsEnv trait.
+/// state. See the `OsEnv` trait.
pub fn rustup_home_with_cwd_env(env: &dyn Env, cwd: &Path) -> io::Result<PathBuf> {
match env.var_os("RUSTUP_HOME").filter(|h| !h.is_empty()) {
Some(home) => {
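These hunks all revolve around the same idea: the `Env` trait lets the `CARGO_HOME`/`RUSTUP_HOME` lookups be driven by a fake environment in tests instead of process-global state. A minimal standalone sketch of that pattern; the trait shape mirrors the hunk above, while `MockEnv` and the simplified lookup are illustrative only, not the real `home` crate code:

```rust
use std::ffi::OsString;
use std::io;
use std::path::PathBuf;

/// Same shape as the trait shown in the hunk above.
trait Env {
    fn home_dir(&self) -> Option<PathBuf>;
    fn current_dir(&self) -> io::Result<PathBuf>;
    fn var_os(&self, key: &str) -> Option<OsString>;
}

/// A fake environment for in-process tests: fixed home dir, fixed variables.
struct MockEnv {
    home: PathBuf,
    vars: Vec<(String, OsString)>,
}

impl Env for MockEnv {
    fn home_dir(&self) -> Option<PathBuf> {
        Some(self.home.clone())
    }
    fn current_dir(&self) -> io::Result<PathBuf> {
        Ok(PathBuf::from("/project"))
    }
    fn var_os(&self, key: &str) -> Option<OsString> {
        self.vars.iter().find(|(k, _)| k == key).map(|(_, v)| v.clone())
    }
}

/// Simplified lookup: prefer a non-empty CARGO_HOME, else fall back to `$HOME/.cargo`.
fn cargo_home_with_env(env: &dyn Env) -> io::Result<PathBuf> {
    match env.var_os("CARGO_HOME").filter(|h| !h.is_empty()) {
        Some(home) => Ok(PathBuf::from(home)),
        None => env
            .home_dir()
            .map(|h| h.join(".cargo"))
            .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "no home dir")),
    }
}

fn main() {
    let env = MockEnv {
        home: PathBuf::from("/home/alice"),
        vars: vec![],
    };
    assert_eq!(
        cargo_home_with_env(&env).unwrap(),
        PathBuf::from("/home/alice/.cargo")
    );
}
```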
@@ -24,7 +24,7 @@ pub fn parse_name_and_section(text: &str) -> Result<(&str, u8), Error> {
Ok((name, section))
}

-/// Extracts the text from a header after Tag::Heading has been received.
+/// Extracts the text from a header after `Tag::Heading` has been received.
pub fn header_text<'e>(parser: &mut EventIter<'e>) -> Result<CowStr<'e>, Error> {
let text = match parser.next() {
Some((Event::Text(t), _range)) => t,
@@ -36,7 +36,7 @@ pub struct DiagnosticSpan {
/// Is this a "primary" span -- meaning the point, or one of the points,
/// where the error occurred?
pub is_primary: bool,
-/// Source text from the start of line_start to the end of line_end.
+/// Source text from the start of `line_start` to the end of `line_end`.
pub text: Vec<DiagnosticSpanLine>,
/// Label that should be placed at this location (if any)
label: Option<String>,
@@ -121,10 +121,10 @@ impl<'a, 'gctx> BuildContext<'a, 'gctx> {

/// Gets the host architecture triple.
///
-/// For example, x86_64-unknown-linux-gnu, would be
-/// - machine: x86_64,
-/// - hardware-platform: unknown,
-/// - operating system: linux-gnu.
+/// For example, `x86_64-unknown-linux-gnu`, would be
+/// - machine: `x86_64`,
+/// - hardware-platform: `unknown`,
+/// - operating system: `linux-gnu`.
pub fn host_triple(&self) -> InternedString {
self.target_data.rustc.host
}
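For reference, the machine/hardware-platform/operating-system split described above amounts to cutting the triple at its first two dashes. A toy illustration (real triples are not always this regular; some omit the vendor component):

```rust
/// Split a target triple into (machine, hardware-platform, operating system).
fn split_triple(triple: &str) -> Option<(&str, &str, &str)> {
    let mut parts = triple.splitn(3, '-');
    Some((parts.next()?, parts.next()?, parts.next()?))
}

fn main() {
    assert_eq!(
        split_triple("x86_64-unknown-linux-gnu"),
        Some(("x86_64", "unknown", "linux-gnu"))
    );
}
```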
@@ -93,7 +93,7 @@ pub struct FileType {
}

impl FileType {
-/// The filename for this FileType crated by rustc.
+/// The filename for this `FileType` crated by rustc.
pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String {
match metadata {
Some(metadata) => format!(
@@ -107,7 +107,7 @@ impl FileType {
}
}

-/// The filename for this FileType that Cargo should use when "uplifting"
+/// The filename for this `FileType` that Cargo should use when "uplifting"
/// it to the destination directory.
pub fn uplift_filename(&self, target: &Target) -> String {
let name = match target.binary_filename() {
@@ -513,10 +513,10 @@ impl TargetInfo {
parse_crate_type(crate_type, &process, output, error, &mut output.lines())
}

-/// Returns all the file types generated by rustc for the given mode/target_kind.
+/// Returns all the file types generated by rustc for the given `mode`/`target_kind`.
///
/// The first value is a Vec of file types generated, the second value is
-/// a list of CrateTypes that are not supported by the given target.
+/// a list of `CrateTypes` that are not supported by the given target.
pub fn rustc_outputs(
&self,
mode: CompileMode,
@@ -1,8 +1,8 @@
//! A graph-like structure used to represent the rustc commands to build the package and the
//! interdependencies between them.
//!
-//! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be
-//! shared with an external build system. Each Invocation in the BuildPlan comprises a single
+//! The `BuildPlan` structure is used to store the dependency graph of a dry run so that it can be
+//! shared with an external build system. Each Invocation in the `BuildPlan` comprises a single
//! subprocess and defines the build environment, the outputs produced by the subprocess, and the
//! dependencies on other Invocations.

@@ -328,7 +328,7 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
self.layout(unit.kind).build().join(dir)
}

-/// Returns the "OUT_DIR" directory for running a build script.
+/// Returns the "`OUT_DIR`" directory for running a build script.
/// `/path/to/target/{debug,release}/build/PKG-HASH/out`
pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf {
self.build_script_run_dir(unit).join("out")
@@ -375,7 +375,7 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
.map(Arc::clone)
}

-/// Returns the path where the output for the given unit and FileType
+/// Returns the path where the output for the given unit and `FileType`
/// should be uplifted to.
///
/// Returns `None` if the unit shouldn't be uplifted (for example, a
@@ -80,7 +80,7 @@ pub struct BuildRunner<'a, 'gctx> {
pub lto: HashMap<Unit, Lto>,

/// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
-/// See Context::find_metadata_units for more details.
+/// See `Context::find_metadata_units` for more details.
pub metadata_for_doc_units: HashMap<Unit, Metadata>,

/// Set of metadata of Docscrape units that fail before completion, e.g.
@@ -415,7 +415,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
&self.bcx.unit_graph[unit]
}

-/// Returns the RunCustomBuild Unit associated with the given Unit.
+/// Returns the `RunCustomBuild` Unit associated with the given Unit.
///
/// If the package does not have a build script, this returns None.
pub fn find_build_script_unit(&self, unit: &Unit) -> Option<Unit> {
@@ -431,7 +431,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
.map(|unit_dep| unit_dep.unit.clone())
}

-/// Returns the metadata hash for the RunCustomBuild Unit associated with
+/// Returns the metadata hash for the `RunCustomBuild` Unit associated with
/// the given unit.
///
/// If the package does not have a build script, this returns None.
@@ -440,7 +440,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
Some(self.get_run_build_script_metadata(&script_unit))
}

-/// Returns the metadata hash for a RunCustomBuild unit.
+/// Returns the metadata hash for a `RunCustomBuild` unit.
pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata {
assert!(unit.mode.is_run_custom_build());
self.files().metadata(unit)
@@ -81,7 +81,7 @@ pub struct Compilation<'gctx> {
/// All directories for the output of native build commands.
///
/// This is currently used to drive some entries which are added to the
-/// LD_LIBRARY_PATH as appropriate.
+/// `LD_LIBRARY_PATH` as appropriate.
///
/// The order should be deterministic.
pub native_dirs: BTreeSet<PathBuf>,
@@ -113,10 +113,10 @@ pub struct Compilation<'gctx> {

/// Rustc process to be used by default
rustc_process: ProcessBuilder,
-/// Rustc process to be used for workspace crates instead of rustc_process
+/// Rustc process to be used for workspace crates instead of `rustc_process`
rustc_workspace_wrapper_process: ProcessBuilder,
-/// Optional rustc process to be used for primary crates instead of either rustc_process or
-/// rustc_workspace_wrapper_process
+/// Optional rustc process to be used for primary crates instead of either `rustc_process` or
+/// `rustc_workspace_wrapper_process`
primary_rustc_process: Option<ProcessBuilder>,

target_runners: HashMap<CompileKind, Option<(PathBuf, Vec<String>)>>,
@@ -403,7 +403,7 @@ impl<'gctx> Compilation<'gctx> {
}
}

-/// Prepares a rustc_tool process with additional environment variables
+/// Prepares a `rustc_tool` process with additional environment variables
/// that are only relevant in a context that has a unit
fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder {
if unit.target.is_executable() {
@@ -111,7 +111,7 @@ pub struct BuildOutput {
/// inserted during `build_map`. The rest of the entries are added
/// immediately after each build script runs.
///
-/// The `Metadata` is the unique metadata hash for the RunCustomBuild Unit of
+/// The `Metadata` is the unique metadata hash for the `RunCustomBuild` Unit of
/// the package. It needs a unique key, since the build script can be run
/// multiple times with different profiles or features. We can't embed a
/// `Unit` because this structure needs to be shareable between threads.
@@ -150,7 +150,7 @@ pub struct BuildScripts {
/// This is the set of transitive dependencies that are host-only
/// (proc-macro, plugin, build-dependency) that contain a build script.
/// Any `BuildOutput::library_paths` path relative to `target` will be
-/// added to LD_LIBRARY_PATH so that the compiler can find any dynamic
+/// added to `LD_LIBRARY_PATH` so that the compiler can find any dynamic
/// libraries a build script may have generated.
pub plugins: BTreeSet<(PackageId, Metadata)>,
}
@@ -69,13 +69,13 @@
//! `cargo rustc` extra args | ✓ | ✓
//! [`CompileMode`] | ✓ | ✓
//! Target Name | ✓ | ✓
-//! TargetKind (bin/lib/etc.) | ✓ | ✓
+//! `TargetKind` (bin/lib/etc.) | ✓ | ✓
//! Enabled Features | ✓ | ✓
//! Declared Features | ✓ |
//! Immediate dependency’s hashes | ✓[^1] | ✓
//! [`CompileKind`] (host/target) | ✓ | ✓
-//! __CARGO_DEFAULT_LIB_METADATA[^4] | | ✓
-//! package_id | | ✓
+//! `__CARGO_DEFAULT_LIB_METADATA`[^4] | | ✓
+//! `package_id` | | ✓
//! authors, description, homepage, repo | ✓ |
//! Target src path relative to ws | ✓ |
//! Target flags (test/bench/for_host/edition) | ✓ |
@@ -84,7 +84,7 @@
//! RUSTFLAGS/RUSTDOCFLAGS | ✓ |
//! [`Lto`] flags | ✓ | ✓
//! config settings[^5] | ✓ |
-//! is_std | | ✓
+//! `is_std` | | ✓
//! `[lints]` table[^6] | ✓ |
//! `[lints.rust.unexpected_cfgs.check-cfg]` | ✓ |
//!
@@ -738,7 +738,7 @@ enum LocalFingerprint {
/// for us to look at. If any of those files are newer than this file then
/// we need to recompile.
///
-/// If the `checksum` bool is true then the dep_info file is expected to
+/// If the `checksum` bool is true then the `dep_info` file is expected to
/// contain file checksums instead of file mtimes.
CheckDepInfo { dep_info: PathBuf, checksum: bool },

@@ -151,7 +151,7 @@ use crate::util::{DependencyQueue, GlobalContext, Progress, ProgressStyle, Queue

/// This structure is backed by the `DependencyQueue` type and manages the
/// queueing of compilation steps for each package. Packages enqueue units of
-/// work and then later on the entire graph is converted to DrainState and
+/// work and then later on the entire graph is converted to `DrainState` and
/// executed.
pub struct JobQueue<'gctx> {
queue: DependencyQueue<Unit, Artifact, Job>,
@@ -163,7 +163,7 @@ pub struct JobQueue<'gctx> {
/// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled.
///
-/// It is created from JobQueue when we have fully assembled the crate graph
+/// It is created from `JobQueue` when we have fully assembled the crate graph
/// (i.e., all package dependencies are known).
struct DrainState<'gctx> {
// This is the length of the DependencyQueue when starting out
@@ -265,9 +265,9 @@ struct ErrorToHandle {
/// care about individually reporting every thread that it broke; just the
/// first is enough.
///
-/// The exception where print_always is true is that we do report every
+/// The exception where `print_always` is true is that we do report every
/// instance of a rustc invocation that failed with diagnostics. This
-/// corresponds to errors from Message::Finish.
+/// corresponds to errors from `Message::Finish`.
print_always: bool,
}

@@ -567,7 +567,7 @@ impl<'gctx> Timings<'gctx> {
}

impl UnitTime {
-/// Returns the codegen time as (rmeta_time, codegen_time, percent of total)
+/// Returns the codegen time as (`rmeta_time`, `codegen_time`, percent of total)
fn codegen_time(&self) -> Option<(f64, f64, f64)> {
self.rmeta_time.map(|rmeta_time| {
let ctime = self.duration - rmeta_time;
@@ -199,7 +199,7 @@ fn attach_std_deps(
}

/// Compute all the dependencies of the given root units.
-/// The result is stored in state.unit_dependencies.
+/// The result is stored in `state.unit_dependencies`.
fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> {
for unit in roots.iter() {
// Dependencies of tests/benches should not have `panic` set.
@@ -137,7 +137,7 @@ pub const SEE_CHANNELS: &str =
"See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \
about Rust release channels.";

-/// Value of [`allow-features`](CliUnstable::allow_features]
+/// Value of [`allow-features`](CliUnstable::allow_features)
pub type AllowFeatures = BTreeSet<String>;

/// The edition of the compiler ([RFC 2052])
@@ -1020,7 +1020,7 @@ impl GlobalCacheTracker {
Ok(())
}

-/// Adds paths to delete from either registry_crate or registry_src whose
+/// Adds paths to delete from either `registry_crate` or `registry_src` whose
/// last use is older than the given timestamp.
fn get_registry_items_to_clean_age(
conn: &Connection,
@@ -290,7 +290,7 @@ pub struct PackageSet<'gctx> {
sources: RefCell<SourceMap<'gctx>>,
gctx: &'gctx GlobalContext,
multi: Multi,
-/// Used to prevent reusing the PackageSet to download twice.
+/// Used to prevent reusing the `PackageSet` to download twice.
downloading: Cell<bool>,
/// Whether or not to use curl HTTP/2 multiplexing.
multiplexing: bool,
@@ -1003,16 +1003,16 @@ pub struct UnitFor {
/// └── shared_dep build.rs
/// ```
///
-/// In this example, `foo build.rs` is HOST=true, HOST_FEATURES=false.
+/// In this example, `foo build.rs` is `HOST=true`, `HOST_FEATURES=false`.
/// This is so that `foo build.rs` gets the profile settings for build
-/// scripts (HOST=true) and features of foo (HOST_FEATURES=false) because
+/// scripts (`HOST=true`) and features of foo (`HOST_FEATURES=false`) because
/// build scripts need to know which features their package is being built
/// with.
///
/// But in the case of `shared_dep`, when built as a build dependency,
/// both flags are true (it only wants the build-dependency features).
/// When `shared_dep` is built as a normal dependency, then `shared_dep
-/// build.rs` is HOST=true, HOST_FEATURES=false for the same reasons that
+/// build.rs` is `HOST=true`, `HOST_FEATURES=false` for the same reasons that
/// foo's build script is set that way.
host_features: bool,
/// How Cargo processes the `panic` setting or profiles.
@@ -1139,7 +1139,7 @@ impl UnitFor {

/// Returns a new copy updated based on the target dependency.
///
-/// This is where the magic happens that the host/host_features settings
+/// This is where the magic happens that the `host`/`host_features` settings
/// transition in a sticky fashion. As the dependency graph is being
/// built, once those flags are set, they stay set for the duration of
/// that portion of tree.
@@ -210,7 +210,7 @@ impl FeatureOpts {
Ok(opts)
}

-/// Creates a new FeatureOpts for the given behavior.
+/// Creates a new `FeatureOpts` for the given behavior.
pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts {
match behavior {
ResolveBehavior::V1 => FeatureOpts::default(),
@@ -259,7 +259,7 @@ pub struct CliFeatures {
}

impl CliFeatures {
-/// Creates a new CliFeatures from the given command-line flags.
+/// Creates a new `CliFeatures` from the given command-line flags.
pub fn from_command_line(
features: &[String],
all_features: bool,
@@ -291,7 +291,7 @@ impl CliFeatures {
})
}

-/// Creates a new CliFeatures with the given `all_features` setting.
+/// Creates a new `CliFeatures` with the given `all_features` setting.
pub fn new_all(all_features: bool) -> CliFeatures {
CliFeatures {
features: Rc::new(BTreeSet::new()),
@@ -330,7 +330,7 @@ impl ResolvedFeatures {
}

/// Variant of `activated_features` that returns `None` if this is
-/// not a valid pkg_id/is_build combination. Used in places which do
+/// not a valid `pkg_id/is_build` combination. Used in places which do
/// not know which packages are activated (like `cargo clean`).
pub fn activated_features_unverified(
&self,
@@ -559,7 +559,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> {
Ok(())
}

-/// Activate a single FeatureValue for a package.
+/// Activate a single `FeatureValue` for a package.
fn activate_fv(
&mut self,
pkg_id: PackageId,
@@ -734,7 +734,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> {
Ok(())
}

-/// Returns Vec of FeatureValues from a Dependency definition.
+/// Returns Vec of `FeatureValues` from a Dependency definition.
fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec<FeatureValue> {
let summary = self.resolve.summary(dep_id);
let feature_map = summary.features();
@@ -749,7 +749,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> {
result
}

-/// Returns Vec of FeatureValues from a set of command-line features.
+/// Returns Vec of `FeatureValues` from a set of command-line features.
fn fvs_from_requested(
&self,
pkg_id: PackageId,
@@ -86,7 +86,7 @@ pub enum ResolveVersion {
/// * Introduced in 2020 in version 1.47.
/// * New lockfiles use V3 by default from in 1.53 to 1.82.
V3,
-/// SourceId URL serialization is aware of URL encoding. For example,
+/// `SourceId` URL serialization is aware of URL encoding. For example,
/// `?branch=foo bar` is now encoded as `?branch=foo+bar` and can be decoded
/// back and forth correctly.
///
@@ -150,7 +150,7 @@ pub struct ResolveOpts {
}

impl ResolveOpts {
-/// Creates a ResolveOpts that resolves everything.
+/// Creates a `ResolveOpts` that resolves everything.
pub fn everything() -> ResolveOpts {
ResolveOpts {
dev_deps: true,
@@ -227,7 +227,7 @@ pub struct RemainingDeps {
time: u32,
/// the data is augmented by the insertion time.
/// This insures that no two items will cmp eq.
-/// Forcing the OrdSet into a multi set.
+/// Forcing the `OrdSet` into a multi set.
data: im_rc::OrdSet<(DepsFrame, u32)>,
}

@@ -209,7 +209,7 @@ impl SourceId {
SourceId::new(SourceKind::Git(reference), url.clone(), None)
}

-/// Creates a SourceId from a remote registry URL when the registry name
+/// Creates a `SourceId` from a remote registry URL when the registry name
/// cannot be determined, e.g. a user passes `--index` directly from CLI.
///
/// Use [`SourceId::for_alt_registry`] if a name can provided, which
@@ -446,7 +446,7 @@ impl SourceId {
}

/// Check if the precise data field stores information for this `name`
-/// from a call to [SourceId::with_precise_registry_version].
+/// from a call to [`SourceId::with_precise_registry_version`].
///
/// If so return the version currently in the lock file and the version to be updated to.
pub fn precise_registry_version(
@@ -502,7 +502,7 @@ impl SourceId {
/// On a registry dependency we also need to keep track of the package that
/// should be updated and even which of the versions should be updated.
/// All of this gets encoded in the precise field using this method.
-/// The data can be read with [SourceId::precise_registry_version]
+/// The data can be read with [`SourceId::precise_registry_version`]
pub fn with_precise_registry_version(
self,
name: InternedString,
@@ -665,7 +665,7 @@ impl fmt::Display for SourceId {
}
}

-/// The hash of SourceId is used in the name of some Cargo folders, so shouldn't
+/// The hash of `SourceId` is used in the name of some Cargo folders, so shouldn't
/// vary. `as_str` gives the serialisation of a url (which has a spec) and so
/// insulates against possible changes in how the url crate does hashing.
impl Hash for SourceId {
@@ -185,7 +185,7 @@ const _: fn() = || {
};

/// Checks features for errors, bailing out a CargoResult:Err if invalid,
-/// and creates FeatureValues for each feature.
+/// and creates `FeatureValues` for each feature.
fn build_feature_map(
features: &BTreeMap<InternedString, Vec<InternedString>>,
dependencies: &[Dependency],
@@ -351,7 +351,7 @@ fn build_feature_map(
Ok(map)
}

-/// FeatureValue represents the types of dependencies a feature can have.
+/// `FeatureValue` represents the types of dependencies a feature can have.
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum FeatureValue {
/// A feature enabling another feature.
@@ -396,7 +396,7 @@ impl<'gctx> Workspace<'gctx> {
.unwrap_or(&self.current_manifest)
}

-/// Returns the root Package or VirtualManifest.
+/// Returns the root Package or `VirtualManifest`.
pub fn root_maybe(&self) -> &MaybePackage {
self.packages.get(self.root_manifest())
}
@@ -265,7 +265,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<(
pub struct DepOp {
/// Describes the crate
pub crate_spec: Option<String>,
-/// Dependency key, overriding the package name in crate_spec
+/// Dependency key, overriding the package name in `crate_spec`
pub rename: Option<String>,

/// Feature flags to activate
@@ -747,7 +747,7 @@ fn traverse_and_share(
new_unit
}

-/// Removes duplicate CompileMode::Doc units that would cause problems with
+/// Removes duplicate `CompileMode::Doc` units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
@@ -572,7 +572,7 @@ pub fn init(opts: &NewOptions, gctx: &GlobalContext) -> CargoResult<NewProjectKi
Ok(kind)
}

-/// IgnoreList
+/// `IgnoreList`
struct IgnoreList {
/// git like formatted entries
ignore: Vec<String>,
@@ -613,7 +613,7 @@ impl IgnoreList {
ignore_items.join("\n") + "\n"
}

-/// format_existing is used to format the IgnoreList when the ignore file
+/// `format_existing` is used to format the `IgnoreList` when the ignore file
/// already exists. It reads the contents of the given `BufRead` and
/// checks if the contents of the ignore list are already existing in the
/// file.
@@ -380,7 +380,7 @@ fn upgrade_dependency(
/// cargo-edit. Returns true if any file has changed.
///
/// Some of the checks here are duplicating checks already done in
-/// upgrade_manifests/upgrade_dependency. Why? Let's say upgrade_dependency has
+/// `upgrade_manifests/upgrade_dependency`. Why? Let's say `upgrade_dependency` has
/// found that dependency foo was eligible for an upgrade. But foo can occur in
/// multiple manifest files, and even multiple times in the same manifest file,
/// and may be pinned, renamed, etc. in some of the instances. So we still need
@@ -98,7 +98,7 @@ pub struct CrateListingV1 {
}

impl InstallTracker {
-/// Create an InstallTracker from information on disk.
+/// Create an `InstallTracker` from information on disk.
pub fn load(gctx: &GlobalContext, root: &Filesystem) -> CargoResult<InstallTracker> {
let v1_lock =
root.open_rw_exclusive_create(Path::new(".crates.toml"), gctx, "crate metadata")?;
@@ -153,7 +153,7 @@ impl InstallTracker {
/// Returns a tuple `(freshness, map)`. `freshness` indicates if the
/// package should be built (`Dirty`) or if it is already up-to-date
/// (`Fresh`) and should be skipped. The map maps binary names to the
-/// PackageId that installed it (which is None if not known).
+/// `PackageId` that installed it (which is `None` if not known).
///
/// If there are no duplicates, then it will be considered `Dirty` (i.e.,
/// it is OK to build/install).
@@ -250,7 +250,7 @@ impl InstallTracker {
/// Check if any executables are already installed.
///
/// Returns a map of duplicates, the key is the executable name and the
-/// value is the PackageId that is already installed. The PackageId is
+/// value is the `PackageId` that is already installed. The `PackageId` is
/// None if it is an untracked executable.
fn find_duplicates(
&self,
@@ -762,7 +762,7 @@ where
}
}

-/// Helper to convert features to a BTreeSet.
+/// Helper to convert features to a `BTreeSet`.
fn feature_set(features: &Rc<BTreeSet<FeatureValue>>) -> BTreeSet<String> {
features.iter().map(|s| s.to_string()).collect()
}
@@ -176,7 +176,7 @@ fn registry<'gctx>(
))
}

-/// Gets the SourceId for an index or registry setting.
+/// Gets the `SourceId` for an index or registry setting.
///
/// The `index` and `reg` values are from the command-line or config settings.
/// If both are None, and no source-replacement is configured, returns the source for crates.io.
@@ -314,7 +314,7 @@ pub(crate) struct RegistrySourceIds {
/// Use when interacting with the source (querying / publishing , etc)
///
/// The source for crates.io may be replaced by a built-in source for accessing crates.io with
-/// the sparse protocol, or a source for the testing framework (when the replace_crates_io
+/// the sparse protocol, or a source for the testing framework (when the `replace_crates_io`
/// function is used)
///
/// User-defined source replacement is not applied.
@@ -74,9 +74,9 @@ pub struct Graph<'a> {
/// For example `--features foo` will mark the "foo" node here.
cli_features: HashSet<usize>,
/// Map of dependency names, used for building internal feature map for
-/// dep_name/feat_name syntax.
+/// `dep_name/feat_name` syntax.
///
-/// Key is the index of a package node, value is a map of dep_name to a
+/// Key is the index of a package node, value is a map of `dep_name` to a
/// set of `(pkg_node_index, is_optional)`.
dep_name_map: HashMap<usize, HashMap<InternedString, HashSet<(usize, bool)>>>,
}
@@ -126,7 +126,7 @@ impl<'a> Graph<'a> {
&self.nodes[index]
}

-/// Given a slice of PackageIds, returns the indexes of all nodes that match.
+/// Given a slice of `PackageIds`, returns the indexes of all nodes that match.
pub fn indexes_from_ids(&self, package_ids: &[PackageId]) -> Vec<usize> {
let mut result: Vec<(&Node, usize)> = self
.nodes
@@ -7,9 +7,9 @@
//! messages, guiding them to understand the issue and how to resolve it.
//!
//! Note that there are a lot of limitations here. This reads OpenSSH
-//! known_hosts files from well-known locations, but it does not read OpenSSH
+//! `known_hosts` files from well-known locations, but it does not read OpenSSH
//! config files. The config file can change the behavior of how OpenSSH
-//! handles known_hosts files. For example, some things we don't handle:
+//! handles `known_hosts` files. For example, some things we don't handle:
//!
//! - `GlobalKnownHostsFile` — Changes the location of the global host file.
//! - `UserKnownHostsFile` — Changes the location of the user's host file.
@@ -516,7 +516,7 @@ fn known_host_files(gctx: &GlobalContext) -> Vec<PathBuf> {
result
}

-/// The location of the user's known_hosts file.
+/// The location of the user's `known_hosts` file.
fn user_known_host_location() -> Option<PathBuf> {
// NOTE: This is a potentially inaccurate prediction of what the user
// actually wants. The actual location depends on several factors:
@@ -619,7 +619,7 @@ fn hashed_hostname_matches(host: &str, hashed: &str) -> bool {
hashed_host == &result[..]
}

-/// Loads an OpenSSH known_hosts file.
+/// Loads an OpenSSH `known_hosts` file.
fn load_hostfile(path: &Path) -> Result<Vec<KnownHost>, anyhow::Error> {
let contents = cargo_util::paths::read(path)?;
Ok(load_hostfile_contents(path, &contents))
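For context, a plain (non-hashed) `known_hosts` entry is whitespace-separated: host patterns, key type, base64 key. A rough standalone sketch of splitting one such line; cargo's real loader also handles markers like `@revoked`, hashed hosts, and comments:

```rust
/// Split one plain `known_hosts` line into (host patterns, key type, key).
fn parse_known_host_line(line: &str) -> Option<(Vec<&str>, &str, &str)> {
    let mut fields = line.split_whitespace();
    let patterns: Vec<&str> = fields.next()?.split(',').collect();
    let key_type = fields.next()?;
    let key = fields.next()?;
    Some((patterns, key_type, key))
}

fn main() {
    // Hypothetical example entry, not a real host key.
    let line = "example.com,192.0.2.1 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIExampleKeyData";
    let (patterns, key_type, _key) = parse_known_host_line(line).unwrap();
    assert_eq!(patterns, ["example.com", "192.0.2.1"]);
    assert_eq!(key_type, "ssh-ed25519");
}
```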
@@ -1,4 +1,4 @@
-//! See [GitSource].
+//! See [`GitSource`].

use crate::core::global_cache_tracker;
use crate::core::GitReference;
@@ -531,7 +531,7 @@ impl Summaries {
///
/// * `root` --- this is the root argument passed to `load`
/// * `name` --- the name of the package.
-/// * `source_id` --- the registry's SourceId used when parsing JSON blobs
+/// * `source_id` --- the registry's `SourceId` used when parsing JSON blobs
/// to create summaries.
/// * `load` --- the actual index implementation which may be very slow to
/// call. We avoid this if we can.
@@ -422,7 +422,7 @@ pub trait RegistryData {
/// Returns the [`Path`] to the [`Filesystem`].
fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path;

-/// Block until all outstanding Poll::Pending requests are Poll::Ready.
+/// Block until all outstanding `Poll::Pending` requests are `Poll::Ready`.
fn block_until_ready(&mut self) -> CargoResult<()>;
}

@@ -573,7 +573,7 @@ fn credential_action(

/// Returns the token to use for the given registry.
/// If a `login_url` is provided and a token is not available, the
-/// login_url will be included in the returned error.
+/// `login_url` will be included in the returned error.
pub fn auth_token(
gctx: &GlobalContext,
sid: &SourceId,
@@ -62,7 +62,7 @@ pub trait CommandExt: Sized {
)
}

-/// Variant of arg_package_spec that does not include the `--all` flag
+/// Variant of `arg_package_spec` that does not include the `--all` flag
/// (but does include `--workspace`). Used to avoid confusion with
/// historical uses of `--all`.
fn arg_package_spec_no_all(
@@ -16,9 +16,9 @@ pub(super) struct Deserializer<'gctx> {
pub(super) key: ConfigKey,
/// Whether or not this key part is allowed to be an inner table. For
/// example, `profile.dev.build-override` needs to check if
-/// CARGO_PROFILE_DEV_BUILD_OVERRIDE_ prefixes exist. But
-/// CARGO_BUILD_TARGET should not check for prefixes because it would
-/// collide with CARGO_BUILD_TARGET_DIR. See `ConfigMapAccess` for
+/// `CARGO_PROFILE_DEV_BUILD_OVERRIDE_` prefixes exist. But
+/// `CARGO_BUILD_TARGET` should not check for prefixes because it would
+/// collide with `CARGO_BUILD_TARGET_DIR`. See `ConfigMapAccess` for
/// details.
pub(super) env_prefix_ok: bool,
}
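The prefix check described in this hunk boils down to scanning the process environment for variable names that start with a given string. A tiny standalone sketch of that idea (not the actual `ConfigMapAccess` logic):

```rust
use std::env;

/// Does any environment variable name start with `prefix`?
fn env_has_prefix(prefix: &str) -> bool {
    env::vars_os().any(|(key, _)| key.to_string_lossy().starts_with(prefix))
}

fn main() {
    // e.g. probing whether `profile.dev.build-override` has env overrides:
    println!("{}", env_has_prefix("CARGO_PROFILE_DEV_BUILD_OVERRIDE_"));
}
```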
@@ -116,7 +116,7 @@ use super::auth::RegistryConfig;
// Helper macro for creating typed access methods.
macro_rules! get_value_typed {
($name:ident, $ty:ty, $variant:ident, $expected:expr) => {
-/// Low-level private method for getting a config value as an OptValue.
+/// Low-level private method for getting a config value as an [`OptValue`].
fn $name(&self, key: &ConfigKey) -> Result<OptValue<$ty>, ConfigError> {
let cv = self.get_cv(key)?;
let env = self.get_config_env::<$ty>(key)?;
@@ -244,7 +244,7 @@ pub struct GlobalContext {
/// NOTE: this should be set before `configure()`. If calling this from an integration test,
/// consider using `ConfigBuilder::enable_nightly_features` instead.
pub nightly_features_allowed: bool,
-/// WorkspaceRootConfigs that have been found
+/// `WorkspaceRootConfigs` that have been found
pub ws_roots: RefCell<HashMap<PathBuf, WorkspaceRootConfig>>,
/// The global cache tracker is a database used to track disk cache usage.
global_cache_tracker: LazyCell<RefCell<GlobalCacheTracker>>,
@@ -909,7 +909,7 @@ impl GlobalContext {
}
}

-/// Helper for StringList type to get something that is a string or list.
+/// Helper for `StringList` type to get something that is a string or list.
fn get_list_or_string(
&self,
key: &ConfigKey,
@@ -1888,7 +1888,7 @@ impl GlobalContext {
Ok(())
}

-/// Returns a list of [target.'cfg()'] tables.
+/// Returns a list of [target.'`cfg()`'] tables.
///
/// The list is sorted by the table name.
pub fn target_cfgs(&self) -> CargoResult<&Vec<(String, TargetCfgConfig)>> {
@@ -2916,7 +2916,7 @@ impl StringList {
}
}

-/// StringList automatically merges config values with environment values,
+/// `StringList` automatically merges config values with environment values,
/// this instead follows the precedence rules, so that eg. a string list found
/// in the environment will be used instead of one in a config file.
///
@@ -82,7 +82,7 @@ impl<'de> serde::Deserialize<'de> for PathAndArgs {
}

impl PathAndArgs {
-/// Construct a PathAndArgs from a string. The string will be split on ascii whitespace,
+/// Construct a `PathAndArgs` from a string. The string will be split on ascii whitespace,
/// with the first item being treated as a `ConfigRelativePath` to the executable, and subsequent
/// items as arguments.
pub fn from_whitespace_separated_string(p: &Value<String>) -> PathAndArgs {
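The split described here is essentially `split_ascii_whitespace` with the first token taken as the program. A small standalone sketch; cargo's real type additionally resolves the executable through `ConfigRelativePath`:

```rust
/// Split a configured command string into (program, args).
fn split_path_and_args(s: &str) -> Option<(&str, Vec<&str>)> {
    let mut parts = s.split_ascii_whitespace();
    let program = parts.next()?;
    Some((program, parts.collect()))
}

fn main() {
    assert_eq!(
        split_path_and_args("firefox --new-tab"),
        Some(("firefox", vec!["--new-tab"]))
    );
}
```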
@@ -25,7 +25,7 @@ pub struct DependencyQueue<N: Hash + Eq, E: Hash + Eq, V> {
/// package.
///
/// This map is statically known and does not get updated throughout the
-/// lifecycle of the DependencyQueue.
+/// lifecycle of the `DependencyQueue`.
///
/// This is sort of like a `HashMap<(N, E), HashSet<N>>` map, but more
/// easily indexable with just an `N`
@@ -113,7 +113,7 @@ impl<N: Hash + Eq + Clone, E: Eq + Hash + Clone, V> DependencyQueue<N, E, V> {

/// Creates a flattened reverse dependency list. For a given key, finds the
/// set of nodes which depend on it, including transitively. This is different
-/// from self.reverse_dep_map because self.reverse_dep_map only maps one level
+/// from `self.reverse_dep_map` because `self.reverse_dep_map` only maps one level
/// of reverse dependencies.
fn depth<'a, N: Hash + Eq + Clone, E: Hash + Eq + Clone>(
key: &N,
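A standalone sketch of the flattening this method performs: start from the one-level reverse-dependency map and walk it transitively. The names and the worklist approach here are illustrative, not the actual `depth` implementation:

```rust
use std::collections::{HashMap, HashSet};

/// For `key`, collect every node that depends on it, directly or transitively,
/// given a one-level reverse-dependency map.
fn transitive_reverse_deps<'a>(
    key: &'a str,
    reverse_dep_map: &'a HashMap<&'a str, HashSet<&'a str>>,
) -> HashSet<&'a str> {
    let mut result = HashSet::new();
    let mut stack = vec![key];
    while let Some(n) = stack.pop() {
        if let Some(parents) = reverse_dep_map.get(n) {
            for &p in parents {
                // Only visit each reverse dependency once.
                if result.insert(p) {
                    stack.push(p);
                }
            }
        }
    }
    result
}

fn main() {
    let mut m = HashMap::new();
    m.insert("libc", HashSet::from(["serde", "rand"]));
    m.insert("serde", HashSet::from(["my-app"]));
    // "libc" is depended on by serde, rand, and (transitively) my-app.
    println!("{:?}", transitive_reverse_deps("libc", &m));
}
```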
@@ -26,9 +26,9 @@ pub struct Rustc {
pub workspace_wrapper: Option<PathBuf>,
/// Verbose version information (the output of `rustc -vV`)
pub verbose_version: String,
-/// The rustc version (`1.23.4-beta.2`), this comes from verbose_version.
+/// The rustc version (`1.23.4-beta.2`), this comes from `verbose_version`.
pub version: semver::Version,
-/// The host triple (arch-platform-OS), this comes from verbose_version.
+/// The host triple (arch-platform-OS), this comes from `verbose_version`.
pub host: InternedString,
/// The rustc full commit hash, this comes from `verbose_version`.
pub commit_hash: Option<String>,
@@ -130,7 +130,7 @@ impl OptVersionReq {
}

/// Allows to match pre-release in SemVer-Compatible way.
-/// See [`semver_eval_ext`] for matches_prerelease semantics.
+/// See [`semver_eval_ext`] for `matches_prerelease` semantics.
pub fn matches_prerelease(&self, version: &Version) -> bool {
if let OptVersionReq::Req(req) = self {
return req.matches_prerelease(version);
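For background on why a dedicated `matches_prerelease` exists: with the plain `semver` crate, a pre-release version does not satisfy an ordinary caret requirement, which is roughly the gap `semver_eval_ext` fills. A small illustration, assuming the `semver` crate's default pre-release rules:

```rust
use semver::{Version, VersionReq};

fn main() {
    let req = VersionReq::parse("^1.2").unwrap();
    let pre = Version::parse("1.3.0-beta.1").unwrap();

    // Default semver matching skips pre-releases for this requirement...
    assert!(!req.matches(&pre));
    // ...while a released version in range matches as expected.
    assert!(req.matches(&Version::parse("1.3.0").unwrap()));
}
```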
@@ -176,7 +176,7 @@ impl Dependency {
self.public
}

-/// Get the SourceID for this dependency.
+/// Get the `SourceID` for this dependency.
pub fn source_id(&self, gctx: &GlobalContext) -> CargoResult<MaybeWorkspace<SourceId>> {
match &self.source.as_ref() {
Some(Source::Registry(_)) | None => {
@@ -903,7 +903,7 @@ impl PathSource {
self
}

-/// Get the SourceID for this dependency.
+/// Get the `SourceID` for this dependency.
pub fn source_id(&self) -> CargoResult<SourceId> {
SourceId::for_path(&self.path)
}
@@ -967,7 +967,7 @@ impl GitSource {
self
}

-/// Get the SourceID for this dependency.
+/// Get the `SourceID` for this dependency.
pub fn source_id(&self) -> CargoResult<SourceId> {
let git_url = self.git.parse::<url::Url>()?;
let git_ref = self.git_ref();
@@ -1,4 +1,4 @@
-//! Tests for cfg() expressions.
+//! Tests for `cfg()` expressions.

use cargo_test_support::prelude::*;
use cargo_test_support::registry::Package;
@@ -1,5 +1,5 @@
//! Tests for when multiple artifacts have the same output filename.
-//! See https://github.com/rust-lang/cargo/issues/6313 for more details.
+//! See <https://github.com/rust-lang/cargo/issues/6313> for more details.
//! Ideally these should never happen, but I don't think we'll ever be able to
//! prevent all collisions.

@@ -3,7 +3,7 @@
//!
//! These tests use a replacement of rustc ("rustc-fix-shim") which emits JSON
//! messages based on what the test is exercising. It uses an environment
-//! variable RUSTC_FIX_SHIM_SEQUENCE which determines how it should behave
+//! variable `RUSTC_FIX_SHIM_SEQUENCE` which determines how it should behave
//! based on how many times `rustc` has run. It keeps track of how many times
//! rustc has run in a local file.
//!
@@ -93,7 +93,7 @@ fn now() -> SystemTime {
}

/// Helper for simulating running cargo in the past. Use with the
-/// __CARGO_TEST_LAST_USE_NOW environment variable.
+/// `__CARGO_TEST_LAST_USE_NOW` environment variable.
fn days_ago_unix(n: u64) -> String {
days_ago(n)
.duration_since(SystemTime::UNIX_EPOCH)
@@ -103,7 +103,7 @@ fn days_ago_unix(n: u64) -> String {
}

/// Helper for simulating running cargo in the past. Use with the
-/// __CARGO_TEST_LAST_USE_NOW environment variable.
+/// `__CARGO_TEST_LAST_USE_NOW` environment variable.
fn months_ago_unix(n: u64) -> String {
days_ago_unix(n * 30)
}
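These helpers just turn "n days ago" into a unix-seconds string for the `__CARGO_TEST_LAST_USE_NOW` override. A standalone sketch of the same computation:

```rust
use std::time::{Duration, SystemTime};

/// A unix timestamp (in seconds, as a string) for `n` days in the past.
fn days_ago_unix(n: u64) -> String {
    let then = SystemTime::now() - Duration::from_secs(n * 24 * 60 * 60);
    then.duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs()
        .to_string()
}

fn main() {
    // Could be exported as __CARGO_TEST_LAST_USE_NOW to simulate "cargo ran 30 days ago".
    println!("{}", days_ago_unix(30));
}
```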
@@ -1,7 +1,7 @@
//! Network tests for https transport.
//!
-//! Note that these tests will generally require setting CARGO_CONTAINER_TESTS
-//! or CARGO_PUBLIC_NETWORK_TESTS.
+//! Note that these tests will generally require setting `CARGO_CONTAINER_TESTS`
+//! or `CARGO_PUBLIC_NETWORK_TESTS`.

use cargo_test_support::containers::Container;
use cargo_test_support::prelude::*;
@@ -1,7 +1,7 @@
//! Network tests for SSH connections.
//!
-//! Note that these tests will generally require setting CARGO_CONTAINER_TESTS
-//! or CARGO_PUBLIC_NETWORK_TESTS.
+//! Note that these tests will generally require setting `CARGO_CONTAINER_TESTS`
+//! or `CARGO_PUBLIC_NETWORK_TESTS`.
//!
//! NOTE: The container tests almost certainly won't work on Windows.
