Auto merge of #7820 - ehuss:features2-split, r=alexcrichton
Add new feature resolver.

This adds a new resolver which handles feature unification independently of the main resolver. It can be enabled with the `-Zfeatures` flag, which takes a comma-separated list of options that enable new behaviors. See the `unstable.md` docs for details. There are two significant behavior changes:

1. Ignore targets that are not enabled.
2. Do not unify features between build_deps, dev_deps, and normal deps.

The "forks" in the unit graph are handled by adding `DepKind` to `UnitFor`. The feature resolver tracks features independently for the different dependency kinds.

Unfortunately this currently does not support decoupling proc_macro dependencies, because at resolve time it does not know which dependencies are proc_macros. Moving feature resolution to after the packages are downloaded would require massive changes and would make the unit computation much more complex. Nobody to my knowledge has requested this capability, presumably because proc_macros are relatively new, they tend not to have very many dependencies, and those dependencies tend to be proc-macro specific (like syn/quote). I lean towards adding proc-macro to the index so that it can be known during resolve time, which would be much easier to implement, but has the downside of needing to add a new field to the index.

I did not update `cargo metadata` yet; it's not really clear how it should behave. I'll need to investigate how people are currently using the feature information and figure out how it should work. Perhaps adding features to "dep_kinds" will be the solution, but I'm not sure.

The goal is to gather feedback about how well this new resolver works. There are two important things to check: whether it breaks a project, and how much it increases compile time (since packages can be built multiple times with different features). I'd like to stabilize it one piece at a time, assuming the disruption is not too great. If a project breaks or builds slower, the user can apply a backwards-compatible workaround by sprinkling additional features into `Cargo.toml` dependencies. I think `itarget` is a good candidate to try to stabilize first, since it is less likely to break things or change how things are built. If it does cause too much disruption, then we'll need to consider making it optional, enabled *somehow*.

There is an environment variable that can be set which forces Cargo to use the new feature resolver. This can be used in Cargo's own test suite to explore which tests behave differently with the different features set.
This commit is contained in commit d6fa260d1a.
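As a concrete illustration of the decoupling, here is a minimal, self-contained Rust sketch (the `DepKind` enum, the `shared_dep` name, and the feature names below are hypothetical stand-ins, not Cargo's internal types): when features are tracked per dependency kind, the same package can end up with different feature sets for its normal and build edges, and is therefore compiled once per distinct set.

```rust
use std::collections::{BTreeSet, HashMap};

// Illustrative stand-ins only; Cargo's real types are `PackageId`,
// `FeaturesFor`, `Unit`, and so on.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum DepKind {
    Normal,
    Build,
}

fn main() {
    // Features activated per (package, dependency kind). With the old
    // resolver these two entries would have been unified into one set.
    let mut activated: HashMap<(String, DepKind), BTreeSet<String>> = HashMap::new();
    activated.insert(
        ("shared_dep".to_string(), DepKind::Normal),
        ["std".to_string()].into_iter().collect(),
    );
    activated.insert(
        ("shared_dep".to_string(), DepKind::Build),
        ["build-helpers".to_string()].into_iter().collect(),
    );

    // The unit graph gets one entry per distinct (package, feature set), so
    // `shared_dep` is compiled twice, once for each feature set.
    for ((pkg, kind), features) in &activated {
        println!("build {} for {:?} deps with features {:?}", pkg, kind, features);
    }
}
```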
@ -1,19 +1,17 @@
|
||||
use crate::core::compiler::unit::UnitInterner;
|
||||
use crate::core::compiler::CompileTarget;
|
||||
use crate::core::compiler::{BuildConfig, BuildOutput, CompileKind, Unit};
|
||||
use crate::core::profiles::Profiles;
|
||||
use crate::core::{Dependency, InternedString, Workspace};
|
||||
use crate::core::{InternedString, Workspace};
|
||||
use crate::core::{PackageId, PackageSet};
|
||||
use crate::util::config::{Config, TargetConfig};
|
||||
use crate::util::config::Config;
|
||||
use crate::util::errors::CargoResult;
|
||||
use crate::util::Rustc;
|
||||
use cargo_platform::Cfg;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::str;
|
||||
|
||||
mod target_info;
|
||||
pub use self::target_info::{FileFlavor, TargetInfo};
|
||||
pub use self::target_info::{FileFlavor, RustcTargetData, TargetInfo};
|
||||
|
||||
/// The build context, containing all information about a build task.
|
||||
///
|
||||
@ -30,26 +28,14 @@ pub struct BuildContext<'a, 'cfg> {
|
||||
pub build_config: &'a BuildConfig,
|
||||
/// Extra compiler args for either `rustc` or `rustdoc`.
|
||||
pub extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
|
||||
/// Package downloader.
|
||||
pub packages: &'a PackageSet<'cfg>,
|
||||
|
||||
/// Source of interning new units as they're created.
|
||||
pub units: &'a UnitInterner<'a>,
|
||||
|
||||
/// Information about the compiler that we've detected on the local system.
|
||||
pub rustc: Rustc,
|
||||
|
||||
/// Build information for the "host", which is information about when
|
||||
/// `rustc` is invoked without a `--target` flag. This is used for
|
||||
/// procedural macros, build scripts, etc.
|
||||
host_config: TargetConfig,
|
||||
host_info: TargetInfo,
|
||||
|
||||
/// Build information for targets that we're building for. This will be
|
||||
/// empty if the `--target` flag is not passed, and currently also only ever
|
||||
/// has at most one entry, but eventually we'd like to support multi-target
|
||||
/// builds with Cargo.
|
||||
target_config: HashMap<CompileTarget, TargetConfig>,
|
||||
target_info: HashMap<CompileTarget, TargetInfo>,
|
||||
/// Information about rustc and the target platform.
|
||||
pub target_data: RustcTargetData,
|
||||
}
|
||||
|
||||
impl<'a, 'cfg> BuildContext<'a, 'cfg> {
|
||||
@ -61,74 +47,33 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
|
||||
profiles: Profiles,
|
||||
units: &'a UnitInterner<'a>,
|
||||
extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
|
||||
target_data: RustcTargetData,
|
||||
) -> CargoResult<BuildContext<'a, 'cfg>> {
|
||||
let rustc = config.load_global_rustc(Some(ws))?;
|
||||
|
||||
let host_config = config.target_cfg_triple(&rustc.host)?;
|
||||
let host_info = TargetInfo::new(
|
||||
config,
|
||||
build_config.requested_kind,
|
||||
&rustc,
|
||||
CompileKind::Host,
|
||||
)?;
|
||||
let mut target_config = HashMap::new();
|
||||
let mut target_info = HashMap::new();
|
||||
if let CompileKind::Target(target) = build_config.requested_kind {
|
||||
let tcfg = config.target_cfg_triple(target.short_name())?;
|
||||
target_config.insert(target, tcfg);
|
||||
target_info.insert(
|
||||
target,
|
||||
TargetInfo::new(
|
||||
config,
|
||||
build_config.requested_kind,
|
||||
&rustc,
|
||||
CompileKind::Target(target),
|
||||
)?,
|
||||
);
|
||||
}
|
||||
|
||||
Ok(BuildContext {
|
||||
ws,
|
||||
packages,
|
||||
config,
|
||||
rustc,
|
||||
target_config,
|
||||
target_info,
|
||||
host_config,
|
||||
host_info,
|
||||
build_config,
|
||||
profiles,
|
||||
extra_compiler_args,
|
||||
units,
|
||||
target_data,
|
||||
})
|
||||
}
|
||||
|
||||
/// Whether a dependency should be compiled for the host or target platform,
|
||||
/// specified by `CompileKind`.
|
||||
pub fn dep_platform_activated(&self, dep: &Dependency, kind: CompileKind) -> bool {
|
||||
// If this dependency is only available for certain platforms,
|
||||
// make sure we're only enabling it for that platform.
|
||||
let platform = match dep.platform() {
|
||||
Some(p) => p,
|
||||
None => return true,
|
||||
};
|
||||
let name = kind.short_name(self);
|
||||
platform.matches(name, self.cfg(kind))
|
||||
pub fn rustc(&self) -> &Rustc {
|
||||
&self.target_data.rustc
|
||||
}
|
||||
|
||||
/// Gets the user-specified linker for a particular host or target.
|
||||
pub fn linker(&self, kind: CompileKind) -> Option<PathBuf> {
|
||||
self.target_config(kind)
|
||||
self.target_data
|
||||
.target_config(kind)
|
||||
.linker
|
||||
.as_ref()
|
||||
.map(|l| l.val.clone().resolve_program(self.config))
|
||||
}
|
||||
|
||||
/// Gets the list of `cfg`s printed out from the compiler for the specified kind.
|
||||
pub fn cfg(&self, kind: CompileKind) -> &[Cfg] {
|
||||
self.info(kind).cfg()
|
||||
}
|
||||
|
||||
/// Gets the host architecture triple.
|
||||
///
|
||||
/// For example, x86_64-unknown-linux-gnu, would be
|
||||
@ -136,15 +81,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
|
||||
/// - hardware-platform: unknown,
|
||||
/// - operating system: linux-gnu.
|
||||
pub fn host_triple(&self) -> InternedString {
|
||||
self.rustc.host
|
||||
}
|
||||
|
||||
/// Gets the target configuration for a particular host or target.
|
||||
pub fn target_config(&self, kind: CompileKind) -> &TargetConfig {
|
||||
match kind {
|
||||
CompileKind::Host => &self.host_config,
|
||||
CompileKind::Target(s) => &self.target_config[&s],
|
||||
}
|
||||
self.target_data.rustc.host
|
||||
}
|
||||
|
||||
/// Gets the number of jobs specified for this build.
|
||||
@ -153,24 +90,17 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
|
||||
}
|
||||
|
||||
pub fn rustflags_args(&self, unit: &Unit<'_>) -> &[String] {
|
||||
&self.info(unit.kind).rustflags
|
||||
&self.target_data.info(unit.kind).rustflags
|
||||
}
|
||||
|
||||
pub fn rustdocflags_args(&self, unit: &Unit<'_>) -> &[String] {
|
||||
&self.info(unit.kind).rustdocflags
|
||||
&self.target_data.info(unit.kind).rustdocflags
|
||||
}
|
||||
|
||||
pub fn show_warnings(&self, pkg: PackageId) -> bool {
|
||||
pkg.source_id().is_path() || self.config.extra_verbose()
|
||||
}
|
||||
|
||||
pub fn info(&self, kind: CompileKind) -> &TargetInfo {
|
||||
match kind {
|
||||
CompileKind::Host => &self.host_info,
|
||||
CompileKind::Target(s) => &self.target_info[&s],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
|
||||
self.extra_compiler_args.get(unit)
|
||||
}
|
||||
@ -180,6 +110,9 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
|
||||
/// `lib_name` is the `links` library name and `kind` is whether it is for
|
||||
/// Host or Target.
|
||||
pub fn script_override(&self, lib_name: &str, kind: CompileKind) -> Option<&BuildOutput> {
|
||||
self.target_config(kind).links_overrides.get(lib_name)
|
||||
self.target_data
|
||||
.target_config(kind)
|
||||
.links_overrides
|
||||
.get(lib_name)
|
||||
}
|
||||
}
|
||||
|
@ -1,18 +1,18 @@
|
||||
use crate::core::compiler::CompileKind;
|
||||
use crate::core::compiler::CompileTarget;
|
||||
use crate::core::{Dependency, TargetKind, Workspace};
|
||||
use crate::util::config::{Config, StringList, TargetConfig};
|
||||
use crate::util::{CargoResult, CargoResultExt, ProcessBuilder, Rustc};
|
||||
use cargo_platform::{Cfg, CfgExpr};
|
||||
use std::cell::RefCell;
|
||||
use std::collections::hash_map::{Entry, HashMap};
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
use crate::core::compiler::CompileKind;
|
||||
use crate::core::TargetKind;
|
||||
use crate::util::config::StringList;
|
||||
use crate::util::{CargoResult, CargoResultExt, Config, ProcessBuilder, Rustc};
|
||||
use cargo_platform::{Cfg, CfgExpr};
|
||||
|
||||
/// Information about the platform target gleaned from querying rustc.
|
||||
///
|
||||
/// The `BuildContext` keeps two of these, one for the host and one for the
|
||||
/// `RustcTargetData` keeps two of these, one for the host and one for the
|
||||
/// target. If no target is specified, it uses a clone from the host.
|
||||
#[derive(Clone)]
|
||||
pub struct TargetInfo {
|
||||
@ -468,3 +468,91 @@ fn env_args(
|
||||
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
/// Collection of information about `rustc` and the host and target.
|
||||
pub struct RustcTargetData {
|
||||
/// Information about `rustc` itself.
|
||||
pub rustc: Rustc,
|
||||
/// Build information for the "host", which is information about when
|
||||
/// `rustc` is invoked without a `--target` flag. This is used for
|
||||
/// procedural macros, build scripts, etc.
|
||||
host_config: TargetConfig,
|
||||
host_info: TargetInfo,
|
||||
|
||||
/// Build information for targets that we're building for. This will be
|
||||
/// empty if the `--target` flag is not passed, and currently also only ever
|
||||
/// has at most one entry, but eventually we'd like to support multi-target
|
||||
/// builds with Cargo.
|
||||
target_config: HashMap<CompileTarget, TargetConfig>,
|
||||
target_info: HashMap<CompileTarget, TargetInfo>,
|
||||
}
|
||||
|
||||
impl RustcTargetData {
|
||||
pub fn new(ws: &Workspace<'_>, requested_kind: CompileKind) -> CargoResult<RustcTargetData> {
|
||||
let config = ws.config();
|
||||
let rustc = config.load_global_rustc(Some(ws))?;
|
||||
let host_config = config.target_cfg_triple(&rustc.host)?;
|
||||
let host_info = TargetInfo::new(config, requested_kind, &rustc, CompileKind::Host)?;
|
||||
let mut target_config = HashMap::new();
|
||||
let mut target_info = HashMap::new();
|
||||
if let CompileKind::Target(target) = requested_kind {
|
||||
let tcfg = config.target_cfg_triple(target.short_name())?;
|
||||
target_config.insert(target, tcfg);
|
||||
target_info.insert(
|
||||
target,
|
||||
TargetInfo::new(config, requested_kind, &rustc, CompileKind::Target(target))?,
|
||||
);
|
||||
}
|
||||
|
||||
Ok(RustcTargetData {
|
||||
rustc,
|
||||
target_config,
|
||||
target_info,
|
||||
host_config,
|
||||
host_info,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns a "short" name for the given kind, suitable for keying off
|
||||
/// configuration in Cargo or presenting to users.
|
||||
pub fn short_name<'a>(&'a self, kind: &'a CompileKind) -> &'a str {
|
||||
match kind {
|
||||
CompileKind::Host => &self.rustc.host,
|
||||
CompileKind::Target(target) => target.short_name(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether a dependency should be compiled for the host or target platform,
|
||||
/// specified by `CompileKind`.
|
||||
pub fn dep_platform_activated(&self, dep: &Dependency, kind: CompileKind) -> bool {
|
||||
// If this dependency is only available for certain platforms,
|
||||
// make sure we're only enabling it for that platform.
|
||||
let platform = match dep.platform() {
|
||||
Some(p) => p,
|
||||
None => return true,
|
||||
};
|
||||
let name = self.short_name(&kind);
|
||||
platform.matches(name, self.cfg(kind))
|
||||
}
|
||||
|
||||
/// Gets the list of `cfg`s printed out from the compiler for the specified kind.
|
||||
pub fn cfg(&self, kind: CompileKind) -> &[Cfg] {
|
||||
self.info(kind).cfg()
|
||||
}
|
||||
|
||||
/// Information about the given target platform, learned by querying rustc.
|
||||
pub fn info(&self, kind: CompileKind) -> &TargetInfo {
|
||||
match kind {
|
||||
CompileKind::Host => &self.host_info,
|
||||
CompileKind::Target(s) => &self.target_info[&s],
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the target configuration for a particular host or target.
|
||||
pub fn target_config(&self, kind: CompileKind) -> &TargetConfig {
|
||||
match kind {
|
||||
CompileKind::Host => &self.host_config,
|
||||
CompileKind::Target(s) => &self.target_config[&s],
|
||||
}
|
||||
}
|
||||
}
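The host/target dispatch that `RustcTargetData` centralizes (one host entry plus a map keyed by `CompileTarget`, selected through `CompileKind`) can be sketched in isolation as follows. This is a stand-alone illustration with hypothetical stand-in types, not Cargo's actual structs.

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for Cargo's `CompileTarget`, `CompileKind`, `TargetInfo`.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct CompileTarget(&'static str);

#[derive(Clone, Copy)]
enum CompileKind {
    Host,
    Target(CompileTarget),
}

struct TargetInfo {
    cfg: Vec<String>,
}

struct TargetData {
    host_info: TargetInfo,
    target_info: HashMap<CompileTarget, TargetInfo>,
}

impl TargetData {
    // Same shape as `RustcTargetData::info`: host queries use the host entry,
    // `--target` queries index the per-target map.
    fn info(&self, kind: CompileKind) -> &TargetInfo {
        match kind {
            CompileKind::Host => &self.host_info,
            CompileKind::Target(t) => &self.target_info[&t],
        }
    }
}

fn main() {
    let wasm = CompileTarget("wasm32-unknown-unknown");
    let mut target_info = HashMap::new();
    target_info.insert(wasm, TargetInfo { cfg: vec!["target_arch=\"wasm32\"".to_string()] });
    let data = TargetData {
        host_info: TargetInfo { cfg: vec!["unix".to_string()] },
        target_info,
    };
    assert_eq!(data.info(CompileKind::Target(wasm)).cfg[0], "target_arch=\"wasm32\"");
    assert_eq!(data.info(CompileKind::Host).cfg[0], "unix");
}
```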
|
||||
|
@ -84,7 +84,7 @@ impl<'cfg> Compilation<'cfg> {
|
||||
bcx: &BuildContext<'a, 'cfg>,
|
||||
default_kind: CompileKind,
|
||||
) -> CargoResult<Compilation<'cfg>> {
|
||||
let mut rustc = bcx.rustc.process();
|
||||
let mut rustc = bcx.rustc().process();
|
||||
|
||||
let mut primary_unit_rustc_process = bcx.build_config.primary_unit_rustc.clone();
|
||||
|
||||
@ -102,8 +102,16 @@ impl<'cfg> Compilation<'cfg> {
|
||||
root_output: PathBuf::from("/"),
|
||||
deps_output: PathBuf::from("/"),
|
||||
host_deps_output: PathBuf::from("/"),
|
||||
host_dylib_path: bcx.info(CompileKind::Host).sysroot_host_libdir.clone(),
|
||||
target_dylib_path: bcx.info(default_kind).sysroot_target_libdir.clone(),
|
||||
host_dylib_path: bcx
|
||||
.target_data
|
||||
.info(CompileKind::Host)
|
||||
.sysroot_host_libdir
|
||||
.clone(),
|
||||
target_dylib_path: bcx
|
||||
.target_data
|
||||
.info(default_kind)
|
||||
.sysroot_target_libdir
|
||||
.clone(),
|
||||
tests: Vec::new(),
|
||||
binaries: Vec::new(),
|
||||
extra_env: HashMap::new(),
|
||||
@ -114,7 +122,7 @@ impl<'cfg> Compilation<'cfg> {
|
||||
rustc_process: rustc,
|
||||
primary_unit_rustc_process,
|
||||
host: bcx.host_triple().to_string(),
|
||||
target: default_kind.short_name(bcx).to_string(),
|
||||
target: bcx.target_data.short_name(&default_kind).to_string(),
|
||||
target_runner: target_runner(bcx, default_kind)?,
|
||||
})
|
||||
}
|
||||
@ -286,7 +294,7 @@ fn target_runner(
|
||||
bcx: &BuildContext<'_, '_>,
|
||||
kind: CompileKind,
|
||||
) -> CargoResult<Option<(PathBuf, Vec<String>)>> {
|
||||
let target = kind.short_name(bcx);
|
||||
let target = bcx.target_data.short_name(&kind);
|
||||
|
||||
// try target.{}.runner
|
||||
let key = format!("target.{}.runner", target);
|
||||
@ -296,7 +304,7 @@ fn target_runner(
|
||||
}
|
||||
|
||||
// try target.'cfg(...)'.runner
|
||||
let target_cfg = bcx.info(kind).cfg();
|
||||
let target_cfg = bcx.target_data.info(kind).cfg();
|
||||
let mut cfgs = bcx
|
||||
.config
|
||||
.target_cfgs()?
|
||||
|
@ -1,4 +1,3 @@
|
||||
use crate::core::compiler::BuildContext;
|
||||
use crate::core::{InternedString, Target};
|
||||
use crate::util::errors::{CargoResult, CargoResultExt};
|
||||
use serde::Serialize;
|
||||
@ -40,15 +39,6 @@ impl CompileKind {
|
||||
CompileKind::Target(n) => CompileKind::Target(n),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a "short" name for this kind, suitable for keying off
|
||||
/// configuration in Cargo or presenting to users.
|
||||
pub fn short_name(&self, bcx: &BuildContext<'_, '_>) -> &str {
|
||||
match self {
|
||||
CompileKind::Host => bcx.host_triple().as_str(),
|
||||
CompileKind::Target(target) => target.short_name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Abstraction for the representation of a compilation target that Cargo has.
|
||||
|
@ -147,7 +147,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
|
||||
/// Returns `None` if the unit should not use a metadata data hash (like
|
||||
/// rustdoc, or some dylibs).
|
||||
pub fn metadata(&self, unit: &Unit<'a>) -> Option<Metadata> {
|
||||
self.metas[unit].clone()
|
||||
self.metas[unit]
|
||||
}
|
||||
|
||||
/// Gets the short hash based only on the `PackageId`.
|
||||
@ -259,13 +259,13 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
|
||||
) -> CargoResult<PathBuf> {
|
||||
assert!(target.is_bin());
|
||||
let dest = self.layout(kind).dest();
|
||||
let info = bcx.info(kind);
|
||||
let info = bcx.target_data.info(kind);
|
||||
let file_types = info
|
||||
.file_types(
|
||||
"bin",
|
||||
FileFlavor::Normal,
|
||||
&TargetKind::Bin,
|
||||
kind.short_name(bcx),
|
||||
bcx.target_data.short_name(&kind),
|
||||
)?
|
||||
.expect("target must support `bin`");
|
||||
|
||||
@ -402,7 +402,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
|
||||
|
||||
let out_dir = self.out_dir(unit);
|
||||
let link_stem = self.link_stem(unit);
|
||||
let info = bcx.info(unit.kind);
|
||||
let info = bcx.target_data.info(unit.kind);
|
||||
let file_stem = self.file_stem(unit);
|
||||
|
||||
let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> {
|
||||
@ -415,7 +415,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
|
||||
crate_type,
|
||||
flavor,
|
||||
unit.target.kind(),
|
||||
unit.kind.short_name(bcx),
|
||||
bcx.target_data.short_name(&unit.kind),
|
||||
)?;
|
||||
|
||||
match file_types {
|
||||
@ -489,14 +489,14 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
|
||||
does not support these crate types",
|
||||
unsupported.join(", "),
|
||||
unit.pkg,
|
||||
unit.kind.short_name(bcx),
|
||||
bcx.target_data.short_name(&unit.kind),
|
||||
)
|
||||
}
|
||||
anyhow::bail!(
|
||||
"cannot compile `{}` as the target `{}` does not \
|
||||
support any of the output crate types",
|
||||
unit.pkg,
|
||||
unit.kind.short_name(bcx),
|
||||
bcx.target_data.short_name(&unit.kind),
|
||||
);
|
||||
}
|
||||
Ok(ret)
|
||||
@ -515,7 +515,7 @@ fn metadata_of<'a, 'cfg>(
|
||||
metadata_of(&dep.unit, cx, metas);
|
||||
}
|
||||
}
|
||||
metas[unit].clone()
|
||||
metas[unit]
|
||||
}
|
||||
|
||||
fn compute_metadata<'a, 'cfg>(
|
||||
@ -551,11 +551,12 @@ fn compute_metadata<'a, 'cfg>(
|
||||
// doing this eventually.
|
||||
let bcx = &cx.bcx;
|
||||
let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
|
||||
let short_name = bcx.target_data.short_name(&unit.kind);
|
||||
if !(unit.mode.is_any_test() || unit.mode.is_check())
|
||||
&& (unit.target.is_dylib()
|
||||
|| unit.target.is_cdylib()
|
||||
|| (unit.target.is_executable() && unit.kind.short_name(bcx).starts_with("wasm32-"))
|
||||
|| (unit.target.is_executable() && unit.kind.short_name(bcx).contains("msvc")))
|
||||
|| (unit.target.is_executable() && short_name.starts_with("wasm32-"))
|
||||
|| (unit.target.is_executable() && short_name.contains("msvc")))
|
||||
&& unit.pkg.package_id().source_id().is_path()
|
||||
&& __cargo_default_lib_metadata.is_err()
|
||||
{
|
||||
@ -609,7 +610,7 @@ fn compute_metadata<'a, 'cfg>(
|
||||
unit.target.name().hash(&mut hasher);
|
||||
unit.target.kind().hash(&mut hasher);
|
||||
|
||||
bcx.rustc.verbose_version.hash(&mut hasher);
|
||||
bcx.rustc().verbose_version.hash(&mut hasher);
|
||||
|
||||
if cx.is_primary_package(unit) {
|
||||
// This is primarily here for clippy. This ensures that the clippy
|
||||
|
@ -450,7 +450,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
|
||||
suggestion,
|
||||
crate::version(),
|
||||
self.bcx.host_triple(),
|
||||
unit.kind.short_name(self.bcx),
|
||||
self.bcx.target_data.short_name(&unit.kind),
|
||||
unit,
|
||||
other_unit))
|
||||
}
|
||||
|
@ -180,7 +180,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
|
||||
cmd.env("OUT_DIR", &script_out_dir)
|
||||
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
|
||||
.env("NUM_JOBS", &bcx.jobs().to_string())
|
||||
.env("TARGET", unit.kind.short_name(bcx))
|
||||
.env("TARGET", bcx.target_data.short_name(&unit.kind))
|
||||
.env("DEBUG", debug.to_string())
|
||||
.env("OPT_LEVEL", &unit.profile.opt_level.to_string())
|
||||
.env(
|
||||
@ -191,11 +191,11 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
|
||||
},
|
||||
)
|
||||
.env("HOST", &bcx.host_triple())
|
||||
.env("RUSTC", &bcx.rustc.path)
|
||||
.env("RUSTC", &bcx.rustc().path)
|
||||
.env("RUSTDOC", &*bcx.config.rustdoc()?)
|
||||
.inherit_jobserver(&cx.jobserver);
|
||||
|
||||
if let Some(linker) = &bcx.target_config(unit.kind).linker {
|
||||
if let Some(linker) = &bcx.target_data.target_config(unit.kind).linker {
|
||||
cmd.env(
|
||||
"RUSTC_LINKER",
|
||||
linker.val.clone().resolve_program(bcx.config),
|
||||
@ -213,7 +213,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
|
||||
}
|
||||
|
||||
let mut cfg_map = HashMap::new();
|
||||
for cfg in bcx.cfg(unit.kind) {
|
||||
for cfg in bcx.target_data.cfg(unit.kind) {
|
||||
match *cfg {
|
||||
Cfg::Name(ref n) => {
|
||||
cfg_map.insert(n.clone(), None);
|
||||
|
@ -1122,7 +1122,7 @@ fn calculate_normal<'a, 'cfg>(
|
||||
let m = unit.pkg.manifest().metadata();
|
||||
let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository));
|
||||
Ok(Fingerprint {
|
||||
rustc: util::hash_u64(&cx.bcx.rustc.verbose_version),
|
||||
rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
|
||||
target: util::hash_u64(&unit.target),
|
||||
profile: profile_hash,
|
||||
// Note that .0 is hashed here, not .1 which is the cwd. That doesn't
|
||||
@ -1180,7 +1180,7 @@ fn calculate_run_custom_build<'a, 'cfg>(
|
||||
|
||||
Ok(Fingerprint {
|
||||
local: Mutex::new(local),
|
||||
rustc: util::hash_u64(&cx.bcx.rustc.verbose_version),
|
||||
rustc: util::hash_u64(&cx.bcx.rustc().verbose_version),
|
||||
deps,
|
||||
outputs: if overridden { Vec::new() } else { vec![output] },
|
||||
|
||||
|
@ -28,7 +28,7 @@ use lazycell::LazyCell;
|
||||
use log::debug;
|
||||
|
||||
pub use self::build_config::{BuildConfig, CompileMode, MessageFormat};
|
||||
pub use self::build_context::{BuildContext, FileFlavor, TargetInfo};
|
||||
pub use self::build_context::{BuildContext, FileFlavor, RustcTargetData, TargetInfo};
|
||||
use self::build_plan::BuildPlan;
|
||||
pub use self::compilation::{Compilation, Doctest};
|
||||
pub use self::compile_kind::{CompileKind, CompileTarget};
|
||||
|
@ -1,8 +1,9 @@
|
||||
//! Code for building the standard library.
|
||||
|
||||
use crate::core::compiler::{BuildContext, CompileKind, CompileMode, Unit};
|
||||
use crate::core::compiler::{BuildContext, CompileKind, CompileMode, RustcTargetData, Unit};
|
||||
use crate::core::profiles::UnitFor;
|
||||
use crate::core::resolver::ResolveOpts;
|
||||
use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures};
|
||||
use crate::core::resolver::{HasDevUnits, ResolveOpts};
|
||||
use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace};
|
||||
use crate::ops::{self, Packages};
|
||||
use crate::util::errors::CargoResult;
|
||||
@ -31,8 +32,10 @@ pub fn parse_unstable_flag(value: Option<&str>) -> Vec<String> {
|
||||
/// Resolve the standard library dependencies.
|
||||
pub fn resolve_std<'cfg>(
|
||||
ws: &Workspace<'cfg>,
|
||||
target_data: &RustcTargetData,
|
||||
requested_target: CompileKind,
|
||||
crates: &[String],
|
||||
) -> CargoResult<(PackageSet<'cfg>, Resolve)> {
|
||||
) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> {
|
||||
let src_path = detect_sysroot_src_path(ws)?;
|
||||
let to_patch = [
|
||||
"rustc-std-workspace-core",
|
||||
@ -99,8 +102,19 @@ pub fn resolve_std<'cfg>(
|
||||
/*dev_deps*/ false, &features, /*all_features*/ false,
|
||||
/*uses_default_features*/ true,
|
||||
);
|
||||
let resolve = ops::resolve_ws_with_opts(&std_ws, opts, &specs)?;
|
||||
Ok((resolve.pkg_set, resolve.targeted_resolve))
|
||||
let resolve = ops::resolve_ws_with_opts(
|
||||
&std_ws,
|
||||
target_data,
|
||||
requested_target,
|
||||
&opts,
|
||||
&specs,
|
||||
HasDevUnits::No,
|
||||
)?;
|
||||
Ok((
|
||||
resolve.pkg_set,
|
||||
resolve.targeted_resolve,
|
||||
resolve.resolved_features,
|
||||
))
|
||||
}
|
||||
|
||||
/// Generate a list of root `Unit`s for the standard library.
|
||||
@ -110,6 +124,7 @@ pub fn generate_std_roots<'a>(
|
||||
bcx: &BuildContext<'a, '_>,
|
||||
crates: &[String],
|
||||
std_resolve: &'a Resolve,
|
||||
std_features: &ResolvedFeatures,
|
||||
kind: CompileKind,
|
||||
) -> CargoResult<Vec<Unit<'a>>> {
|
||||
// Generate the root Units for the standard library.
|
||||
@ -139,7 +154,8 @@ pub fn generate_std_roots<'a>(
|
||||
unit_for,
|
||||
mode,
|
||||
);
|
||||
let features = std_resolve.features_sorted(pkg.package_id());
|
||||
let features =
|
||||
std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev);
|
||||
Ok(bcx.units.intern(
|
||||
pkg, lib, profile, kind, mode, features, /*is_std*/ true,
|
||||
))
|
||||
|
@ -605,15 +605,17 @@ fn d_as_f64(d: Duration) -> f64 {
|
||||
|
||||
fn render_rustc_info(bcx: &BuildContext<'_, '_>) -> String {
|
||||
let version = bcx
|
||||
.rustc
|
||||
.rustc()
|
||||
.verbose_version
|
||||
.lines()
|
||||
.next()
|
||||
.expect("rustc version");
|
||||
let requested_target = bcx.build_config.requested_kind.short_name(bcx);
|
||||
let requested_target = bcx.target_data.short_name(&bcx.build_config.requested_kind);
|
||||
format!(
|
||||
"{}<br>Host: {}<br>Target: {}",
|
||||
version, bcx.rustc.host, requested_target
|
||||
version,
|
||||
bcx.rustc().host,
|
||||
requested_target
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::core::compiler::{CompileKind, CompileMode};
|
||||
use crate::core::{profiles::Profile, Package, Target};
|
||||
use crate::core::{profiles::Profile, InternedString, Package, Target};
|
||||
use crate::util::hex::short_hash;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashSet;
|
||||
@ -50,7 +50,7 @@ pub struct UnitInner<'a> {
|
||||
pub mode: CompileMode,
|
||||
/// The `cfg` features to enable for this unit.
|
||||
/// This must be sorted.
|
||||
pub features: Vec<&'a str>,
|
||||
pub features: Vec<InternedString>,
|
||||
/// Whether this is a standard library unit.
|
||||
pub is_std: bool,
|
||||
}
|
||||
@ -145,7 +145,7 @@ impl<'a> UnitInterner<'a> {
|
||||
profile: Profile,
|
||||
kind: CompileKind,
|
||||
mode: CompileMode,
|
||||
features: Vec<&'a str>,
|
||||
features: Vec<InternedString>,
|
||||
is_std: bool,
|
||||
) -> Unit<'a> {
|
||||
let inner = self.intern_inner(&UnitInner {
|
||||
|
@ -20,6 +20,7 @@ use crate::core::compiler::{BuildContext, CompileKind, CompileMode};
|
||||
use crate::core::dependency::DepKind;
|
||||
use crate::core::package::Downloads;
|
||||
use crate::core::profiles::{Profile, UnitFor};
|
||||
use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures};
|
||||
use crate::core::resolver::Resolve;
|
||||
use crate::core::{InternedString, Package, PackageId, Target};
|
||||
use crate::CargoResult;
|
||||
@ -53,7 +54,9 @@ struct State<'a, 'cfg> {
|
||||
unit_dependencies: UnitGraph<'a>,
|
||||
package_cache: HashMap<PackageId, &'a Package>,
|
||||
usr_resolve: &'a Resolve,
|
||||
usr_features: &'a ResolvedFeatures,
|
||||
std_resolve: Option<&'a Resolve>,
|
||||
std_features: Option<&'a ResolvedFeatures>,
|
||||
/// This flag is `true` while generating the dependencies for the standard
|
||||
/// library.
|
||||
is_std: bool,
|
||||
@ -62,10 +65,15 @@ struct State<'a, 'cfg> {
|
||||
pub fn build_unit_dependencies<'a, 'cfg>(
|
||||
bcx: &'a BuildContext<'a, 'cfg>,
|
||||
resolve: &'a Resolve,
|
||||
std_resolve: Option<&'a Resolve>,
|
||||
features: &'a ResolvedFeatures,
|
||||
std_resolve: Option<&'a (Resolve, ResolvedFeatures)>,
|
||||
roots: &[Unit<'a>],
|
||||
std_roots: &[Unit<'a>],
|
||||
) -> CargoResult<UnitGraph<'a>> {
|
||||
let (std_resolve, std_features) = match std_resolve {
|
||||
Some((r, f)) => (Some(r), Some(f)),
|
||||
None => (None, None),
|
||||
};
|
||||
let mut state = State {
|
||||
bcx,
|
||||
downloads: bcx.packages.enable_download()?,
|
||||
@ -73,7 +81,9 @@ pub fn build_unit_dependencies<'a, 'cfg>(
|
||||
unit_dependencies: HashMap::new(),
|
||||
package_cache: HashMap::new(),
|
||||
usr_resolve: resolve,
|
||||
usr_features: features,
|
||||
std_resolve,
|
||||
std_features,
|
||||
is_std: false,
|
||||
};
|
||||
|
||||
@ -168,7 +178,7 @@ fn deps_of_roots<'a, 'cfg>(roots: &[Unit<'a>], mut state: &mut State<'a, 'cfg>)
|
||||
} else if unit.target.is_custom_build() {
|
||||
// This normally doesn't happen, except `clean` aggressively
|
||||
// generates all units.
|
||||
UnitFor::new_build()
|
||||
UnitFor::new_build(false)
|
||||
} else if unit.target.for_host() {
|
||||
// Proc macro / plugin should never have panic set.
|
||||
UnitFor::new_compiler()
|
||||
@ -220,7 +230,7 @@ fn compute_deps<'a, 'cfg>(
|
||||
unit_for: UnitFor,
|
||||
) -> CargoResult<Vec<UnitDep<'a>>> {
|
||||
if unit.mode.is_run_custom_build() {
|
||||
return compute_deps_custom_build(unit, state);
|
||||
return compute_deps_custom_build(unit, unit_for, state);
|
||||
} else if unit.mode.is_doc() {
|
||||
// Note: this does not include doc test.
|
||||
return compute_deps_doc(unit, state);
|
||||
@ -228,7 +238,7 @@ fn compute_deps<'a, 'cfg>(
|
||||
|
||||
let bcx = state.bcx;
|
||||
let id = unit.pkg.package_id();
|
||||
let deps = state.resolve().deps(id).filter(|&(_id, deps)| {
|
||||
let filtered_deps = state.resolve().deps(id).filter(|&(_id, deps)| {
|
||||
assert!(!deps.is_empty());
|
||||
deps.iter().any(|dep| {
|
||||
// If this target is a build command, then we only want build
|
||||
@ -250,18 +260,34 @@ fn compute_deps<'a, 'cfg>(
|
||||
|
||||
// If this dependency is only available for certain platforms,
|
||||
// make sure we're only enabling it for that platform.
|
||||
if !bcx.dep_platform_activated(dep, unit.kind) {
|
||||
if !bcx.target_data.dep_platform_activated(dep, unit.kind) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If this is an optional dependency, and the new feature resolver
|
||||
// did not enable it, don't include it.
|
||||
if dep.is_optional() {
|
||||
let features_for = match unit_for.is_for_build_dep() {
|
||||
true => FeaturesFor::BuildDep,
|
||||
false => FeaturesFor::NormalOrDev,
|
||||
};
|
||||
|
||||
let feats = state.activated_features(id, features_for);
|
||||
if !feats.contains(&dep.name_in_toml()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// If we've gotten past all that, then this dependency is
|
||||
// actually used!
|
||||
true
|
||||
})
|
||||
});
|
||||
// Separate line to avoid rustfmt indentation. Must collect due to `state` capture.
|
||||
let filtered_deps: Vec<_> = filtered_deps.collect();
|
||||
|
||||
let mut ret = Vec::new();
|
||||
for (id, _) in deps {
|
||||
for (id, _) in filtered_deps {
|
||||
let pkg = match state.get(id)? {
|
||||
Some(pkg) => pkg,
|
||||
None => continue,
|
||||
@ -271,7 +297,10 @@ fn compute_deps<'a, 'cfg>(
|
||||
None => continue,
|
||||
};
|
||||
let mode = check_or_build_mode(unit.mode, lib);
|
||||
let dep_unit_for = unit_for.with_for_host(lib.for_host());
|
||||
let dep_unit_for = unit_for
|
||||
.with_for_host(lib.for_host())
|
||||
// If it is a custom build script, then it *only* has build dependencies.
|
||||
.with_build_dep(unit.target.is_custom_build());
|
||||
|
||||
if bcx.config.cli_unstable().dual_proc_macros && lib.proc_macro() && !unit.kind.is_host() {
|
||||
let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, unit.kind, mode)?;
|
||||
@ -299,7 +328,7 @@ fn compute_deps<'a, 'cfg>(
|
||||
if unit.target.is_custom_build() {
|
||||
return Ok(ret);
|
||||
}
|
||||
ret.extend(dep_build_script(unit, state)?);
|
||||
ret.extend(dep_build_script(unit, unit_for, state)?);
|
||||
|
||||
// If this target is a binary, test, example, etc, then it depends on
|
||||
// the library of the same package. The call to `resolve.deps` above
|
||||
@ -326,7 +355,7 @@ fn compute_deps<'a, 'cfg>(
|
||||
t.is_bin() &&
|
||||
// Skip binaries with required features that have not been selected.
|
||||
t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
|
||||
unit.features.contains(&f.as_str())
|
||||
unit.features.contains(&InternedString::new(f.as_str()))
|
||||
})
|
||||
})
|
||||
.map(|t| {
|
||||
@ -353,6 +382,7 @@ fn compute_deps<'a, 'cfg>(
|
||||
/// the returned set of units must all be run before `unit` is run.
|
||||
fn compute_deps_custom_build<'a, 'cfg>(
|
||||
unit: &Unit<'a>,
|
||||
unit_for: UnitFor,
|
||||
state: &mut State<'a, 'cfg>,
|
||||
) -> CargoResult<Vec<UnitDep<'a>>> {
|
||||
if let Some(links) = unit.pkg.manifest().links() {
|
||||
@ -361,6 +391,9 @@ fn compute_deps_custom_build<'a, 'cfg>(
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
}
|
||||
// All dependencies of this unit should use profiles for custom
|
||||
// builds.
|
||||
let script_unit_for = UnitFor::new_build(unit_for.is_for_build_dep());
|
||||
// When not overridden, then the dependencies to run a build script are:
|
||||
//
|
||||
// 1. Compiling the build script itself.
|
||||
@ -375,9 +408,7 @@ fn compute_deps_custom_build<'a, 'cfg>(
|
||||
unit,
|
||||
unit.pkg,
|
||||
unit.target,
|
||||
// All dependencies of this unit should use profiles for custom
|
||||
// builds.
|
||||
UnitFor::new_build(),
|
||||
script_unit_for,
|
||||
// Build scripts always compiled for the host.
|
||||
CompileKind::Host,
|
||||
CompileMode::Build,
|
||||
@ -396,7 +427,7 @@ fn compute_deps_doc<'a, 'cfg>(
|
||||
.deps(unit.pkg.package_id())
|
||||
.filter(|&(_id, deps)| {
|
||||
deps.iter().any(|dep| match dep.kind() {
|
||||
DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind),
|
||||
DepKind::Normal => bcx.target_data.dep_platform_activated(dep, unit.kind),
|
||||
_ => false,
|
||||
})
|
||||
});
|
||||
@ -444,7 +475,7 @@ fn compute_deps_doc<'a, 'cfg>(
|
||||
}
|
||||
|
||||
// Be sure to build/run the build script for documented libraries.
|
||||
ret.extend(dep_build_script(unit, state)?);
|
||||
ret.extend(dep_build_script(unit, UnitFor::new_normal(), state)?);
|
||||
|
||||
// If we document a binary/example, we need the library available.
|
||||
if unit.target.is_bin() || unit.target.is_example() {
|
||||
@ -486,6 +517,7 @@ fn maybe_lib<'a>(
|
||||
/// build script.
|
||||
fn dep_build_script<'a>(
|
||||
unit: &Unit<'a>,
|
||||
unit_for: UnitFor,
|
||||
state: &State<'a, '_>,
|
||||
) -> CargoResult<Option<UnitDep<'a>>> {
|
||||
unit.pkg
|
||||
@ -499,12 +531,38 @@ fn dep_build_script<'a>(
|
||||
.bcx
|
||||
.profiles
|
||||
.get_profile_run_custom_build(&unit.profile);
|
||||
// UnitFor::new_build is used because we want the `host` flag set
|
||||
// for all of our build dependencies (so they all get
|
||||
// build-override profiles), including compiling the build.rs
|
||||
// script itself.
|
||||
//
|
||||
// If `is_for_build_dep` here is `false`, that means we are a
|
||||
// build.rs script for a normal dependency and we want to set the
|
||||
// CARGO_FEATURE_* environment variables to the features as a
|
||||
// normal dep.
|
||||
//
|
||||
// If `is_for_build_dep` here is `true`, that means that this
|
||||
// package is being used as a build dependency, and so we only
|
||||
// want to set CARGO_FEATURE_* variables for the build-dependency
|
||||
// side of the graph.
|
||||
//
|
||||
// Keep in mind that the RunCustomBuild unit and the Compile
|
||||
// build.rs unit use the same features. This is because some
|
||||
// people use `cfg!` and `#[cfg]` expressions to check for enabled
|
||||
// features instead of just checking `CARGO_FEATURE_*` at runtime.
|
||||
// In the case with `-Zfeatures=build_dep`, and a shared
|
||||
// dependency has different features enabled for normal vs. build,
|
||||
// then the build.rs script will get compiled twice. I believe it
|
||||
// is not feasible to only build it once because it would break a
|
||||
// large number of scripts (they would think they have the wrong
|
||||
// set of features enabled).
|
||||
let script_unit_for = UnitFor::new_build(unit_for.is_for_build_dep());
|
||||
new_unit_dep_with_profile(
|
||||
state,
|
||||
unit,
|
||||
unit.pkg,
|
||||
t,
|
||||
UnitFor::new_build(),
|
||||
script_unit_for,
|
||||
unit.kind,
|
||||
CompileMode::RunCustomBuild,
|
||||
profile,
|
||||
@ -569,7 +627,11 @@ fn new_unit_dep_with_profile<'a>(
|
||||
let public = state
|
||||
.resolve()
|
||||
.is_public_dep(parent.pkg.package_id(), pkg.package_id());
|
||||
let features = state.resolve().features_sorted(pkg.package_id());
|
||||
let features_for = match unit_for.is_for_build_dep() {
|
||||
true => FeaturesFor::BuildDep,
|
||||
false => FeaturesFor::NormalOrDev,
|
||||
};
|
||||
let features = state.activated_features(pkg.package_id(), features_for);
|
||||
let unit = state
|
||||
.bcx
|
||||
.units
|
||||
@ -674,6 +736,19 @@ impl<'a, 'cfg> State<'a, 'cfg> {
|
||||
}
|
||||
}
|
||||
|
||||
fn activated_features(
|
||||
&self,
|
||||
pkg_id: PackageId,
|
||||
features_for: FeaturesFor,
|
||||
) -> Vec<InternedString> {
|
||||
let features = if self.is_std {
|
||||
self.std_features.unwrap()
|
||||
} else {
|
||||
self.usr_features
|
||||
};
|
||||
features.activated_features(pkg_id, features_for)
|
||||
}
|
||||
|
||||
fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
|
||||
if let Some(pkg) = self.package_cache.get(&id) {
|
||||
return Ok(Some(pkg));
|
||||
|
@ -341,6 +341,7 @@ pub struct CliUnstable {
|
||||
pub doctest_xcompile: bool,
|
||||
pub panic_abort_tests: bool,
|
||||
pub jobserver_per_rustc: bool,
|
||||
pub features: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl CliUnstable {
|
||||
@ -380,6 +381,13 @@ impl CliUnstable {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_features(value: Option<&str>) -> Vec<String> {
|
||||
match value {
|
||||
None => Vec::new(),
|
||||
Some(v) => v.split(',').map(|s| s.to_string()).collect(),
|
||||
}
|
||||
}
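For reference, a `-Zfeatures` value is just split on commas, so `-Zfeatures=build_dep,itarget` becomes two option strings. Below is a stand-alone copy of the helper above, included only so the snippet runs on its own:

```rust
// Same splitting as the helper above: no value means no options.
fn parse_features(value: Option<&str>) -> Vec<String> {
    match value {
        None => Vec::new(),
        Some(v) => v.split(',').map(|s| s.to_string()).collect(),
    }
}

fn main() {
    assert_eq!(parse_features(None), Vec::<String>::new());
    assert_eq!(
        parse_features(Some("build_dep,itarget")),
        vec!["build_dep".to_string(), "itarget".to_string()]
    );
}
```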
|
||||
|
||||
// Asserts that there is no argument to the flag.
|
||||
fn parse_empty(key: &str, value: Option<&str>) -> CargoResult<bool> {
|
||||
if let Some(v) = value {
|
||||
@ -409,6 +417,7 @@ impl CliUnstable {
|
||||
"doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?,
|
||||
"panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?,
|
||||
"jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?,
|
||||
"features" => self.features = Some(parse_features(v)),
|
||||
_ => bail!("unknown `-Z` flag specified: {}", k),
|
||||
}
|
||||
|
||||
|
@ -483,7 +483,7 @@ fn merge_toml_overrides(
|
||||
profile: &mut Profile,
|
||||
toml: &TomlProfile,
|
||||
) {
|
||||
if unit_for.is_build() {
|
||||
if unit_for.is_for_host() {
|
||||
if let Some(ref build_override) = toml.build_override {
|
||||
merge_profile(profile, build_override);
|
||||
}
|
||||
@ -766,7 +766,49 @@ pub struct UnitFor {
|
||||
/// A target for `build.rs` or any of its dependencies, or a proc-macro or
|
||||
/// any of its dependencies. This enables `build-override` profiles for
|
||||
/// these targets.
|
||||
build: bool,
|
||||
///
|
||||
/// An invariant is that if `build_dep` is true, `host` must be true.
|
||||
///
|
||||
/// Note that this is `true` for `RunCustomBuild` units, even though that
|
||||
/// unit should *not* use build-override profiles. This is a bit of a
|
||||
/// special case. When computing the `RunCustomBuild` unit, it manually
|
||||
/// uses the `get_profile_run_custom_build` method to get the correct
|
||||
/// profile information for the unit. `host` needs to be true so that all
|
||||
/// of the dependencies of that `RunCustomBuild` unit have this flag be
|
||||
/// sticky (and forced to `true` for all further dependencies) — which is
|
||||
/// the whole point of `UnitFor`.
|
||||
host: bool,
|
||||
/// A target for a build dependency (or any of its dependencies). This is
|
||||
/// used for computing features of build dependencies independently of
|
||||
/// other dependency kinds.
|
||||
///
|
||||
/// The subtle difference between this and `host` is that the build script
|
||||
/// for a non-host package sets this to `false` because it wants the
|
||||
/// features of the non-host package (whereas `host` is true because the
|
||||
/// build script is being built for the host). `build_dep` becomes `true`
|
||||
/// for build-dependencies, or any of their dependencies. For example, with
|
||||
/// this dependency tree:
|
||||
///
|
||||
/// ```text
|
||||
/// foo
|
||||
/// ├── foo build.rs
|
||||
/// │ └── shared_dep (BUILD dependency)
|
||||
/// │ └── shared_dep build.rs
|
||||
/// └── shared_dep (Normal dependency)
|
||||
/// └── shared_dep build.rs
|
||||
/// ```
|
||||
///
|
||||
/// In this example, `foo build.rs` is HOST=true, BUILD_DEP=false. This is
|
||||
/// so that `foo build.rs` gets the profile settings for build scripts
|
||||
/// (HOST=true) and features of foo (BUILD_DEP=false) because build scripts
|
||||
/// need to know which features their package is being built with.
|
||||
///
|
||||
/// But in the case of `shared_dep`, when built as a build dependency,
|
||||
/// both flags are true (it only wants the build-dependency features).
|
||||
/// When `shared_dep` is built as a normal dependency, then `shared_dep
|
||||
/// build.rs` is HOST=true, BUILD_DEP=false for the same reasons that
|
||||
/// foo's build script is set that way.
|
||||
build_dep: bool,
|
||||
/// How Cargo processes the `panic` setting or profiles. This is done to
|
||||
/// handle test/benches inheriting from dev/release, as well as forcing
|
||||
/// `for_host` units to always unwind.
|
||||
@ -793,15 +835,22 @@ impl UnitFor {
|
||||
/// proc macro/plugin, or test/bench).
|
||||
pub fn new_normal() -> UnitFor {
|
||||
UnitFor {
|
||||
build: false,
|
||||
host: false,
|
||||
build_dep: false,
|
||||
panic_setting: PanicSetting::ReadProfile,
|
||||
}
|
||||
}
|
||||
|
||||
/// A unit for a custom build script or its dependencies.
|
||||
pub fn new_build() -> UnitFor {
|
||||
///
|
||||
/// The `build_dep` parameter is whether or not this is for a build
|
||||
/// dependency. Build scripts for non-host units should use `false`
|
||||
/// because they want to use the features of the package they are running
|
||||
/// for.
|
||||
pub fn new_build(build_dep: bool) -> UnitFor {
|
||||
UnitFor {
|
||||
build: true,
|
||||
host: true,
|
||||
build_dep,
|
||||
// Force build scripts to always use `panic=unwind` for now to
|
||||
// maximally share dependencies with procedural macros.
|
||||
panic_setting: PanicSetting::AlwaysUnwind,
|
||||
@ -811,7 +860,8 @@ impl UnitFor {
|
||||
/// A unit for a proc macro or compiler plugin or their dependencies.
|
||||
pub fn new_compiler() -> UnitFor {
|
||||
UnitFor {
|
||||
build: false,
|
||||
host: false,
|
||||
build_dep: false,
|
||||
// Force plugins to use `panic=abort` so panics in the compiler do
|
||||
// not abort the process but instead end with a reasonable error
|
||||
// message that involves catching the panic in the compiler.
|
||||
@ -827,7 +877,8 @@ impl UnitFor {
|
||||
/// compiler flag.
|
||||
pub fn new_test(config: &Config) -> UnitFor {
|
||||
UnitFor {
|
||||
build: false,
|
||||
host: false,
|
||||
build_dep: false,
|
||||
// We're testing out an unstable feature (`-Zpanic-abort-tests`)
|
||||
// which inherits the panic setting from the dev/release profile
|
||||
// (basically avoid recompiles) but historical defaults required
|
||||
@ -840,7 +891,7 @@ impl UnitFor {
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a variant based on `for_host` setting.
|
||||
/// Returns a new copy based on `for_host` setting.
|
||||
///
|
||||
/// When `for_host` is true, this clears `panic_abort_ok` in a sticky
|
||||
/// fashion so that all its dependencies also have `panic_abort_ok=false`.
|
||||
@ -849,7 +900,8 @@ impl UnitFor {
|
||||
/// graph where everything is `panic=unwind`.
|
||||
pub fn with_for_host(self, for_host: bool) -> UnitFor {
|
||||
UnitFor {
|
||||
build: self.build || for_host,
|
||||
host: self.host || for_host,
|
||||
build_dep: self.build_dep,
|
||||
panic_setting: if for_host {
|
||||
PanicSetting::AlwaysUnwind
|
||||
} else {
|
||||
@ -858,14 +910,30 @@ impl UnitFor {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if this unit is for a custom build script or one of its
|
||||
/// dependencies.
|
||||
pub fn is_build(self) -> bool {
|
||||
self.build
|
||||
/// Returns a new copy updating it for a build dependency.
|
||||
///
|
||||
/// This is part of the machinery responsible for handling feature
|
||||
/// decoupling for build dependencies in the new feature resolver.
|
||||
pub fn with_build_dep(mut self, build_dep: bool) -> UnitFor {
|
||||
if build_dep {
|
||||
assert!(self.host);
|
||||
}
|
||||
self.build_dep = self.build_dep || build_dep;
|
||||
self
|
||||
}
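Both flags are intended to be sticky: once `host` is set for a unit it stays set for everything beneath it, and `build_dep` additionally requires `host` to already be set. A small stand-alone sketch of that propagation, using a hypothetical `Flags` type rather than Cargo's `UnitFor`:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Flags {
    host: bool,
    build_dep: bool,
}

impl Flags {
    fn new_normal() -> Flags {
        Flags { host: false, build_dep: false }
    }

    // Once `host` is set it never clears for deeper dependencies.
    fn with_for_host(self, for_host: bool) -> Flags {
        Flags { host: self.host || for_host, ..self }
    }

    // `build_dep` is also sticky, and only valid when `host` is already set.
    fn with_build_dep(mut self, build_dep: bool) -> Flags {
        if build_dep {
            assert!(self.host);
        }
        self.build_dep = self.build_dep || build_dep;
        self
    }
}

fn main() {
    // foo's build.rs: built for the host, but wants foo's normal features.
    let build_script = Flags::new_normal().with_for_host(true);
    // shared_dep reached through that build script: a true build dependency.
    let shared_as_build_dep = build_script.with_build_dep(true);
    assert_eq!(build_script, Flags { host: true, build_dep: false });
    assert_eq!(shared_as_build_dep, Flags { host: true, build_dep: true });
}
```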
|
||||
|
||||
/// Returns `true` if this unit is for a build script or any of its
|
||||
/// dependencies, or a proc macro or any of its dependencies.
|
||||
pub fn is_for_host(&self) -> bool {
|
||||
self.host
|
||||
}
|
||||
|
||||
pub fn is_for_build_dep(&self) -> bool {
|
||||
self.build_dep
|
||||
}
|
||||
|
||||
/// Returns how `panic` settings should be handled for this profile
|
||||
fn panic_setting(self) -> PanicSetting {
|
||||
fn panic_setting(&self) -> PanicSetting {
|
||||
self.panic_setting
|
||||
}
|
||||
|
||||
@ -873,21 +941,37 @@ impl UnitFor {
|
||||
pub fn all_values() -> &'static [UnitFor] {
|
||||
static ALL: &[UnitFor] = &[
|
||||
UnitFor {
|
||||
build: false,
|
||||
host: false,
|
||||
build_dep: false,
|
||||
panic_setting: PanicSetting::ReadProfile,
|
||||
},
|
||||
UnitFor {
|
||||
build: true,
|
||||
host: true,
|
||||
build_dep: false,
|
||||
panic_setting: PanicSetting::AlwaysUnwind,
|
||||
},
|
||||
UnitFor {
|
||||
build: false,
|
||||
host: false,
|
||||
build_dep: false,
|
||||
panic_setting: PanicSetting::AlwaysUnwind,
|
||||
},
|
||||
UnitFor {
|
||||
build: false,
|
||||
host: false,
|
||||
build_dep: false,
|
||||
panic_setting: PanicSetting::Inherit,
|
||||
},
|
||||
// build_dep=true must always have host=true
|
||||
// `Inherit` is not used in build dependencies.
|
||||
UnitFor {
|
||||
host: true,
|
||||
build_dep: true,
|
||||
panic_setting: PanicSetting::ReadProfile,
|
||||
},
|
||||
UnitFor {
|
||||
host: true,
|
||||
build_dep: true,
|
||||
panic_setting: PanicSetting::AlwaysUnwind,
|
||||
},
|
||||
];
|
||||
ALL
|
||||
}
|
||||
|
@ -164,20 +164,21 @@ impl Context {
|
||||
}
|
||||
}
|
||||
debug!("checking if {} is already activated", summary.package_id());
|
||||
if opts.all_features {
|
||||
if opts.features.all_features {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let has_default_feature = summary.features().contains_key("default");
|
||||
Ok(match self.resolve_features.get(&id) {
|
||||
Some(prev) => {
|
||||
opts.features.is_subset(prev)
|
||||
&& (!opts.uses_default_features
|
||||
opts.features.features.is_subset(prev)
|
||||
&& (!opts.features.uses_default_features
|
||||
|| prev.contains("default")
|
||||
|| !has_default_feature)
|
||||
}
|
||||
None => {
|
||||
opts.features.is_empty() && (!opts.uses_default_features || !has_default_feature)
|
||||
opts.features.features.is_empty()
|
||||
&& (!opts.features.uses_default_features || !has_default_feature)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -340,7 +340,7 @@ fn build_requirements<'a, 'b: 'a>(
|
||||
) -> CargoResult<Requirements<'a>> {
|
||||
let mut reqs = Requirements::new(s);
|
||||
|
||||
if opts.all_features {
|
||||
if opts.features.all_features {
|
||||
for key in s.features().keys() {
|
||||
reqs.require_feature(*key)?;
|
||||
}
|
||||
@ -348,12 +348,12 @@ fn build_requirements<'a, 'b: 'a>(
|
||||
reqs.require_dependency(dep.name_in_toml());
|
||||
}
|
||||
} else {
|
||||
for &f in opts.features.iter() {
|
||||
for &f in opts.features.features.iter() {
|
||||
reqs.require_value(&FeatureValue::new(f, s))?;
|
||||
}
|
||||
}
|
||||
|
||||
if opts.uses_default_features && s.features().contains_key("default") {
|
||||
if opts.features.uses_default_features && s.features().contains_key("default") {
|
||||
reqs.require_feature(InternedString::new("default"))?;
|
||||
}
|
||||
|
||||
|
@ -365,6 +365,7 @@ impl EncodableResolve {
|
||||
metadata,
|
||||
unused_patches,
|
||||
version,
|
||||
HashMap::new(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
src/cargo/core/resolver/features.rs (new file, 534 lines)
@ -0,0 +1,534 @@
|
||||
//! Feature resolver.
|
||||
//!
|
||||
//! This is a new feature resolver that runs independently of the main
|
||||
//! dependency resolver. It is intended to make it easier to experiment with
|
||||
//! new behaviors. When `-Zfeatures` is not used, it will fall back to using
|
||||
//! the original `Resolve` feature computation. With `-Zfeatures` enabled,
|
||||
//! this will walk the dependency graph and compute the features using a
|
||||
//! different algorithm. One of its key characteristics is that it can avoid
|
||||
//! unifying features for shared dependencies in some situations.
|
||||
//!
|
||||
//! The preferred way to engage this new resolver is via
|
||||
//! `resolve_ws_with_opts`.
|
||||
//!
|
||||
//! There are many assumptions made about the resolver itself. It assumes
|
||||
//! validation has already been done on the feature maps, and doesn't do any
|
||||
//! validation itself. It assumes dev-dependencies within a dependency have
|
||||
//! been removed.
|
||||
|
||||
use crate::core::compiler::{CompileKind, RustcTargetData};
|
||||
use crate::core::dependency::{DepKind, Dependency};
|
||||
use crate::core::resolver::types::FeaturesSet;
|
||||
use crate::core::resolver::Resolve;
|
||||
use crate::core::{FeatureValue, InternedString, PackageId, PackageIdSpec, Workspace};
|
||||
use crate::util::{CargoResult, Config};
|
||||
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||
use std::rc::Rc;
|
||||
|
||||
/// Map of activated features.
|
||||
///
|
||||
/// The key is `(PackageId, bool)` where the bool is `true` if these
|
||||
/// are features for a build dependency.
|
||||
type ActivateMap = HashMap<(PackageId, bool), BTreeSet<InternedString>>;
|
||||
|
||||
/// Set of all activated features for all packages in the resolve graph.
|
||||
pub struct ResolvedFeatures {
|
||||
activated_features: ActivateMap,
|
||||
/// This is only here for legacy support when `-Zfeatures` is not enabled.
|
||||
legacy: Option<HashMap<PackageId, Vec<InternedString>>>,
|
||||
opts: FeatureOpts,
|
||||
}
|
||||
|
||||
/// Options for how the feature resolver works.
|
||||
#[derive(Default)]
|
||||
struct FeatureOpts {
|
||||
/// -Zpackage-features: changes the behavior of feature flags in a workspace.
|
||||
package_features: bool,
|
||||
/// -Zfeatures is enabled, use new resolver.
|
||||
new_resolver: bool,
|
||||
/// Build deps will not share features with other dep kinds.
|
||||
decouple_build_deps: bool,
|
||||
/// Dev dep features will not be activated unless needed.
|
||||
decouple_dev_deps: bool,
|
||||
/// Targets that are not in use will not activate features.
|
||||
ignore_inactive_targets: bool,
|
||||
/// If enabled, compare against old resolver (for testing).
|
||||
compare: bool,
|
||||
}
|
||||
|
||||
/// Flag to indicate if Cargo is building *any* dev units (tests, examples, etc.).
|
||||
///
|
||||
/// This disables decoupling of dev dependencies. It may be possible to relax
|
||||
/// this in the future, but it will require significant changes to how unit
|
||||
/// dependencies are computed, and can result in longer build times with
|
||||
/// `cargo test` because the lib may need to be built 3 times instead of
|
||||
/// twice.
|
||||
pub enum HasDevUnits {
|
||||
Yes,
|
||||
No,
|
||||
}
|
||||
|
||||
/// Flag to indicate if features are requested for a build dependency or not.
|
||||
#[derive(PartialEq)]
|
||||
pub enum FeaturesFor {
|
||||
NormalOrDev,
|
||||
BuildDep,
|
||||
}
|
||||
|
||||
impl FeatureOpts {
|
||||
fn new(config: &Config, has_dev_units: HasDevUnits) -> CargoResult<FeatureOpts> {
|
||||
let mut opts = FeatureOpts::default();
|
||||
let unstable_flags = config.cli_unstable();
|
||||
opts.package_features = unstable_flags.package_features;
|
||||
let mut enable = |feat_opts: &Vec<String>| {
|
||||
opts.new_resolver = true;
|
||||
for opt in feat_opts {
|
||||
match opt.as_ref() {
|
||||
"build_dep" => opts.decouple_build_deps = true,
|
||||
"dev_dep" => opts.decouple_dev_deps = true,
|
||||
"itarget" => opts.ignore_inactive_targets = true,
|
||||
"all" => {
|
||||
opts.decouple_build_deps = true;
|
||||
opts.decouple_dev_deps = true;
|
||||
opts.ignore_inactive_targets = true;
|
||||
}
|
||||
"compare" => opts.compare = true,
|
||||
"ws" => unimplemented!(),
|
||||
"host" => unimplemented!(),
|
||||
s => anyhow::bail!("-Zfeatures flag `{}` is not supported", s),
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
};
|
||||
if let Some(feat_opts) = unstable_flags.features.as_ref() {
|
||||
enable(feat_opts)?;
|
||||
}
|
||||
// This env var is intended for testing only.
|
||||
if let Ok(env_opts) = std::env::var("__CARGO_FORCE_NEW_FEATURES") {
|
||||
if env_opts == "1" {
|
||||
opts.new_resolver = true;
|
||||
} else {
|
||||
let env_opts = env_opts.split(',').map(|s| s.to_string()).collect();
|
||||
enable(&env_opts)?;
|
||||
}
|
||||
}
|
||||
if let HasDevUnits::Yes = has_dev_units {
|
||||
opts.decouple_dev_deps = false;
|
||||
}
|
||||
Ok(opts)
|
||||
}
|
||||
}
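The commit message mentions an environment variable for forcing the new resolver in Cargo's test suite; the code above reads `__CARGO_FORCE_NEW_FEATURES`, where a value of `1` only enables the new resolver and any other value is parsed as the same comma-separated option list as `-Zfeatures`. Here is a stand-alone sketch of that precedence with a hypothetical `Opts` struct (the real code also handles `dev_dep`, `all`, and `compare`, and errors on unknown flags):

```rust
#[derive(Default, Debug)]
struct Opts {
    new_resolver: bool,
    decouple_build_deps: bool,
    ignore_inactive_targets: bool,
}

// "1" just turns the new resolver on; anything else is treated as a
// -Zfeatures-style option list (subset shown here for brevity).
fn apply_env(opts: &mut Opts, env_value: &str) {
    opts.new_resolver = true;
    if env_value == "1" {
        return;
    }
    for opt in env_value.split(',') {
        match opt {
            "build_dep" => opts.decouple_build_deps = true,
            "itarget" => opts.ignore_inactive_targets = true,
            _ => {}
        }
    }
}

fn main() {
    let mut opts = Opts::default();
    apply_env(&mut opts, "build_dep,itarget");
    assert!(opts.new_resolver && opts.decouple_build_deps && opts.ignore_inactive_targets);
}
```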
|
||||
|
||||
/// Features flags requested for a package.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct RequestedFeatures {
|
||||
pub features: FeaturesSet,
|
||||
pub all_features: bool,
|
||||
pub uses_default_features: bool,
|
||||
}
|
||||
|
||||
impl RequestedFeatures {
|
||||
/// Creates a new RequestedFeatures from the given command-line flags.
|
||||
pub fn from_command_line(
|
||||
features: &[String],
|
||||
all_features: bool,
|
||||
uses_default_features: bool,
|
||||
) -> RequestedFeatures {
|
||||
RequestedFeatures {
|
||||
features: Rc::new(RequestedFeatures::split_features(features)),
|
||||
all_features,
|
||||
uses_default_features,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new RequestedFeatures with the given `all_features` setting.
|
||||
pub fn new_all(all_features: bool) -> RequestedFeatures {
|
||||
RequestedFeatures {
|
||||
features: Rc::new(BTreeSet::new()),
|
||||
all_features,
|
||||
uses_default_features: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn split_features(features: &[String]) -> BTreeSet<InternedString> {
|
||||
features
|
||||
.iter()
|
||||
.flat_map(|s| s.split_whitespace())
|
||||
.flat_map(|s| s.split(','))
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(InternedString::new)
|
||||
.collect::<BTreeSet<InternedString>>()
|
||||
}
|
||||
}
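The splitting rules above (split on whitespace and commas, drop empty entries, deduplicate and sort via the `BTreeSet`) can be exercised with a tiny stand-alone version that uses plain `String` in place of `InternedString`:

```rust
use std::collections::BTreeSet;

// Stand-alone copy of the splitting logic, with `String` substituted for
// Cargo's `InternedString`.
fn split_features(features: &[String]) -> BTreeSet<String> {
    features
        .iter()
        .flat_map(|s| s.split_whitespace())
        .flat_map(|s| s.split(','))
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .collect()
}

fn main() {
    let input = vec!["foo bar".to_string(), "bar,,baz".to_string()];
    let set = split_features(&input);
    // Duplicates and empty entries are gone; iteration order is sorted.
    assert_eq!(
        set.into_iter().collect::<Vec<_>>(),
        vec!["bar".to_string(), "baz".to_string(), "foo".to_string()]
    );
}
```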
|
||||
|
||||
impl ResolvedFeatures {
|
||||
/// Returns the list of features that are enabled for the given package.
|
||||
pub fn activated_features(
|
||||
&self,
|
||||
pkg_id: PackageId,
|
||||
features_for: FeaturesFor,
|
||||
) -> Vec<InternedString> {
|
||||
self.activated_features_int(pkg_id, features_for, true)
|
||||
}
|
||||
|
||||
/// Variant of `activated_features` that returns an empty Vec if this is
|
||||
/// not a valid pkg_id/is_build combination. Used by `cargo clean` which
|
||||
/// doesn't know the exact set.
|
||||
pub fn activated_features_unverified(
|
||||
&self,
|
||||
pkg_id: PackageId,
|
||||
features_for: FeaturesFor,
|
||||
) -> Vec<InternedString> {
|
||||
self.activated_features_int(pkg_id, features_for, false)
|
||||
}
|
||||
|
||||
fn activated_features_int(
|
||||
&self,
|
||||
pkg_id: PackageId,
|
||||
features_for: FeaturesFor,
|
||||
verify: bool,
|
||||
) -> Vec<InternedString> {
|
||||
if let Some(legacy) = &self.legacy {
|
||||
legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone())
|
||||
} else {
|
||||
let is_build = self.opts.decouple_build_deps && features_for == FeaturesFor::BuildDep;
|
||||
if let Some(fs) = self.activated_features.get(&(pkg_id, is_build)) {
|
||||
fs.iter().cloned().collect()
|
||||
} else if verify {
|
||||
panic!("features did not find {:?} {:?}", pkg_id, is_build)
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FeatureResolver<'a, 'cfg> {
|
||||
ws: &'a Workspace<'cfg>,
|
||||
target_data: &'a RustcTargetData,
|
||||
/// The platform to build for, requested by the user.
|
||||
requested_target: CompileKind,
|
||||
resolve: &'a Resolve,
|
||||
/// Options that change how the feature resolver operates.
|
||||
opts: FeatureOpts,
|
||||
/// Map of features activated for each package.
|
||||
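    /// This map is keyed by `(PackageId, for_build)` so that features for
    /// build dependencies can be tracked separately from normal/dev features.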
activated_features: ActivateMap,
|
||||
    /// Keeps track of which packages have had their dependencies processed.
|
||||
/// Used to avoid cycles, and to speed up processing.
|
||||
processed_deps: HashSet<(PackageId, bool)>,
|
||||
}
|
||||
|
||||
impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
|
||||
/// Runs the resolution algorithm and returns a new `ResolvedFeatures`
|
||||
/// with the result.
|
||||
pub fn resolve(
|
||||
ws: &Workspace<'cfg>,
|
||||
target_data: &RustcTargetData,
|
||||
resolve: &Resolve,
|
||||
requested_features: &RequestedFeatures,
|
||||
specs: &[PackageIdSpec],
|
||||
requested_target: CompileKind,
|
||||
has_dev_units: HasDevUnits,
|
||||
) -> CargoResult<ResolvedFeatures> {
|
||||
use crate::util::profile;
|
||||
let _p = profile::start("resolve features");
|
||||
|
||||
let opts = FeatureOpts::new(ws.config(), has_dev_units)?;
|
||||
if !opts.new_resolver {
|
||||
// Legacy mode.
|
||||
return Ok(ResolvedFeatures {
|
||||
activated_features: HashMap::new(),
|
||||
legacy: Some(resolve.features_clone()),
|
||||
opts,
|
||||
});
|
||||
}
|
||||
let mut r = FeatureResolver {
|
||||
ws,
|
||||
target_data,
|
||||
requested_target,
|
||||
resolve,
|
||||
opts,
|
||||
activated_features: HashMap::new(),
|
||||
processed_deps: HashSet::new(),
|
||||
};
|
||||
r.do_resolve(specs, requested_features)?;
|
||||
log::debug!("features={:#?}", r.activated_features);
|
||||
if r.opts.compare {
|
||||
r.compare();
|
||||
}
|
||||
Ok(ResolvedFeatures {
|
||||
activated_features: r.activated_features,
|
||||
legacy: None,
|
||||
opts: r.opts,
|
||||
})
|
||||
}
|
||||
|
||||
/// Performs the process of resolving all features for the resolve graph.
|
||||
fn do_resolve(
|
||||
&mut self,
|
||||
specs: &[PackageIdSpec],
|
||||
requested_features: &RequestedFeatures,
|
||||
) -> CargoResult<()> {
|
||||
let member_features = self.ws.members_with_features(specs, requested_features)?;
|
||||
for (member, requested_features) in &member_features {
|
||||
let fvs = self.fvs_from_requested(member.package_id(), requested_features);
|
||||
self.activate_pkg(member.package_id(), &fvs, false)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn activate_pkg(
|
||||
&mut self,
|
||||
pkg_id: PackageId,
|
||||
fvs: &[FeatureValue],
|
||||
for_build: bool,
|
||||
) -> CargoResult<()> {
|
||||
// Add an empty entry to ensure everything is covered. This is intended for
|
||||
// finding bugs where the resolver missed something it should have visited.
|
||||
// Remove this in the future if `activated_features` uses an empty default.
|
||||
self.activated_features
|
||||
.entry((pkg_id, for_build))
|
||||
.or_insert_with(BTreeSet::new);
|
||||
for fv in fvs {
|
||||
self.activate_fv(pkg_id, fv, for_build)?;
|
||||
}
|
||||
if !self.processed_deps.insert((pkg_id, for_build)) {
|
||||
// Already processed dependencies. There's no need to process them
|
||||
// again. This is primarily to avoid cycles, but also helps speed
|
||||
// things up.
|
||||
//
|
||||
// This is safe because if another package comes along and adds a
|
||||
// feature on this package, it will immediately add it (in
|
||||
// `activate_fv`), and recurse as necessary right then and there.
|
||||
// For example, consider we've already processed our dependencies,
|
||||
// and another package comes along and enables one of our optional
|
||||
// dependencies, it will do so immediately in the
|
||||
// `FeatureValue::CrateFeature` branch, and then immediately
|
||||
// recurse into that optional dependency. This also holds true for
|
||||
// features that enable other features.
|
||||
return Ok(());
|
||||
}
|
||||
for (dep_pkg_id, deps) in self.deps(pkg_id, for_build) {
|
||||
for (dep, dep_for_build) in deps {
|
||||
if dep.is_optional() {
|
||||
// Optional dependencies are enabled in `activate_fv` when
|
||||
// a feature enables it.
|
||||
continue;
|
||||
}
|
||||
// Recurse into the dependency.
|
||||
let fvs = self.fvs_from_dependency(dep_pkg_id, dep);
|
||||
self.activate_pkg(dep_pkg_id, &fvs, dep_for_build)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Activate a single FeatureValue for a package.
|
||||
fn activate_fv(
|
||||
&mut self,
|
||||
pkg_id: PackageId,
|
||||
fv: &FeatureValue,
|
||||
for_build: bool,
|
||||
) -> CargoResult<()> {
|
||||
match fv {
|
||||
FeatureValue::Feature(f) => {
|
||||
self.activate_rec(pkg_id, *f, for_build)?;
|
||||
}
|
||||
FeatureValue::Crate(dep_name) => {
|
||||
// Activate the feature name on self.
|
||||
self.activate_rec(pkg_id, *dep_name, for_build)?;
|
||||
// Activate the optional dep.
|
||||
for (dep_pkg_id, deps) in self.deps(pkg_id, for_build) {
|
||||
for (dep, dep_for_build) in deps {
|
||||
if dep.name_in_toml() != *dep_name {
|
||||
continue;
|
||||
}
|
||||
let fvs = self.fvs_from_dependency(dep_pkg_id, dep);
|
||||
self.activate_pkg(dep_pkg_id, &fvs, dep_for_build)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
FeatureValue::CrateFeature(dep_name, dep_feature) => {
|
||||
// Activate a feature within a dependency.
|
||||
for (dep_pkg_id, deps) in self.deps(pkg_id, for_build) {
|
||||
for (dep, dep_for_build) in deps {
|
||||
if dep.name_in_toml() != *dep_name {
|
||||
continue;
|
||||
}
|
||||
if dep.is_optional() {
|
||||
// Activate the crate on self.
|
||||
let fv = FeatureValue::Crate(*dep_name);
|
||||
self.activate_fv(pkg_id, &fv, for_build)?;
|
||||
}
|
||||
// Activate the feature on the dependency.
|
||||
let summary = self.resolve.summary(dep_pkg_id);
|
||||
let fv = FeatureValue::new(*dep_feature, summary);
|
||||
self.activate_fv(dep_pkg_id, &fv, dep_for_build)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Activate the given feature for the given package, and then recursively
|
||||
/// activate any other features that feature enables.
|
||||
fn activate_rec(
|
||||
&mut self,
|
||||
pkg_id: PackageId,
|
||||
feature_to_enable: InternedString,
|
||||
for_build: bool,
|
||||
) -> CargoResult<()> {
|
||||
let enabled = self
|
||||
.activated_features
|
||||
.entry((pkg_id, for_build))
|
||||
.or_insert_with(BTreeSet::new);
|
||||
if !enabled.insert(feature_to_enable) {
|
||||
// Already enabled.
|
||||
return Ok(());
|
||||
}
|
||||
let summary = self.resolve.summary(pkg_id);
|
||||
let feature_map = summary.features();
|
||||
let fvs = match feature_map.get(&feature_to_enable) {
|
||||
Some(fvs) => fvs,
|
||||
None => {
|
||||
// TODO: this should only happen for optional dependencies.
|
||||
// Other cases should be validated by Summary's `build_feature_map`.
|
||||
// Figure out some way to validate this assumption.
|
||||
log::debug!(
|
||||
"pkg {:?} does not define feature {}",
|
||||
pkg_id,
|
||||
feature_to_enable
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
for fv in fvs {
|
||||
self.activate_fv(pkg_id, fv, for_build)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns Vec of FeatureValues from a Dependency definition.
|
||||
fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec<FeatureValue> {
|
||||
let summary = self.resolve.summary(dep_id);
|
||||
let feature_map = summary.features();
|
||||
let mut result: Vec<FeatureValue> = dep
|
||||
.features()
|
||||
.iter()
|
||||
.map(|f| FeatureValue::new(*f, summary))
|
||||
.collect();
|
||||
let default = InternedString::new("default");
|
||||
if dep.uses_default_features() && feature_map.contains_key(&default) {
|
||||
result.push(FeatureValue::Feature(default));
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// Returns Vec of FeatureValues from a set of command-line features.
|
||||
fn fvs_from_requested(
|
||||
&self,
|
||||
pkg_id: PackageId,
|
||||
requested_features: &RequestedFeatures,
|
||||
) -> Vec<FeatureValue> {
|
||||
let summary = self.resolve.summary(pkg_id);
|
||||
let feature_map = summary.features();
|
||||
if requested_features.all_features {
|
||||
let mut fvs: Vec<FeatureValue> = feature_map
|
||||
.keys()
|
||||
.map(|k| FeatureValue::Feature(*k))
|
||||
.collect();
|
||||
// Add optional deps.
|
||||
// Top-level requested features can never apply to
|
||||
// build-dependencies, so for_build is `false` here.
|
||||
for (_dep_pkg_id, deps) in self.deps(pkg_id, false) {
|
||||
for (dep, _dep_for_build) in deps {
|
||||
if dep.is_optional() {
|
||||
// This may result in duplicates, but that should be ok.
|
||||
fvs.push(FeatureValue::Crate(dep.name_in_toml()));
|
||||
}
|
||||
}
|
||||
}
|
||||
fvs
|
||||
} else {
|
||||
let mut result: Vec<FeatureValue> = requested_features
|
||||
.features
|
||||
.as_ref()
|
||||
.iter()
|
||||
.map(|f| FeatureValue::new(*f, summary))
|
||||
.collect();
|
||||
let default = InternedString::new("default");
|
||||
if requested_features.uses_default_features && feature_map.contains_key(&default) {
|
||||
result.push(FeatureValue::Feature(default));
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the dependencies for a package, filtering out inactive targets.
|
||||
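    /// The `bool` paired with each dependency indicates whether its features
    /// should be tracked in the build-dependency (host) group.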
fn deps(
|
||||
&self,
|
||||
pkg_id: PackageId,
|
||||
for_build: bool,
|
||||
) -> Vec<(PackageId, Vec<(&'a Dependency, bool)>)> {
|
||||
// Helper for determining if a platform is activated.
|
||||
let platform_activated = |dep: &Dependency| -> bool {
|
||||
// We always care about build-dependencies, and they are always
|
||||
// Host. If we are computing dependencies "for a build script",
|
||||
// even normal dependencies are host-only.
|
||||
if for_build || dep.is_build() {
|
||||
return self
|
||||
.target_data
|
||||
.dep_platform_activated(dep, CompileKind::Host);
|
||||
}
|
||||
// Not a build dependency, and not for a build script, so must be Target.
|
||||
self.target_data
|
||||
.dep_platform_activated(dep, self.requested_target)
|
||||
};
|
||||
self.resolve
|
||||
.deps(pkg_id)
|
||||
.map(|(dep_id, deps)| {
|
||||
let deps = deps
|
||||
.iter()
|
||||
.filter(|dep| {
|
||||
if dep.platform().is_some()
|
||||
&& self.opts.ignore_inactive_targets
|
||||
&& !platform_activated(dep)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if self.opts.decouple_dev_deps && dep.kind() == DepKind::Development {
|
||||
return false;
|
||||
}
|
||||
true
|
||||
})
|
||||
.map(|dep| {
|
||||
let dep_for_build =
|
||||
for_build || (self.opts.decouple_build_deps && dep.is_build());
|
||||
(dep, dep_for_build)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
(dep_id, deps)
|
||||
})
|
||||
.filter(|(_id, deps)| !deps.is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
    /// Compares the activated features against those computed by the old
    /// resolver. Used for testing with the `compare` option.
|
||||
fn compare(&self) {
|
||||
let mut found = false;
|
||||
for ((pkg_id, dep_kind), features) in &self.activated_features {
|
||||
let r_features = self.resolve.features(*pkg_id);
|
||||
if !r_features.iter().eq(features.iter()) {
|
||||
eprintln!(
|
||||
"{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n",
|
||||
pkg_id, dep_kind, r_features, features
|
||||
);
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
if found {
|
||||
panic!("feature mismatch");
|
||||
}
|
||||
}
|
||||
}
|
@ -62,12 +62,14 @@ use crate::util::profile;
|
||||
|
||||
use self::context::Context;
|
||||
use self::dep_cache::RegistryQueryer;
|
||||
use self::features::RequestedFeatures;
|
||||
use self::types::{ConflictMap, ConflictReason, DepsFrame};
|
||||
use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress};
|
||||
|
||||
pub use self::encode::Metadata;
|
||||
pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
|
||||
pub use self::errors::{ActivateError, ActivateResult, ResolveError};
|
||||
pub use self::features::HasDevUnits;
|
||||
pub use self::resolve::{Resolve, ResolveVersion};
|
||||
pub use self::types::ResolveOpts;
|
||||
|
||||
@ -76,6 +78,7 @@ mod context;
|
||||
mod dep_cache;
|
||||
mod encode;
|
||||
mod errors;
|
||||
pub mod features;
|
||||
mod resolve;
|
||||
mod types;
|
||||
|
||||
@ -106,9 +109,6 @@ mod types;
|
||||
/// * `config` - a location to print warnings and such, or `None` if no warnings
|
||||
/// should be printed
|
||||
///
|
||||
/// * `print_warnings` - whether or not to print backwards-compatibility
|
||||
/// warnings and such
|
||||
///
|
||||
/// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions
|
||||
/// introduced in the "public & private dependencies" RFC (1977). The current implementation
|
||||
/// makes sure that there is only one version of each name visible to each package.
|
||||
@ -141,17 +141,27 @@ pub fn resolve(
|
||||
let cksum = summary.checksum().map(|s| s.to_string());
|
||||
cksums.insert(summary.package_id(), cksum);
|
||||
}
|
||||
let graph = cx.graph();
|
||||
let replacements = cx.resolve_replacements(®istry);
|
||||
let features = cx
|
||||
.resolve_features
|
||||
.iter()
|
||||
.map(|(k, v)| (*k, v.iter().cloned().collect()))
|
||||
.collect();
|
||||
let summaries = cx
|
||||
.activations
|
||||
.into_iter()
|
||||
.map(|(_key, (summary, _age))| (summary.package_id(), summary))
|
||||
.collect();
|
||||
let resolve = Resolve::new(
|
||||
cx.graph(),
|
||||
cx.resolve_replacements(®istry),
|
||||
cx.resolve_features
|
||||
.iter()
|
||||
.map(|(k, v)| (*k, v.iter().map(|x| x.to_string()).collect()))
|
||||
.collect(),
|
||||
graph,
|
||||
replacements,
|
||||
features,
|
||||
cksums,
|
||||
BTreeMap::new(),
|
||||
Vec::new(),
|
||||
ResolveVersion::default_for_new_lockfiles(),
|
||||
summaries,
|
||||
);
|
||||
|
||||
check_cycles(&resolve)?;
|
||||
@ -161,11 +171,11 @@ pub fn resolve(
|
||||
Ok(resolve)
|
||||
}
|
||||
|
||||
/// Recursively activates the dependencies for `top`, in depth-first order,
|
||||
/// Recursively activates the dependencies for `summaries`, in depth-first order,
|
||||
/// backtracking across possible candidates for each dependency as necessary.
|
||||
///
|
||||
/// If all dependencies can be activated and resolved to a version in the
|
||||
/// dependency graph, cx.resolve is returned.
|
||||
/// dependency graph, `cx` is returned.
|
||||
fn activate_deps_loop(
|
||||
mut cx: Context,
|
||||
registry: &mut RegistryQueryer<'_>,
|
||||
@ -368,9 +378,11 @@ fn activate_deps_loop(
|
||||
let pid = candidate.package_id();
|
||||
let opts = ResolveOpts {
|
||||
dev_deps: false,
|
||||
features: Rc::clone(&features),
|
||||
all_features: false,
|
||||
uses_default_features: dep.uses_default_features(),
|
||||
features: RequestedFeatures {
|
||||
features: Rc::clone(&features),
|
||||
all_features: false,
|
||||
uses_default_features: dep.uses_default_features(),
|
||||
},
|
||||
};
|
||||
trace!(
|
||||
"{}[{}]>{} trying {}",
|
||||
|
@ -1,15 +1,13 @@
|
||||
use super::encode::Metadata;
|
||||
use crate::core::dependency::DepKind;
|
||||
use crate::core::interning::InternedString;
|
||||
use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
|
||||
use crate::util::errors::CargoResult;
|
||||
use crate::util::Graph;
|
||||
use std::borrow::Borrow;
|
||||
use std::cmp;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fmt;
|
||||
use std::iter::FromIterator;
|
||||
|
||||
use crate::core::dependency::DepKind;
|
||||
use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
|
||||
use crate::util::errors::CargoResult;
|
||||
use crate::util::Graph;
|
||||
|
||||
use super::encode::Metadata;
|
||||
|
||||
/// Represents a fully-resolved package dependency graph. Each node in the graph
|
||||
/// is a package and edges represent dependencies between packages.
|
||||
@ -28,9 +26,9 @@ pub struct Resolve {
|
||||
/// An empty `HashSet` to avoid creating a new `HashSet` for every package
|
||||
/// that does not have any features, and to avoid using `Option` to
|
||||
/// simplify the API.
|
||||
empty_features: HashSet<String>,
|
||||
empty_features: Vec<InternedString>,
|
||||
/// Features enabled for a given package.
|
||||
features: HashMap<PackageId, HashSet<String>>,
|
||||
features: HashMap<PackageId, Vec<InternedString>>,
|
||||
/// Checksum for each package. A SHA256 hash of the `.crate` file used to
|
||||
/// validate the correct crate file is used. This is `None` for sources
|
||||
/// that do not use `.crate` files, like path or git dependencies.
|
||||
@ -50,6 +48,7 @@ pub struct Resolve {
|
||||
/// Version of the `Cargo.lock` format, see
|
||||
/// `cargo::core::resolver::encode` for more.
|
||||
version: ResolveVersion,
|
||||
summaries: HashMap<PackageId, Summary>,
|
||||
}
|
||||
|
||||
/// A version to indicate how a `Cargo.lock` should be serialized. Currently
|
||||
@ -73,11 +72,12 @@ impl Resolve {
|
||||
pub fn new(
|
||||
graph: Graph<PackageId, Vec<Dependency>>,
|
||||
replacements: HashMap<PackageId, PackageId>,
|
||||
features: HashMap<PackageId, HashSet<String>>,
|
||||
features: HashMap<PackageId, Vec<InternedString>>,
|
||||
checksums: HashMap<PackageId, Option<String>>,
|
||||
metadata: Metadata,
|
||||
unused_patches: Vec<PackageId>,
|
||||
version: ResolveVersion,
|
||||
summaries: HashMap<PackageId, Summary>,
|
||||
) -> Resolve {
|
||||
let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect();
|
||||
let public_dependencies = graph
|
||||
@ -103,10 +103,11 @@ impl Resolve {
|
||||
checksums,
|
||||
metadata,
|
||||
unused_patches,
|
||||
empty_features: HashSet::new(),
|
||||
empty_features: Vec::new(),
|
||||
reverse_replacements,
|
||||
public_dependencies,
|
||||
version,
|
||||
summaries,
|
||||
}
|
||||
}
|
||||
|
||||
@ -285,10 +286,16 @@ unable to verify that `{0}` is the same as when the lockfile was generated
|
||||
&self.replacements
|
||||
}
|
||||
|
||||
pub fn features(&self, pkg: PackageId) -> &HashSet<String> {
|
||||
pub fn features(&self, pkg: PackageId) -> &[InternedString] {
|
||||
self.features.get(&pkg).unwrap_or(&self.empty_features)
|
||||
}
|
||||
|
||||
/// This is only here for legacy support, it will be removed when
|
||||
/// switching to the new feature resolver.
|
||||
pub fn features_clone(&self) -> HashMap<PackageId, Vec<InternedString>> {
|
||||
self.features.clone()
|
||||
}
|
||||
|
||||
pub fn is_public_dep(&self, pkg: PackageId, dep: PackageId) -> bool {
|
||||
self.public_dependencies
|
||||
.get(&pkg)
|
||||
@ -296,12 +303,6 @@ unable to verify that `{0}` is the same as when the lockfile was generated
|
||||
.unwrap_or_else(|| panic!("Unknown dependency {:?} for package {:?}", dep, pkg))
|
||||
}
|
||||
|
||||
pub fn features_sorted(&self, pkg: PackageId) -> Vec<&str> {
|
||||
let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref()));
|
||||
v.sort_unstable();
|
||||
v
|
||||
}
|
||||
|
||||
pub fn query(&self, spec: &str) -> CargoResult<PackageId> {
|
||||
PackageIdSpec::query_str(spec, self.iter())
|
||||
}
|
||||
@ -374,6 +375,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated
|
||||
pub fn version(&self) -> &ResolveVersion {
|
||||
&self.version
|
||||
}
|
||||
|
||||
pub fn summary(&self, pkg_id: PackageId) -> &Summary {
|
||||
&self.summaries[&pkg_id]
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Resolve {
|
||||
@ -388,7 +393,7 @@ impl PartialEq for Resolve {
|
||||
compare! {
|
||||
// fields to compare
|
||||
graph replacements reverse_replacements empty_features features
|
||||
checksums metadata unused_patches public_dependencies
|
||||
checksums metadata unused_patches public_dependencies summaries
|
||||
|
|
||||
// fields to ignore
|
||||
version
|
||||
|
@ -1,16 +1,15 @@
|
||||
use super::features::RequestedFeatures;
|
||||
use crate::core::interning::InternedString;
|
||||
use crate::core::{Dependency, PackageId, Summary};
|
||||
use crate::util::errors::CargoResult;
|
||||
use crate::util::Config;
|
||||
use im_rc;
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use crate::core::interning::InternedString;
|
||||
use crate::core::{Dependency, PackageId, Summary};
|
||||
use crate::util::errors::CargoResult;
|
||||
use crate::util::Config;
|
||||
|
||||
use im_rc;
|
||||
|
||||
pub struct ResolverProgress {
|
||||
ticks: u16,
|
||||
start: Instant,
|
||||
@ -106,12 +105,8 @@ pub struct ResolveOpts {
|
||||
///
|
||||
/// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`.
|
||||
pub dev_deps: bool,
|
||||
/// Set of features to enable (`--features=…`).
|
||||
pub features: FeaturesSet,
|
||||
/// Indicates *all* features should be enabled (`--all-features`).
|
||||
pub all_features: bool,
|
||||
/// Include the `default` feature (`--no-default-features` sets this false).
|
||||
pub uses_default_features: bool,
|
||||
/// Set of features requested on the command-line.
|
||||
pub features: RequestedFeatures,
|
||||
}
|
||||
|
||||
impl ResolveOpts {
|
||||
@ -119,9 +114,7 @@ impl ResolveOpts {
|
||||
pub fn everything() -> ResolveOpts {
|
||||
ResolveOpts {
|
||||
dev_deps: true,
|
||||
features: Rc::new(BTreeSet::new()),
|
||||
all_features: true,
|
||||
uses_default_features: true,
|
||||
features: RequestedFeatures::new_all(true),
|
||||
}
|
||||
}
|
||||
|
||||
@ -133,21 +126,13 @@ impl ResolveOpts {
|
||||
) -> ResolveOpts {
|
||||
ResolveOpts {
|
||||
dev_deps,
|
||||
features: Rc::new(ResolveOpts::split_features(features)),
|
||||
all_features,
|
||||
uses_default_features,
|
||||
features: RequestedFeatures::from_command_line(
|
||||
features,
|
||||
all_features,
|
||||
uses_default_features,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn split_features(features: &[String]) -> BTreeSet<InternedString> {
|
||||
features
|
||||
.iter()
|
||||
.flat_map(|s| s.split_whitespace())
|
||||
.flat_map(|s| s.split(','))
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(InternedString::new)
|
||||
.collect::<BTreeSet<InternedString>>()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -10,6 +10,7 @@ use url::Url;
|
||||
|
||||
use crate::core::features::Features;
|
||||
use crate::core::registry::PackageRegistry;
|
||||
use crate::core::resolver::features::RequestedFeatures;
|
||||
use crate::core::{Dependency, PackageId, PackageIdSpec};
|
||||
use crate::core::{EitherManifest, Package, SourceId, VirtualManifest};
|
||||
use crate::ops;
|
||||
@ -833,6 +834,86 @@ impl<'cfg> Workspace<'cfg> {
|
||||
pub fn set_target_dir(&mut self, target_dir: Filesystem) {
|
||||
self.target_dir = Some(target_dir);
|
||||
}
|
||||
|
||||
/// Returns a Vec of `(&Package, RequestedFeatures)` tuples that
|
||||
/// represent the workspace members that were requested on the command-line.
|
||||
///
|
||||
/// `specs` may be empty, which indicates it should return all workspace
|
||||
/// members. In this case, `requested_features.all_features` must be
|
||||
/// `true`. This is used for generating `Cargo.lock`, which must include
|
||||
/// all members with all features enabled.
|
||||
pub fn members_with_features(
|
||||
&self,
|
||||
specs: &[PackageIdSpec],
|
||||
requested_features: &RequestedFeatures,
|
||||
) -> CargoResult<Vec<(&Package, RequestedFeatures)>> {
|
||||
assert!(
|
||||
!specs.is_empty() || requested_features.all_features,
|
||||
"no specs requires all_features"
|
||||
);
|
||||
if specs.is_empty() {
|
||||
// When resolving the entire workspace, resolve each member with
|
||||
// all features enabled.
|
||||
return Ok(self
|
||||
.members()
|
||||
.map(|m| (m, RequestedFeatures::new_all(true)))
|
||||
.collect());
|
||||
}
|
||||
if self.config().cli_unstable().package_features {
|
||||
if specs.len() > 1 && !requested_features.features.is_empty() {
|
||||
anyhow::bail!("cannot specify features for more than one package");
|
||||
}
|
||||
let members: Vec<(&Package, RequestedFeatures)> = self
|
||||
.members()
|
||||
.filter(|m| specs.iter().any(|spec| spec.matches(m.package_id())))
|
||||
.map(|m| (m, requested_features.clone()))
|
||||
.collect();
|
||||
if members.is_empty() {
|
||||
// `cargo build -p foo`, where `foo` is not a member.
|
||||
// Do not allow any command-line flags (defaults only).
|
||||
if !(requested_features.features.is_empty()
|
||||
&& !requested_features.all_features
|
||||
&& requested_features.uses_default_features)
|
||||
{
|
||||
anyhow::bail!("cannot specify features for packages outside of workspace");
|
||||
}
|
||||
// Add all members from the workspace so we can ensure `-p nonmember`
|
||||
// is in the resolve graph.
|
||||
return Ok(self
|
||||
.members()
|
||||
.map(|m| (m, RequestedFeatures::new_all(false)))
|
||||
.collect());
|
||||
}
|
||||
return Ok(members);
|
||||
} else {
|
||||
let ms = self.members().filter_map(|member| {
|
||||
let member_id = member.package_id();
|
||||
match self.current_opt() {
|
||||
// The features passed on the command-line only apply to
|
||||
// the "current" package (determined by the cwd).
|
||||
Some(current) if member_id == current.package_id() => {
|
||||
Some((member, requested_features.clone()))
|
||||
}
|
||||
_ => {
|
||||
// Ignore members that are not enabled on the command-line.
|
||||
if specs.iter().any(|spec| spec.matches(member_id)) {
|
||||
// -p for a workspace member that is not the
|
||||
// "current" one, don't use the local
|
||||
// `--features`, only allow `--all-features`.
|
||||
Some((
|
||||
member,
|
||||
RequestedFeatures::new_all(requested_features.all_features),
|
||||
))
|
||||
} else {
|
||||
// This member was not requested on the command-line, skip.
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
return Ok(ms.collect());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'cfg> Packages<'cfg> {
|
||||
|
@ -4,10 +4,13 @@ use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::core::compiler::unit_dependencies;
|
||||
use crate::core::compiler::UnitInterner;
|
||||
use crate::core::compiler::{BuildConfig, BuildContext, CompileKind, CompileMode, Context};
|
||||
use crate::core::compiler::{RustcTargetData, UnitInterner};
|
||||
use crate::core::profiles::{Profiles, UnitFor};
|
||||
use crate::core::Workspace;
|
||||
use crate::core::resolver::features::{
|
||||
FeatureResolver, FeaturesFor, HasDevUnits, RequestedFeatures,
|
||||
};
|
||||
use crate::core::{PackageIdSpec, Workspace};
|
||||
use crate::ops;
|
||||
use crate::util::errors::{CargoResult, CargoResultExt};
|
||||
use crate::util::paths;
|
||||
@ -61,6 +64,7 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
|
||||
let interner = UnitInterner::new();
|
||||
let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?;
|
||||
build_config.requested_profile = opts.requested_profile;
|
||||
let target_data = RustcTargetData::new(ws, build_config.requested_kind)?;
|
||||
let bcx = BuildContext::new(
|
||||
ws,
|
||||
&packages,
|
||||
@ -69,6 +73,22 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
|
||||
profiles,
|
||||
&interner,
|
||||
HashMap::new(),
|
||||
target_data,
|
||||
)?;
|
||||
let requested_features = RequestedFeatures::new_all(true);
|
||||
let specs = opts
|
||||
.spec
|
||||
.iter()
|
||||
.map(|spec| PackageIdSpec::parse(spec))
|
||||
.collect::<CargoResult<Vec<_>>>()?;
|
||||
let features = FeatureResolver::resolve(
|
||||
ws,
|
||||
&bcx.target_data,
|
||||
&resolve,
|
||||
&requested_features,
|
||||
&specs,
|
||||
bcx.build_config.requested_kind,
|
||||
HasDevUnits::Yes,
|
||||
)?;
|
||||
let mut units = Vec::new();
|
||||
|
||||
@ -98,7 +118,14 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
|
||||
*mode,
|
||||
)
|
||||
};
|
||||
let features = resolve.features_sorted(pkg.package_id());
|
||||
// Use unverified here since this is being more
|
||||
// exhaustive than what is actually needed.
|
||||
let features_for = match unit_for.is_for_build_dep() {
|
||||
true => FeaturesFor::BuildDep,
|
||||
false => FeaturesFor::NormalOrDev,
|
||||
};
|
||||
let features =
|
||||
features.activated_features_unverified(pkg.package_id(), features_for);
|
||||
units.push(bcx.units.intern(
|
||||
pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
|
||||
));
|
||||
@ -109,7 +136,7 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
|
||||
}
|
||||
|
||||
let unit_dependencies =
|
||||
unit_dependencies::build_unit_dependencies(&bcx, &resolve, None, &units, &[])?;
|
||||
unit_dependencies::build_unit_dependencies(&bcx, &resolve, &features, None, &units, &[])?;
|
||||
let mut cx = Context::new(config, &bcx, unit_dependencies, build_config.requested_kind)?;
|
||||
cx.prepare_units(None, &units)?;
|
||||
|
||||
|
@ -31,10 +31,11 @@ use std::sync::Arc;
|
||||
use crate::core::compiler::standard_lib;
|
||||
use crate::core::compiler::unit_dependencies::build_unit_dependencies;
|
||||
use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
|
||||
use crate::core::compiler::{CompileKind, CompileMode, Unit};
|
||||
use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit};
|
||||
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
|
||||
use crate::core::profiles::{Profiles, UnitFor};
|
||||
use crate::core::resolver::{Resolve, ResolveOpts};
|
||||
use crate::core::resolver::features::{self, FeaturesFor};
|
||||
use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts};
|
||||
use crate::core::{LibKind, Package, PackageSet, Target};
|
||||
use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace};
|
||||
use crate::ops;
|
||||
@ -306,18 +307,31 @@ pub fn compile_ws<'a>(
|
||||
build_config.requested_profile,
|
||||
ws.features(),
|
||||
)?;
|
||||
let target_data = RustcTargetData::new(ws, build_config.requested_kind)?;
|
||||
|
||||
let specs = spec.to_package_id_specs(ws)?;
|
||||
let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode);
|
||||
let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features);
|
||||
let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
|
||||
let has_dev_units = match filter.need_dev_deps(build_config.mode) {
|
||||
true => HasDevUnits::Yes,
|
||||
false => HasDevUnits::No,
|
||||
};
|
||||
let resolve = ops::resolve_ws_with_opts(
|
||||
ws,
|
||||
&target_data,
|
||||
build_config.requested_kind,
|
||||
&opts,
|
||||
&specs,
|
||||
has_dev_units,
|
||||
)?;
|
||||
let WorkspaceResolve {
|
||||
mut pkg_set,
|
||||
workspace_resolve,
|
||||
targeted_resolve: resolve,
|
||||
resolved_features,
|
||||
} = resolve;
|
||||
|
||||
let std_resolve = if let Some(crates) = &config.cli_unstable().build_std {
|
||||
let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std {
|
||||
if build_config.build_plan {
|
||||
config
|
||||
.shell()
|
||||
@ -329,10 +343,11 @@ pub fn compile_ws<'a>(
|
||||
// requested_target to an enum, or some other approach.
|
||||
anyhow::bail!("-Zbuild-std requires --target");
|
||||
}
|
||||
let (mut std_package_set, std_resolve) = standard_lib::resolve_std(ws, crates)?;
|
||||
let (mut std_package_set, std_resolve, std_features) =
|
||||
standard_lib::resolve_std(ws, &target_data, build_config.requested_kind, crates)?;
|
||||
remove_dylib_crate_type(&mut std_package_set)?;
|
||||
pkg_set.add_set(std_package_set);
|
||||
Some(std_resolve)
|
||||
Some((std_resolve, std_features))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@ -397,13 +412,16 @@ pub fn compile_ws<'a>(
|
||||
profiles,
|
||||
&interner,
|
||||
HashMap::new(),
|
||||
target_data,
|
||||
)?;
|
||||
|
||||
let units = generate_targets(
|
||||
ws,
|
||||
&to_builds,
|
||||
filter,
|
||||
build_config.requested_kind,
|
||||
&resolve,
|
||||
&resolved_features,
|
||||
&bcx,
|
||||
)?;
|
||||
|
||||
@ -420,10 +438,12 @@ pub fn compile_ws<'a>(
|
||||
crates.push("test".to_string());
|
||||
}
|
||||
}
|
||||
let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
|
||||
standard_lib::generate_std_roots(
|
||||
&bcx,
|
||||
&crates,
|
||||
std_resolve.as_ref().unwrap(),
|
||||
std_resolve,
|
||||
std_features,
|
||||
build_config.requested_kind,
|
||||
)?
|
||||
} else {
|
||||
@ -460,8 +480,14 @@ pub fn compile_ws<'a>(
|
||||
}
|
||||
}
|
||||
|
||||
let unit_dependencies =
|
||||
build_unit_dependencies(&bcx, &resolve, std_resolve.as_ref(), &units, &std_roots)?;
|
||||
let unit_dependencies = build_unit_dependencies(
|
||||
&bcx,
|
||||
&resolve,
|
||||
&resolved_features,
|
||||
std_resolve_features.as_ref(),
|
||||
&units,
|
||||
&std_roots,
|
||||
)?;
|
||||
|
||||
let ret = {
|
||||
let _p = profile::start("compiling");
|
||||
@ -658,6 +684,7 @@ fn generate_targets<'a>(
|
||||
filter: &CompileFilter,
|
||||
default_arch_kind: CompileKind,
|
||||
resolve: &'a Resolve,
|
||||
resolved_features: &features::ResolvedFeatures,
|
||||
bcx: &BuildContext<'a, '_>,
|
||||
) -> CargoResult<Vec<Unit<'a>>> {
|
||||
// Helper for creating a `Unit` struct.
|
||||
@ -720,7 +747,11 @@ fn generate_targets<'a>(
|
||||
let profile =
|
||||
bcx.profiles
|
||||
.get_profile(pkg.package_id(), ws.is_member(pkg), unit_for, target_mode);
|
||||
let features = resolve.features_sorted(pkg.package_id());
|
||||
|
||||
let features = Vec::from(resolved_features.activated_features(
|
||||
pkg.package_id(),
|
||||
FeaturesFor::NormalOrDev, // Root units are never build dependencies.
|
||||
));
|
||||
bcx.units.intern(
|
||||
pkg,
|
||||
target,
|
||||
@ -854,6 +885,10 @@ fn generate_targets<'a>(
|
||||
|
||||
// Only include targets that are libraries or have all required
|
||||
// features available.
|
||||
//
|
||||
// `features_map` is a map of &Package -> enabled_features
|
||||
// It is computed by the set of enabled features for the package plus
|
||||
// every enabled feature of every enabled dependency.
|
||||
let mut features_map = HashMap::new();
|
||||
let mut units = HashSet::new();
|
||||
for Proposal {
|
||||
@ -865,9 +900,9 @@ fn generate_targets<'a>(
|
||||
{
|
||||
let unavailable_features = match target.required_features() {
|
||||
Some(rf) => {
|
||||
let features = features_map
|
||||
.entry(pkg)
|
||||
.or_insert_with(|| resolve_all_features(resolve, pkg.package_id()));
|
||||
let features = features_map.entry(pkg).or_insert_with(|| {
|
||||
resolve_all_features(resolve, resolved_features, pkg.package_id())
|
||||
});
|
||||
rf.iter().filter(|f| !features.contains(*f)).collect()
|
||||
}
|
||||
None => Vec::new(),
|
||||
@ -895,18 +930,32 @@ fn generate_targets<'a>(
|
||||
Ok(units.into_iter().collect())
|
||||
}
|
||||
|
||||
/// Gets all of the features enabled for a package, plus its dependencies'
|
||||
/// features.
|
||||
///
|
||||
/// Dependencies are added as `dep_name/feat_name` because `required-features`
|
||||
/// wants to support that syntax.
|
||||
fn resolve_all_features(
|
||||
resolve_with_overrides: &Resolve,
|
||||
resolved_features: &features::ResolvedFeatures,
|
||||
package_id: PackageId,
|
||||
) -> HashSet<String> {
|
||||
let mut features = resolve_with_overrides.features(package_id).clone();
|
||||
let mut features: HashSet<String> = resolved_features
|
||||
.activated_features(package_id, FeaturesFor::NormalOrDev)
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
|
||||
// Include features enabled for use by dependencies so targets can also use them with the
|
||||
// required-features field when deciding whether to be built or skipped.
|
||||
for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
|
||||
for feature in resolve_with_overrides.features(dep_id) {
|
||||
for dep in deps {
|
||||
features.insert(dep.name_in_toml().to_string() + "/" + feature);
|
||||
for dep in deps {
|
||||
let features_for = match dep.is_build() {
|
||||
true => FeaturesFor::BuildDep,
|
||||
false => FeaturesFor::NormalOrDev,
|
||||
};
|
||||
for feature in resolved_features.activated_features(dep_id, features_for) {
|
||||
features.insert(dep.name_in_toml().to_string() + "/" + &feature);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
|
||||
use crate::core::resolver::ResolveOpts;
|
||||
use crate::core::compiler::RustcTargetData;
|
||||
use crate::core::resolver::{HasDevUnits, ResolveOpts};
|
||||
use crate::core::{Shell, Workspace};
|
||||
use crate::ops;
|
||||
use crate::util::CargoResult;
|
||||
@ -24,7 +25,16 @@ pub fn doc(ws: &Workspace<'_>, options: &DocOptions<'_>) -> CargoResult<()> {
|
||||
options.compile_opts.all_features,
|
||||
!options.compile_opts.no_default_features,
|
||||
);
|
||||
let ws_resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
|
||||
let requested_kind = options.compile_opts.build_config.requested_kind;
|
||||
let target_data = RustcTargetData::new(ws, requested_kind)?;
|
||||
let ws_resolve = ops::resolve_ws_with_opts(
|
||||
ws,
|
||||
&target_data,
|
||||
requested_kind,
|
||||
&opts,
|
||||
&specs,
|
||||
HasDevUnits::No,
|
||||
)?;
|
||||
|
||||
let ids = specs
|
||||
.iter()
|
||||
|
@ -24,7 +24,7 @@ pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> {
|
||||
let resolve = ops::resolve_with_previous(
|
||||
&mut registry,
|
||||
ws,
|
||||
ResolveOpts::everything(),
|
||||
&ResolveOpts::everything(),
|
||||
None,
|
||||
None,
|
||||
&[],
|
||||
@ -64,7 +64,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
|
||||
ops::resolve_with_previous(
|
||||
&mut registry,
|
||||
ws,
|
||||
ResolveOpts::everything(),
|
||||
&ResolveOpts::everything(),
|
||||
None,
|
||||
None,
|
||||
&[],
|
||||
@ -110,7 +110,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes
|
||||
let resolve = ops::resolve_with_previous(
|
||||
&mut registry,
|
||||
ws,
|
||||
ResolveOpts::everything(),
|
||||
&ResolveOpts::everything(),
|
||||
Some(&previous_resolve),
|
||||
Some(&to_avoid),
|
||||
&[],
|
||||
|
@ -7,8 +7,8 @@ use anyhow::{bail, format_err};
|
||||
use tempfile::Builder as TempFileBuilder;
|
||||
|
||||
use crate::core::compiler::Freshness;
|
||||
use crate::core::compiler::{CompileKind, DefaultExecutor, Executor};
|
||||
use crate::core::resolver::ResolveOpts;
|
||||
use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, RustcTargetData};
|
||||
use crate::core::resolver::{HasDevUnits, ResolveOpts};
|
||||
use crate::core::{Edition, Package, PackageId, PackageIdSpec, Source, SourceId, Workspace};
|
||||
use crate::ops;
|
||||
use crate::ops::common_for_install_and_uninstall::*;
|
||||
@ -492,10 +492,22 @@ fn check_yanked_install(ws: &Workspace<'_>) -> CargoResult<()> {
|
||||
return Ok(());
|
||||
}
|
||||
let specs = vec![PackageIdSpec::from_package_id(ws.current()?.package_id())];
|
||||
// CompileKind here doesn't really matter, it's only needed for features.
|
||||
let target_data = RustcTargetData::new(ws, CompileKind::Host)?;
|
||||
// It would be best if `source` could be passed in here to avoid a
|
||||
// duplicate "Updating", but since `source` is taken by value, then it
|
||||
// wouldn't be available for `compile_ws`.
|
||||
let ws_resolve = ops::resolve_ws_with_opts(ws, ResolveOpts::everything(), &specs)?;
|
||||
// TODO: It would be easier to use resolve_ws, but it does not honor
|
||||
// require_optional_deps to avoid writing the lock file. It might be good
|
||||
// to try to fix that.
|
||||
let ws_resolve = ops::resolve_ws_with_opts(
|
||||
ws,
|
||||
&target_data,
|
||||
CompileKind::Host,
|
||||
&ResolveOpts::everything(),
|
||||
&specs,
|
||||
HasDevUnits::No,
|
||||
)?;
|
||||
let mut sources = ws_resolve.pkg_set.sources_mut();
|
||||
|
||||
// Checking the yanked status involves taking a look at the registry and
|
||||
|
@ -1,6 +1,7 @@
|
||||
use crate::core::compiler::{CompileKind, CompileTarget, TargetInfo};
|
||||
use crate::core::resolver::{Resolve, ResolveOpts};
|
||||
use crate::core::{dependency, Dependency, Package, PackageId, Workspace};
|
||||
use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData};
|
||||
use crate::core::dependency::DepKind;
|
||||
use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts};
|
||||
use crate::core::{Dependency, InternedString, Package, PackageId, Workspace};
|
||||
use crate::ops::{self, Packages};
|
||||
use crate::util::CargoResult;
|
||||
use cargo_platform::Platform;
|
||||
@ -34,13 +35,7 @@ pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> Cargo
|
||||
let packages = ws.members().cloned().collect();
|
||||
(packages, None)
|
||||
} else {
|
||||
let resolve_opts = ResolveOpts::new(
|
||||
/*dev_deps*/ true,
|
||||
&opt.features,
|
||||
opt.all_features,
|
||||
!opt.no_default_features,
|
||||
);
|
||||
let (packages, resolve) = build_resolve_graph(ws, resolve_opts, &opt.filter_platform)?;
|
||||
let (packages, resolve) = build_resolve_graph(ws, opt)?;
|
||||
(packages, Some(resolve))
|
||||
};
|
||||
|
||||
@ -78,7 +73,7 @@ struct MetadataResolveNode {
|
||||
id: PackageId,
|
||||
dependencies: Vec<PackageId>,
|
||||
deps: Vec<Dep>,
|
||||
features: Vec<String>,
|
||||
features: Vec<InternedString>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@ -90,7 +85,7 @@ struct Dep {
|
||||
|
||||
#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct DepKindInfo {
|
||||
kind: dependency::DepKind,
|
||||
kind: DepKind,
|
||||
target: Option<Platform>,
|
||||
}
|
||||
|
||||
@ -106,23 +101,31 @@ impl From<&Dependency> for DepKindInfo {
|
||||
/// Builds the resolve graph as it will be displayed to the user.
|
||||
fn build_resolve_graph(
|
||||
ws: &Workspace<'_>,
|
||||
resolve_opts: ResolveOpts,
|
||||
target: &Option<String>,
|
||||
metadata_opts: &OutputMetadataOptions,
|
||||
) -> CargoResult<(Vec<Package>, MetadataResolve)> {
|
||||
let target_info = match target {
|
||||
Some(target) => {
|
||||
let config = ws.config();
|
||||
let ct = CompileTarget::new(target)?;
|
||||
let short_name = ct.short_name().to_string();
|
||||
let kind = CompileKind::Target(ct);
|
||||
let rustc = config.load_global_rustc(Some(ws))?;
|
||||
Some((short_name, TargetInfo::new(config, kind, &rustc, kind)?))
|
||||
}
|
||||
None => None,
|
||||
// TODO: Without --filter-platform, features are being resolved for `host` only.
|
||||
// How should this work?
|
||||
let requested_kind = match &metadata_opts.filter_platform {
|
||||
Some(t) => CompileKind::Target(CompileTarget::new(t)?),
|
||||
None => CompileKind::Host,
|
||||
};
|
||||
let target_data = RustcTargetData::new(ws, requested_kind)?;
|
||||
// Resolve entire workspace.
|
||||
let specs = Packages::All.to_package_id_specs(ws)?;
|
||||
let ws_resolve = ops::resolve_ws_with_opts(ws, resolve_opts, &specs)?;
|
||||
let resolve_opts = ResolveOpts::new(
|
||||
/*dev_deps*/ true,
|
||||
&metadata_opts.features,
|
||||
metadata_opts.all_features,
|
||||
!metadata_opts.no_default_features,
|
||||
);
|
||||
let ws_resolve = ops::resolve_ws_with_opts(
|
||||
ws,
|
||||
&target_data,
|
||||
requested_kind,
|
||||
&resolve_opts,
|
||||
&specs,
|
||||
HasDevUnits::Yes,
|
||||
)?;
|
||||
// Download all Packages. This is needed to serialize the information
|
||||
// for every package. In theory this could honor target filtering,
|
||||
// but that would be somewhat complex.
|
||||
@ -132,6 +135,7 @@ fn build_resolve_graph(
|
||||
.into_iter()
|
||||
.map(|pkg| (pkg.package_id(), pkg.clone()))
|
||||
.collect();
|
||||
|
||||
// Start from the workspace roots, and recurse through filling out the
|
||||
// map, filtering targets as necessary.
|
||||
let mut node_map = HashMap::new();
|
||||
@ -141,7 +145,8 @@ fn build_resolve_graph(
|
||||
member_pkg.package_id(),
|
||||
&ws_resolve.targeted_resolve,
|
||||
&package_map,
|
||||
target_info.as_ref(),
|
||||
&target_data,
|
||||
requested_kind,
|
||||
);
|
||||
}
|
||||
// Get a Vec of Packages.
|
||||
@ -161,27 +166,22 @@ fn build_resolve_graph_r(
|
||||
pkg_id: PackageId,
|
||||
resolve: &Resolve,
|
||||
package_map: &HashMap<PackageId, Package>,
|
||||
target: Option<&(String, TargetInfo)>,
|
||||
target_data: &RustcTargetData,
|
||||
requested_kind: CompileKind,
|
||||
) {
|
||||
if node_map.contains_key(&pkg_id) {
|
||||
return;
|
||||
}
|
||||
let features = resolve
|
||||
.features_sorted(pkg_id)
|
||||
.into_iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
let features = resolve.features(pkg_id).iter().cloned().collect();
|
||||
|
||||
let deps: Vec<Dep> = resolve
|
||||
.deps(pkg_id)
|
||||
.filter(|(_dep_id, deps)| match target {
|
||||
Some((short_name, info)) => deps.iter().any(|dep| {
|
||||
let platform = match dep.platform() {
|
||||
Some(p) => p,
|
||||
None => return true,
|
||||
};
|
||||
platform.matches(short_name, info.cfg())
|
||||
}),
|
||||
None => true,
|
||||
.filter(|(_dep_id, deps)| match requested_kind {
|
||||
CompileKind::Target(_) => deps
|
||||
.iter()
|
||||
.any(|dep| target_data.dep_platform_activated(dep, requested_kind)),
|
||||
// No --filter-platform is interpreted as "all platforms".
|
||||
CompileKind::Host => true,
|
||||
})
|
||||
.filter_map(|(dep_id, deps)| {
|
||||
let mut dep_kinds: Vec<_> = deps.iter().map(DepKindInfo::from).collect();
|
||||
@ -210,6 +210,13 @@ fn build_resolve_graph_r(
|
||||
};
|
||||
node_map.insert(pkg_id, node);
|
||||
for dep_id in to_visit {
|
||||
build_resolve_graph_r(node_map, dep_id, resolve, package_map, target);
|
||||
build_resolve_graph_r(
|
||||
node_map,
|
||||
dep_id,
|
||||
resolve,
|
||||
package_map,
|
||||
target_data,
|
||||
requested_kind,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -16,10 +16,10 @@ use serde_json::{self, json};
|
||||
use tar::{Archive, Builder, EntryType, Header};
|
||||
|
||||
use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
|
||||
use crate::core::resolver::ResolveOpts;
|
||||
|
||||
use crate::core::Feature;
|
||||
use crate::core::{
|
||||
Package, PackageId, PackageIdSpec, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace,
|
||||
Package, PackageId, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace,
|
||||
};
|
||||
use crate::ops;
|
||||
use crate::sources::PathSource;
|
||||
@ -152,21 +152,15 @@ fn build_lock(ws: &Workspace<'_>) -> CargoResult<String> {
|
||||
let new_pkg = Package::new(manifest, orig_pkg.manifest_path());
|
||||
|
||||
// Regenerate Cargo.lock using the old one as a guide.
|
||||
let specs = vec![PackageIdSpec::from_package_id(new_pkg.package_id())];
|
||||
let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
|
||||
let new_resolve = ops::resolve_ws_with_opts(&tmp_ws, ResolveOpts::everything(), &specs)?;
|
||||
let (pkg_set, new_resolve) = ops::resolve_ws(&tmp_ws)?;
|
||||
|
||||
if let Some(orig_resolve) = orig_resolve {
|
||||
compare_resolve(
|
||||
config,
|
||||
tmp_ws.current()?,
|
||||
&orig_resolve,
|
||||
&new_resolve.targeted_resolve,
|
||||
)?;
|
||||
compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
|
||||
}
|
||||
check_yanked(config, &new_resolve.pkg_set, &new_resolve.targeted_resolve)?;
|
||||
check_yanked(config, &pkg_set, &new_resolve)?;
|
||||
|
||||
ops::resolve_to_string(&tmp_ws, &new_resolve.targeted_resolve)
|
||||
ops::resolve_to_string(&tmp_ws, &new_resolve)
|
||||
}
|
||||
|
||||
// Checks that the package has some piece of metadata that a human can
|
||||
|
@ -10,19 +10,19 @@
|
||||
//! - `resolve_with_previous`: A low-level function for running the resolver,
|
||||
//! providing the most power and flexibility.
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::rc::Rc;
|
||||
|
||||
use log::{debug, trace};
|
||||
|
||||
use crate::core::compiler::{CompileKind, RustcTargetData};
|
||||
use crate::core::registry::PackageRegistry;
|
||||
use crate::core::resolver::{self, Resolve, ResolveOpts};
|
||||
use crate::core::resolver::features::{FeatureResolver, ResolvedFeatures};
|
||||
use crate::core::resolver::{self, HasDevUnits, Resolve, ResolveOpts};
|
||||
use crate::core::summary::Summary;
|
||||
use crate::core::Feature;
|
||||
use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
|
||||
use crate::ops;
|
||||
use crate::sources::PathSource;
|
||||
use crate::util::errors::{CargoResult, CargoResultExt};
|
||||
use crate::util::profile;
|
||||
use log::{debug, trace};
|
||||
use std::collections::HashSet;
|
||||
|
||||
/// Result for `resolve_ws_with_opts`.
|
||||
pub struct WorkspaceResolve<'a> {
|
||||
@ -36,6 +36,8 @@ pub struct WorkspaceResolve<'a> {
|
||||
/// The narrowed resolve, with the specific features enabled, and only the
|
||||
/// given package specs requested.
|
||||
pub targeted_resolve: Resolve,
|
||||
/// The features activated per package.
|
||||
pub resolved_features: ResolvedFeatures,
|
||||
}
|
||||
|
||||
const UNUSED_PATCH_WARNING: &str = "\
|
||||
@ -72,12 +74,14 @@ pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolv
|
||||
/// members. In this case, `opts.all_features` must be `true`.
|
||||
pub fn resolve_ws_with_opts<'a>(
|
||||
ws: &Workspace<'a>,
|
||||
opts: ResolveOpts,
|
||||
target_data: &RustcTargetData,
|
||||
requested_target: CompileKind,
|
||||
opts: &ResolveOpts,
|
||||
specs: &[PackageIdSpec],
|
||||
has_dev_units: HasDevUnits,
|
||||
) -> CargoResult<WorkspaceResolve<'a>> {
|
||||
let mut registry = PackageRegistry::new(ws.config())?;
|
||||
let mut add_patches = true;
|
||||
|
||||
let resolve = if ws.ignore_lock() {
|
||||
None
|
||||
} else if ws.require_optional_deps() {
|
||||
@ -122,10 +126,21 @@ pub fn resolve_ws_with_opts<'a>(
|
||||
|
||||
let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?;
|
||||
|
||||
let resolved_features = FeatureResolver::resolve(
|
||||
ws,
|
||||
target_data,
|
||||
&resolved_with_overrides,
|
||||
&opts.features,
|
||||
specs,
|
||||
requested_target,
|
||||
has_dev_units,
|
||||
)?;
|
||||
|
||||
Ok(WorkspaceResolve {
|
||||
pkg_set,
|
||||
workspace_resolve: resolve,
|
||||
targeted_resolve: resolved_with_overrides,
|
||||
resolved_features,
|
||||
})
|
||||
}
|
||||
|
||||
@ -137,7 +152,7 @@ fn resolve_with_registry<'cfg>(
|
||||
let resolve = resolve_with_previous(
|
||||
registry,
|
||||
ws,
|
||||
ResolveOpts::everything(),
|
||||
&ResolveOpts::everything(),
|
||||
prev.as_ref(),
|
||||
None,
|
||||
&[],
|
||||
@ -168,17 +183,12 @@ fn resolve_with_registry<'cfg>(
|
||||
pub fn resolve_with_previous<'cfg>(
|
||||
registry: &mut PackageRegistry<'cfg>,
|
||||
ws: &Workspace<'cfg>,
|
||||
opts: ResolveOpts,
|
||||
opts: &ResolveOpts,
|
||||
previous: Option<&Resolve>,
|
||||
to_avoid: Option<&HashSet<PackageId>>,
|
||||
specs: &[PackageIdSpec],
|
||||
register_patches: bool,
|
||||
) -> CargoResult<Resolve> {
|
||||
assert!(
|
||||
!specs.is_empty() || opts.all_features,
|
||||
"no specs requires all_features"
|
||||
);
|
||||
|
||||
// We only want one Cargo at a time resolving a crate graph since this can
|
||||
// involve a lot of frobbing of the global caches.
|
||||
let _lock = ws.config().acquire_package_cache_lock()?;
|
||||
@ -258,80 +268,20 @@ pub fn resolve_with_previous<'cfg>(
|
||||
registry.add_sources(Some(member.package_id().source_id()))?;
|
||||
}
|
||||
|
||||
let mut summaries = Vec::new();
|
||||
if ws.config().cli_unstable().package_features {
|
||||
let mut members = Vec::new();
|
||||
if specs.is_empty() {
|
||||
members.extend(ws.members());
|
||||
} else {
|
||||
if specs.len() > 1 && !opts.features.is_empty() {
|
||||
anyhow::bail!("cannot specify features for more than one package");
|
||||
}
|
||||
members.extend(
|
||||
ws.members()
|
||||
.filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))),
|
||||
);
|
||||
// Edge case: running `cargo build -p foo`, where `foo` is not a member
|
||||
// of current workspace. Add all packages from workspace to get `foo`
|
||||
// into the resolution graph.
|
||||
if members.is_empty() {
|
||||
if !(opts.features.is_empty() && !opts.all_features && opts.uses_default_features) {
|
||||
anyhow::bail!("cannot specify features for packages outside of workspace");
|
||||
}
|
||||
members.extend(ws.members());
|
||||
}
|
||||
}
|
||||
for member in members {
|
||||
let summaries: Vec<(Summary, ResolveOpts)> = ws
|
||||
.members_with_features(specs, &opts.features)?
|
||||
.into_iter()
|
||||
.map(|(member, features)| {
|
||||
let summary = registry.lock(member.summary().clone());
|
||||
summaries.push((summary, opts.clone()))
|
||||
}
|
||||
} else {
|
||||
for member in ws.members() {
|
||||
let summary_resolve_opts = if specs.is_empty() {
|
||||
// When resolving the entire workspace, resolve each member
|
||||
// with all features enabled.
|
||||
opts.clone()
|
||||
} else {
|
||||
// If we're not resolving everything though then we're constructing the
|
||||
// exact crate graph we're going to build. Here we don't necessarily
|
||||
// want to keep around all workspace crates as they may not all be
|
||||
// built/tested.
|
||||
//
|
||||
// Additionally, the `opts` specified represents command line
|
||||
// flags, which really only matters for the current package
|
||||
// (determined by the cwd). If other packages are specified (via
|
||||
// `-p`) then the command line flags like features don't apply to
|
||||
// them.
|
||||
//
|
||||
// As a result, if this `member` is the current member of the
|
||||
// workspace, then we use `opts` specified. Otherwise we use a
|
||||
// base `opts` with no features specified but using default features
|
||||
// for any other packages specified with `-p`.
|
||||
let member_id = member.package_id();
|
||||
match ws.current_opt() {
|
||||
Some(current) if member_id == current.package_id() => opts.clone(),
|
||||
_ => {
|
||||
if specs.iter().any(|spec| spec.matches(member_id)) {
|
||||
// -p for a workspace member that is not the
|
||||
// "current" one, don't use the local `--features`.
|
||||
ResolveOpts {
|
||||
dev_deps: opts.dev_deps,
|
||||
features: Rc::default(),
|
||||
all_features: opts.all_features,
|
||||
uses_default_features: true,
|
||||
}
|
||||
} else {
|
||||
// `-p` for non-member, skip.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let summary = registry.lock(member.summary().clone());
|
||||
summaries.push((summary, summary_resolve_opts));
|
||||
}
|
||||
};
|
||||
(
|
||||
summary,
|
||||
ResolveOpts {
|
||||
dev_deps: opts.dev_deps,
|
||||
features,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let root_replace = ws.root_replace();
|
||||
|
||||
|
@ -473,3 +473,71 @@ cargo +nightly -Zunstable-options -Zconfig-include --config somefile.toml build
|
||||
```
|
||||
|
||||
CLI paths are relative to the current working directory.
## Features

The `-Zfeatures` option causes Cargo to use a new feature resolver that can
resolve features differently from before. It takes a comma-separated list of
options indicating which new behaviors to enable. With no options, it should
behave the same as without the flag.

```console
cargo +nightly -Zfeatures=itarget,build_dep build
```

The available options are:

* `itarget` — Ignores features for target-specific dependencies for targets
  that don't match the current compile target. For example:

  ```toml
  [dependencies.common]
  version = "1.0"
  features = ["f1"]

  [target.'cfg(windows)'.dependencies.common]
  version = "1.0"
  features = ["f2"]
  ```

  When building this example for a non-Windows platform, the `f2` feature will
  *not* be enabled (see the sketch after this list).

* `build_dep` — Prevents features enabled on build dependencies from being
  enabled for normal dependencies. For example:

  ```toml
  [dependencies]
  log = "0.4"

  [build-dependencies]
  log = { version = "0.4", features = ["std"] }
  ```

  When building the build script, the `log` crate will be built with the `std`
  feature. When building the library of your package, it will not enable the
  feature.

* `dev_dep` — Prevents features enabled on dev dependencies from being enabled
  for normal dependencies. For example:

  ```toml
  [dependencies]
  serde = { version = "1.0", default-features = false }

  [dev-dependencies]
  serde = { version = "1.0", features = ["std"] }
  ```

  In this example, the library will normally link against `serde` without the
  `std` feature. However, when built as a test or example, it will include the
  `std` feature.

  This mode is ignored if you are building any test, bench, or example. That
  is, dev dependency features will still be unified if you run commands like
  `cargo test` or `cargo build --all-targets`.

* `all` — Enables all feature options (`itarget,build_dep,dev_dep`).

* `compare` — Compares the resolved features to the old resolver and prints
  any differences.
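As a rough sketch of what `itarget` changes in the example above (the `common` crate and its `f1`/`f2` features are only the hypothetical ones from that snippet), the shared dependency could assert at compile time that the Windows-only feature stays disabled on other platforms:

```rust
// Hypothetical src/lib.rs of the shared `common` dependency from the
// `itarget` example above.
//
// Without `-Zfeatures=itarget`, the `f2` feature requested by the
// `cfg(windows)`-only dependency entry is unified into every build of
// `common`, so this compile_error! fires even on non-Windows hosts.
// With the flag, `f2` stays disabled there and the crate builds.
#[cfg(all(not(windows), feature = "f2"))]
compile_error!("feature `f2` should only be enabled for the Windows-specific dependency");

pub fn describe() {
    // Report which features ended up enabled in this build.
    if cfg!(feature = "f1") {
        println!("f1 enabled");
    }
    if cfg!(feature = "f2") {
        println!("f2 enabled");
    }
}
```

To see what any of these options would change for an existing project, the `compare` option can be added to the list (for example, something like `cargo +nightly check -Zfeatures=all,compare`) to print any differences from the old resolver.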
@ -2142,3 +2142,53 @@ fn all_features_virtual_ws() {
|
||||
.with_stdout("f1\nf2\nf3\n")
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn slash_optional_enables() {
|
||||
// --features dep/feat will enable `dep` and set its feature.
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
|
||||
[dependencies]
|
||||
dep = {path="dep", optional=true}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
#[cfg(not(feature="dep"))]
|
||||
compile_error!("dep not set");
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"dep/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "dep"
|
||||
version = "0.1.0"
|
||||
|
||||
[features]
|
||||
feat = []
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"dep/src/lib.rs",
|
||||
r#"
|
||||
#[cfg(not(feature="feat"))]
|
||||
compile_error!("feat not set");
|
||||
"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
p.cargo("check")
|
||||
.with_status(101)
|
||||
.with_stderr_contains("[..]dep not set[..]")
|
||||
.run();
|
||||
|
||||
p.cargo("check --features dep/feat").run();
|
||||
}
|
||||
|
895 tests/testsuite/features2.rs Normal file
@ -0,0 +1,895 @@
|
||||
//! Tests for the new feature resolver.
|
||||
|
||||
use cargo_test_support::project;
|
||||
use cargo_test_support::registry::{Dependency, Package};
|
||||
|
||||
#[cargo_test]
|
||||
fn inactivate_targets() {
|
||||
// Basic test of `itarget`. A shared dependency where an inactive [target]
|
||||
// changes the features.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("f1", &[])
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
#[cfg(feature = "f1")]
|
||||
compile_error!("f1 should not activate");
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
Package::new("bar", "1.0.0")
|
||||
.add_dep(
|
||||
Dependency::new("common", "1.0")
|
||||
.target("cfg(whatever)")
|
||||
.enable_features(&["f1"]),
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
|
||||
[dependencies]
|
||||
common = "1.0"
|
||||
bar = "1.0"
|
||||
"#,
|
||||
)
|
||||
.file("src/lib.rs", "")
|
||||
.build();
|
||||
|
||||
p.cargo("check")
|
||||
.with_status(101)
|
||||
.with_stderr_contains("[..]f1 should not activate[..]")
|
||||
.run();
|
||||
|
||||
p.cargo("check -Zfeatures=itarget")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn inactive_target_optional() {
|
||||
// Activating optional [target] dependencies for an inactive target.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("f1", &[])
|
||||
.feature("f2", &[])
|
||||
.feature("f3", &[])
|
||||
.feature("f4", &[])
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn f() {
|
||||
if cfg!(feature="f1") { println!("f1"); }
|
||||
if cfg!(feature="f2") { println!("f2"); }
|
||||
if cfg!(feature="f3") { println!("f3"); }
|
||||
if cfg!(feature="f4") { println!("f4"); }
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
common = "1.0"
|
||||
|
||||
[target.'cfg(whatever)'.dependencies]
|
||||
dep1 = {path='dep1', optional=true}
|
||||
dep2 = {path='dep2', optional=true, features=["f3"]}
|
||||
common = {version="1.0", optional=true, features=["f4"]}
|
||||
|
||||
[features]
|
||||
foo1 = ["dep1/f2"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/main.rs",
|
||||
r#"
|
||||
fn main() {
|
||||
if cfg!(feature="foo1") { println!("foo1"); }
|
||||
if cfg!(feature="dep1") { println!("dep1"); }
|
||||
if cfg!(feature="dep2") { println!("dep2"); }
|
||||
if cfg!(feature="common") { println!("common"); }
|
||||
common::f();
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"dep1/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "dep1"
|
||||
version = "0.1.0"
|
||||
|
||||
[dependencies]
|
||||
common = {version="1.0", features=["f1"]}
|
||||
|
||||
[features]
|
||||
f2 = ["common/f2"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"dep1/src/lib.rs",
|
||||
r#"compile_error!("dep1 should not build");"#,
|
||||
)
|
||||
.file(
|
||||
"dep2/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "dep2"
|
||||
version = "0.1.0"
|
||||
|
||||
[dependencies]
|
||||
common = "1.0"
|
||||
|
||||
[features]
|
||||
f3 = ["common/f3"]
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"dep2/src/lib.rs",
|
||||
r#"compile_error!("dep2 should not build");"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
p.cargo("run --all-features")
|
||||
.with_stdout("foo1\ndep1\ndep2\ncommon\nf1\nf2\nf3\nf4\n")
|
||||
.run();
|
||||
p.cargo("run --features dep1")
|
||||
.with_stdout("dep1\nf1\n")
|
||||
.run();
|
||||
p.cargo("run --features foo1")
|
||||
.with_stdout("foo1\ndep1\nf1\nf2\n")
|
||||
.run();
|
||||
p.cargo("run --features dep2")
|
||||
.with_stdout("dep2\nf3\n")
|
||||
.run();
|
||||
p.cargo("run --features common")
|
||||
.with_stdout("common\nf4\n")
|
||||
.run();
|
||||
|
||||
p.cargo("run -Zfeatures=itarget --all-features")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.with_stdout("foo1\n")
|
||||
.run();
|
||||
p.cargo("run -Zfeatures=itarget --features dep1")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.with_stdout("dep1\n")
|
||||
.run();
|
||||
p.cargo("run -Zfeatures=itarget --features foo1")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.with_stdout("foo1\n")
|
||||
.run();
|
||||
p.cargo("run -Zfeatures=itarget --features dep2")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.with_stdout("dep2\n")
|
||||
.run();
|
||||
p.cargo("run -Zfeatures=itarget --features common")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.with_stdout("common")
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn decouple_build_deps() {
|
||||
// Basic test for `build_dep` decouple.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("f1", &[])
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
#[cfg(feature = "f1")]
|
||||
pub fn foo() {}
|
||||
#[cfg(not(feature = "f1"))]
|
||||
pub fn bar() {}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[build-dependencies]
|
||||
common = {version="1.0", features=["f1"]}
|
||||
|
||||
[dependencies]
|
||||
common = "1.0"
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"build.rs",
|
||||
r#"
|
||||
use common::foo;
|
||||
fn main() {}
|
||||
"#,
|
||||
)
|
||||
.file("src/lib.rs", "use common::bar;")
|
||||
.build();
|
||||
|
||||
p.cargo("check")
|
||||
.with_status(101)
|
||||
.with_stderr_contains("[..]unresolved import `common::bar`[..]")
|
||||
.run();
|
||||
|
||||
p.cargo("check -Zfeatures=build_dep")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn decouple_build_deps_nested() {
|
||||
// `build_dep` decouple of transitive dependencies.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("f1", &[])
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
#[cfg(feature = "f1")]
|
||||
pub fn foo() {}
|
||||
#[cfg(not(feature = "f1"))]
|
||||
pub fn bar() {}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[build-dependencies]
|
||||
bdep = {path="bdep"}
|
||||
|
||||
[dependencies]
|
||||
common = "1.0"
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"build.rs",
|
||||
r#"
|
||||
use bdep::foo;
|
||||
fn main() {}
|
||||
"#,
|
||||
)
|
||||
.file("src/lib.rs", "use common::bar;")
|
||||
.file(
|
||||
"bdep/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "bdep"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
common = {version="1.0", features=["f1"]}
|
||||
"#,
|
||||
)
|
||||
.file("bdep/src/lib.rs", "pub use common::foo;")
|
||||
.build();
|
||||
|
||||
p.cargo("check")
|
||||
.with_status(101)
|
||||
.with_stderr_contains("[..]unresolved import `common::bar`[..]")
|
||||
.run();
|
||||
|
||||
p.cargo("check -Zfeatures=build_dep")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn decouple_dev_deps() {
|
||||
// Basic test for `dev_dep` decouple.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("f1", &[])
|
||||
.feature("f2", &[])
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
// const ensures it uses the correct dependency at *build time*
|
||||
// compared to *link time*.
|
||||
#[cfg(all(feature="f1", not(feature="f2")))]
|
||||
pub const X: u32 = 1;
|
||||
|
||||
#[cfg(all(feature="f1", feature="f2"))]
|
||||
pub const X: u32 = 3;
|
||||
|
||||
pub fn foo() -> u32 {
|
||||
let mut res = 0;
|
||||
if cfg!(feature = "f1") {
|
||||
res |= 1;
|
||||
}
|
||||
if cfg!(feature = "f2") {
|
||||
res |= 2;
|
||||
}
|
||||
res
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
common = {version="1.0", features=["f1"]}
|
||||
|
||||
[dev-dependencies]
|
||||
common = {version="1.0", features=["f2"]}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/main.rs",
|
||||
r#"
|
||||
fn main() {
|
||||
let expected: u32 = std::env::args().skip(1).next().unwrap().parse().unwrap();
|
||||
assert_eq!(foo::foo(), expected);
|
||||
assert_eq!(foo::build_time(), expected);
|
||||
assert_eq!(common::foo(), expected);
|
||||
assert_eq!(common::X, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bin() {
|
||||
assert_eq!(foo::foo(), 3);
|
||||
assert_eq!(common::foo(), 3);
|
||||
assert_eq!(common::X, 3);
|
||||
assert_eq!(foo::build_time(), 3);
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn foo() -> u32 {
|
||||
common::foo()
|
||||
}
|
||||
|
||||
pub fn build_time() -> u32 {
|
||||
common::X
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lib() {
|
||||
assert_eq!(foo(), 3);
|
||||
assert_eq!(common::foo(), 3);
|
||||
assert_eq!(common::X, 3);
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"tests/t1.rs",
|
||||
r#"
|
||||
#[test]
|
||||
fn test_t1() {
|
||||
assert_eq!(foo::foo(), 3);
|
||||
assert_eq!(common::foo(), 3);
|
||||
assert_eq!(common::X, 3);
|
||||
assert_eq!(foo::build_time(), 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_main() {
|
||||
// Features are unified for main when run with `cargo test`,
|
||||
// even with -Zfeatures=dev_dep.
|
||||
let s = std::process::Command::new("target/debug/foo")
|
||||
.arg("3")
|
||||
.status().unwrap();
|
||||
assert!(s.success());
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
p.cargo("run 3").run();
|
||||
|
||||
p.cargo("run -Zfeatures=dev_dep 1")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
|
||||
p.cargo("test").run();
|
||||
|
||||
p.cargo("test -Zfeatures=dev_dep")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn build_script_runtime_features() {
|
||||
// Check that the CARGO_FEATURE_* environment variables are set correctly.
|
||||
//
|
||||
// This has a common dependency between build/normal/dev-deps, and it
|
||||
// queries which features it was built with in different circumstances.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("normal", &[])
|
||||
.feature("dev", &[])
|
||||
.feature("build", &[])
|
||||
.file(
|
||||
"build.rs",
|
||||
r#"
|
||||
fn is_set(name: &str) -> bool {
|
||||
std::env::var(name) == Ok("1".to_string())
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let mut res = 0;
|
||||
if is_set("CARGO_FEATURE_NORMAL") {
|
||||
res |= 1;
|
||||
}
|
||||
if is_set("CARGO_FEATURE_DEV") {
|
||||
res |= 2;
|
||||
}
|
||||
if is_set("CARGO_FEATURE_BUILD") {
|
||||
res |= 4;
|
||||
}
|
||||
println!("cargo:rustc-cfg=RunCustomBuild=\"{}\"", res);
|
||||
|
||||
let mut res = 0;
|
||||
if cfg!(feature = "normal") {
|
||||
res |= 1;
|
||||
}
|
||||
if cfg!(feature = "dev") {
|
||||
res |= 2;
|
||||
}
|
||||
if cfg!(feature = "build") {
|
||||
res |= 4;
|
||||
}
|
||||
println!("cargo:rustc-cfg=CustomBuild=\"{}\"", res);
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn foo() -> u32 {
|
||||
let mut res = 0;
|
||||
if cfg!(feature = "normal") {
|
||||
res |= 1;
|
||||
}
|
||||
if cfg!(feature = "dev") {
|
||||
res |= 2;
|
||||
}
|
||||
if cfg!(feature = "build") {
|
||||
res |= 4;
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
pub fn build_time() -> u32 {
|
||||
#[cfg(RunCustomBuild="1")] return 1;
|
||||
#[cfg(RunCustomBuild="3")] return 3;
|
||||
#[cfg(RunCustomBuild="4")] return 4;
|
||||
#[cfg(RunCustomBuild="5")] return 5;
|
||||
#[cfg(RunCustomBuild="7")] return 7;
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[build-dependencies]
|
||||
common = {version="1.0", features=["build"]}
|
||||
|
||||
[dependencies]
|
||||
common = {version="1.0", features=["normal"]}
|
||||
|
||||
[dev-dependencies]
|
||||
common = {version="1.0", features=["dev"]}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"build.rs",
|
||||
r#"
|
||||
fn main() {
|
||||
assert_eq!(common::foo(), common::build_time());
|
||||
println!("cargo:rustc-cfg=from_build=\"{}\"", common::foo());
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn foo() -> u32 {
|
||||
common::foo()
|
||||
}
|
||||
|
||||
pub fn build_time() -> u32 {
|
||||
common::build_time()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lib() {
|
||||
assert_eq!(common::foo(), common::build_time());
|
||||
assert_eq!(common::foo(),
|
||||
std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/main.rs",
|
||||
r#"
|
||||
fn main() {
|
||||
assert_eq!(common::foo(), common::build_time());
|
||||
assert_eq!(common::foo(),
|
||||
std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bin() {
|
||||
assert_eq!(common::foo(), common::build_time());
|
||||
assert_eq!(common::foo(),
|
||||
std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"tests/t1.rs",
|
||||
r#"
|
||||
#[test]
|
||||
fn test_t1() {
|
||||
assert_eq!(common::foo(), common::build_time());
|
||||
assert_eq!(common::foo(),
|
||||
std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_main() {
|
||||
// Features are unified for main when run with `cargo test`,
|
||||
// even with -Zfeatures=dev_dep.
|
||||
let s = std::process::Command::new("target/debug/foo")
|
||||
.status().unwrap();
|
||||
assert!(s.success());
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
// Old way, unifies all 3.
|
||||
p.cargo("run").env("CARGO_FEATURE_EXPECT", "7").run();
|
||||
|
||||
// normal + build unify
|
||||
p.cargo("run -Zfeatures=dev_dep")
|
||||
.env("CARGO_FEATURE_EXPECT", "5")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
|
||||
// Normal only.
|
||||
p.cargo("run -Zfeatures=dev_dep,build_dep")
|
||||
.env("CARGO_FEATURE_EXPECT", "1")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
|
||||
p.cargo("test").env("CARGO_FEATURE_EXPECT", "7").run();
|
||||
|
||||
// dev_deps are still unified with `cargo test`
|
||||
p.cargo("test -Zfeatures=dev_dep")
|
||||
.env("CARGO_FEATURE_EXPECT", "7")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
|
||||
// normal + dev unify
|
||||
p.cargo("test -Zfeatures=build_dep")
|
||||
.env("CARGO_FEATURE_EXPECT", "3")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn cyclical_dev_dep() {
|
||||
// Check how a cyclical dev-dependency will work.
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[features]
|
||||
dev = []
|
||||
|
||||
[dev-dependencies]
|
||||
foo = { path = '.', features = ["dev"] }
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn assert_dev(enabled: bool) {
|
||||
assert_eq!(enabled, cfg!(feature="dev"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_in_lib() {
|
||||
assert_dev(true);
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/main.rs",
|
||||
r#"
|
||||
fn main() {
|
||||
let expected: bool = std::env::args().skip(1).next().unwrap().parse().unwrap();
|
||||
foo::assert_dev(expected);
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"tests/t1.rs",
|
||||
r#"
|
||||
#[test]
|
||||
fn integration_links() {
|
||||
foo::assert_dev(true);
|
||||
// The lib linked with main.rs will also be unified.
|
||||
let s = std::process::Command::new("target/debug/foo")
|
||||
.arg("true")
|
||||
.status().unwrap();
|
||||
assert!(s.success());
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
// Old way unifies features.
|
||||
p.cargo("run true").run();
|
||||
|
||||
// Should decouple main.
|
||||
p.cargo("run -Zfeatures=dev_dep false")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
|
||||
// dev feature should always be enabled in tests.
|
||||
p.cargo("test").run();
|
||||
|
||||
// And this should be no different.
|
||||
p.cargo("test -Zfeatures=dev_dep")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn all_feature_opts() {
|
||||
// All feature options at once.
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("normal", &[])
|
||||
.feature("build", &[])
|
||||
.feature("dev", &[])
|
||||
.feature("itarget", &[])
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn feats() -> u32 {
|
||||
let mut res = 0;
|
||||
if cfg!(feature="normal") { res |= 1; }
|
||||
if cfg!(feature="build") { res |= 2; }
|
||||
if cfg!(feature="dev") { res |= 4; }
|
||||
if cfg!(feature="itarget") { res |= 8; }
|
||||
res
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
common = {version = "1.0", features=["normal"]}
|
||||
|
||||
[dev-dependencies]
|
||||
common = {version = "1.0", features=["dev"]}
|
||||
|
||||
[build-dependencies]
|
||||
common = {version = "1.0", features=["build"]}
|
||||
|
||||
[target.'cfg(whatever)'.dependencies]
|
||||
common = {version = "1.0", features=["itarget"]}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/main.rs",
|
||||
r#"
|
||||
fn main() {
|
||||
expect();
|
||||
}
|
||||
|
||||
fn expect() {
|
||||
let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap();
|
||||
assert_eq!(expected, common::feats());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_test() {
|
||||
expect();
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.build();
|
||||
|
||||
p.cargo("run").env("EXPECTED_FEATS", "15").run();
|
||||
|
||||
// Only normal feature.
|
||||
p.cargo("run -Zfeatures=all")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.env("EXPECTED_FEATS", "1")
|
||||
.run();
|
||||
|
||||
p.cargo("test").env("EXPECTED_FEATS", "15").run();
|
||||
|
||||
// only normal+dev
|
||||
p.cargo("test -Zfeatures=all")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.env("EXPECTED_FEATS", "5")
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn required_features_build_dep() {
|
||||
// Check that required-features handles build-dependencies correctly.
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[[bin]]
|
||||
name = "x"
|
||||
required-features = ["bdep/f1"]
|
||||
|
||||
[build-dependencies]
|
||||
bdep = {path="bdep"}
|
||||
"#,
|
||||
)
|
||||
.file("build.rs", "fn main() {}")
|
||||
.file(
|
||||
"src/bin/x.rs",
|
||||
r#"
|
||||
fn main() {}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"bdep/Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "bdep"
|
||||
version = "0.1.0"
|
||||
|
||||
[features]
|
||||
f1 = []
|
||||
"#,
|
||||
)
|
||||
.file("bdep/src/lib.rs", "")
|
||||
.build();
|
||||
|
||||
p.cargo("run")
|
||||
.with_status(101)
|
||||
.with_stderr(
|
||||
"\
|
||||
[ERROR] target `x` in package `foo` requires the features: `bdep/f1`
|
||||
Consider enabling them by passing, e.g., `--features=\"bdep/f1\"`
|
||||
",
|
||||
)
|
||||
.run();
|
||||
|
||||
p.cargo("run --features bdep/f1 -Zfeatures=build_dep")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.run();
|
||||
}
|
||||
|
||||
#[cargo_test]
|
||||
fn disabled_shared_build_dep() {
|
||||
// Check for a situation where an optional dep of a shared dep is enabled in
|
||||
// a normal dependency, but disabled in a build dependency. The unit tree is:
|
||||
// foo
|
||||
// ├── foo build.rs
|
||||
// | └── common (BUILD dependency, NO FEATURES)
|
||||
// └── common (Normal dependency, default features)
|
||||
// └── somedep
|
||||
Package::new("somedep", "1.0.0")
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn f() { println!("hello from somedep"); }
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
Package::new("common", "1.0.0")
|
||||
.feature("default", &["somedep"])
|
||||
.add_dep(Dependency::new("somedep", "1.0").optional(true))
|
||||
.file(
|
||||
"src/lib.rs",
|
||||
r#"
|
||||
pub fn check_somedep() -> bool {
|
||||
#[cfg(feature="somedep")]
|
||||
{
|
||||
extern crate somedep;
|
||||
somedep::f();
|
||||
true
|
||||
}
|
||||
#[cfg(not(feature="somedep"))]
|
||||
{
|
||||
println!("no somedep");
|
||||
false
|
||||
}
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.publish();
|
||||
|
||||
let p = project()
|
||||
.file(
|
||||
"Cargo.toml",
|
||||
r#"
|
||||
[package]
|
||||
name = "foo"
|
||||
version = "1.0.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
common = "1.0"
|
||||
|
||||
[build-dependencies]
|
||||
common = {version = "1.0", default-features = false}
|
||||
"#,
|
||||
)
|
||||
.file(
|
||||
"src/main.rs",
|
||||
"fn main() { assert!(common::check_somedep()); }",
|
||||
)
|
||||
.file(
|
||||
"build.rs",
|
||||
"fn main() { assert!(!common::check_somedep()); }",
|
||||
)
|
||||
.build();
|
||||
|
||||
p.cargo("run -Zfeatures=build_dep -v")
|
||||
.masquerade_as_nightly_cargo()
|
||||
.with_stdout("hello from somedep")
|
||||
.run();
|
||||
}
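Taken together, the tests above exercise the same flags a user would pass when trying the new resolver on an existing project. A minimal sketch of such a trial run, assuming a nightly toolchain:

```console
# Check how the project resolves with only target-gating enabled,
# then with every new behavior enabled at once.
cargo +nightly check -Zfeatures=itarget
cargo +nightly check -Zfeatures=all
```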
|
@ -44,6 +44,7 @@ mod doc;
|
||||
mod edition;
|
||||
mod error;
|
||||
mod features;
|
||||
mod features2;
|
||||
mod fetch;
|
||||
mod fix;
|
||||
mod freshness;
|
||||
|
@ -413,7 +413,7 @@ fn named_config_profile() {
|
||||
assert_eq!(p.overflow_checks, true); // "dev" built-in (ignore package override)
|
||||
|
||||
// build-override
|
||||
let bo = profiles.get_profile(a_pkg, true, UnitFor::new_build(), CompileMode::Build);
|
||||
let bo = profiles.get_profile(a_pkg, true, UnitFor::new_build(false), CompileMode::Build);
|
||||
assert_eq!(bo.name, "foo");
|
||||
assert_eq!(bo.codegen_units, Some(6)); // "foo" build override from config
|
||||
assert_eq!(bo.opt_level, "1"); // SAME as normal
|
||||
|