Add new feature resolver.

Eric Huss 2019-09-10 15:38:01 -07:00
parent db80baad21
commit 7caa1612cf
21 changed files with 1372 additions and 158 deletions

View File

@ -1,7 +1,9 @@
//! Code for building the standard library.
use crate::core::compiler::{BuildContext, CompileKind, CompileMode, Unit};
use crate::core::compiler::{BuildContext, CompileKind, CompileMode, RustcTargetData, Unit};
use crate::core::dependency::DepKind;
use crate::core::profiles::UnitFor;
use crate::core::resolver::features::ResolvedFeatures;
use crate::core::resolver::ResolveOpts;
use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace};
use crate::ops::{self, Packages};
@ -31,8 +33,10 @@ pub fn parse_unstable_flag(value: Option<&str>) -> Vec<String> {
/// Resolve the standard library dependencies.
pub fn resolve_std<'cfg>(
ws: &Workspace<'cfg>,
target_data: &RustcTargetData,
requested_target: CompileKind,
crates: &[String],
) -> CargoResult<(PackageSet<'cfg>, Resolve)> {
) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> {
let src_path = detect_sysroot_src_path(ws)?;
let to_patch = [
"rustc-std-workspace-core",
@ -99,8 +103,12 @@ pub fn resolve_std<'cfg>(
/*dev_deps*/ false, &features, /*all_features*/ false,
/*uses_default_features*/ true,
);
let resolve = ops::resolve_ws_with_opts(&std_ws, &opts, &specs)?;
Ok((resolve.pkg_set, resolve.targeted_resolve))
let resolve = ops::resolve_ws_with_opts(&std_ws, target_data, requested_target, &opts, &specs)?;
Ok((
resolve.pkg_set,
resolve.targeted_resolve,
resolve.resolved_features,
))
}
/// Generate a list of root `Unit`s for the standard library.
@ -110,6 +118,7 @@ pub fn generate_std_roots<'a>(
bcx: &BuildContext<'a, '_>,
crates: &[String],
std_resolve: &'a Resolve,
std_features: &ResolvedFeatures,
kind: CompileKind,
) -> CargoResult<Vec<Unit<'a>>> {
// Generate the root Units for the standard library.
@ -139,7 +148,11 @@ pub fn generate_std_roots<'a>(
unit_for,
mode,
);
let features = std_resolve.features_sorted(pkg.package_id());
let features = std_features.activated_features(
pkg.package_id(),
DepKind::Normal,
bcx.build_config.requested_kind,
);
Ok(bcx.units.intern(
pkg, lib, profile, kind, mode, features, /*is_std*/ true,
))

View File

@ -1,5 +1,5 @@
use crate::core::compiler::{CompileKind, CompileMode};
use crate::core::{profiles::Profile, Package, Target};
use crate::core::{profiles::Profile, InternedString, Package, Target};
use crate::util::hex::short_hash;
use std::cell::RefCell;
use std::collections::HashSet;
@ -50,7 +50,7 @@ pub struct UnitInner<'a> {
pub mode: CompileMode,
/// The `cfg` features to enable for this unit.
/// This must be sorted.
pub features: Vec<&'a str>,
pub features: Vec<InternedString>,
/// Whether this is a standard library unit.
pub is_std: bool,
}
@ -145,7 +145,7 @@ impl<'a> UnitInterner<'a> {
profile: Profile,
kind: CompileKind,
mode: CompileMode,
features: Vec<&'a str>,
features: Vec<InternedString>,
is_std: bool,
) -> Unit<'a> {
let inner = self.intern_inner(&UnitInner {

View File

@ -20,6 +20,7 @@ use crate::core::compiler::{BuildContext, CompileKind, CompileMode};
use crate::core::dependency::DepKind;
use crate::core::package::Downloads;
use crate::core::profiles::{Profile, UnitFor};
use crate::core::resolver::features::ResolvedFeatures;
use crate::core::resolver::Resolve;
use crate::core::{InternedString, Package, PackageId, Target};
use crate::CargoResult;
@ -53,7 +54,9 @@ struct State<'a, 'cfg> {
unit_dependencies: UnitGraph<'a>,
package_cache: HashMap<PackageId, &'a Package>,
usr_resolve: &'a Resolve,
usr_features: &'a ResolvedFeatures,
std_resolve: Option<&'a Resolve>,
std_features: Option<&'a ResolvedFeatures>,
/// This flag is `true` while generating the dependencies for the standard
/// library.
is_std: bool,
@ -62,10 +65,15 @@ struct State<'a, 'cfg> {
pub fn build_unit_dependencies<'a, 'cfg>(
bcx: &'a BuildContext<'a, 'cfg>,
resolve: &'a Resolve,
std_resolve: Option<&'a Resolve>,
features: &'a ResolvedFeatures,
std_resolve: Option<&'a (Resolve, ResolvedFeatures)>,
roots: &[Unit<'a>],
std_roots: &[Unit<'a>],
) -> CargoResult<UnitGraph<'a>> {
let (std_resolve, std_features) = match std_resolve {
Some((r, f)) => (Some(r), Some(f)),
None => (None, None),
};
let mut state = State {
bcx,
downloads: bcx.packages.enable_download()?,
@ -73,7 +81,9 @@ pub fn build_unit_dependencies<'a, 'cfg>(
unit_dependencies: HashMap::new(),
package_cache: HashMap::new(),
usr_resolve: resolve,
usr_features: features,
std_resolve,
std_features,
is_std: false,
};
@ -220,7 +230,7 @@ fn compute_deps<'a, 'cfg>(
unit_for: UnitFor,
) -> CargoResult<Vec<UnitDep<'a>>> {
if unit.mode.is_run_custom_build() {
return compute_deps_custom_build(unit, state);
return compute_deps_custom_build(unit, unit_for.dep_kind(), state);
} else if unit.mode.is_doc() {
// Note: this does not include doc test.
return compute_deps_doc(unit, state);
@ -228,40 +238,45 @@ fn compute_deps<'a, 'cfg>(
let bcx = state.bcx;
let id = unit.pkg.package_id();
let deps = state.resolve().deps(id).filter(|&(_id, deps)| {
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != dep.is_build() {
return false;
}
let filtered_deps = state
.resolve()
.deps(id)
.map(|(id, deps)| {
assert!(!deps.is_empty());
let filtered_deps = deps.iter().filter(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != dep.is_build() {
return false;
}
// If this dependency is **not** a transitive dependency, then it
// only applies to test/example targets.
if !dep.is_transitive()
&& !unit.target.is_test()
&& !unit.target.is_example()
&& !unit.mode.is_any_test()
{
return false;
}
// If this dependency is **not** a transitive dependency, then it
// only applies to test/example targets.
if !dep.is_transitive()
&& !unit.target.is_test()
&& !unit.target.is_example()
&& !unit.mode.is_any_test()
{
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !bcx.target_data.dep_platform_activated(dep, unit.kind) {
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !bcx.target_data.dep_platform_activated(dep, unit.kind) {
return false;
}
// If we've gotten past all that, then this dependency is
// actually used!
true
// If we've gotten past all that, then this dependency is
// actually used!
true
});
(id, filtered_deps.collect::<Vec<_>>())
})
});
.filter(|(_id, deps)| !deps.is_empty());
let mut ret = Vec::new();
for (id, _) in deps {
for (id, deps) in filtered_deps {
let pkg = match state.get(id)? {
Some(pkg) => pkg,
None => continue,
@ -271,7 +286,16 @@ fn compute_deps<'a, 'cfg>(
None => continue,
};
let mode = check_or_build_mode(unit.mode, lib);
let dep_unit_for = unit_for.with_for_host(lib.for_host());
let dep_kind = if unit.target.is_custom_build() {
DepKind::Build
} else if deps.iter().any(|dep| !dep.is_transitive()) {
DepKind::Development
} else {
DepKind::Normal
};
let dep_unit_for = unit_for
.with_for_host(lib.for_host())
.with_dep_kind(dep_kind);
if bcx.config.cli_unstable().dual_proc_macros && lib.proc_macro() && !unit.kind.is_host() {
let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, unit.kind, mode)?;
@ -299,7 +323,7 @@ fn compute_deps<'a, 'cfg>(
if unit.target.is_custom_build() {
return Ok(ret);
}
ret.extend(dep_build_script(unit, state)?);
ret.extend(dep_build_script(unit, unit_for.dep_kind(), state)?);
// If this target is a binary, test, example, etc, then it depends on
// the library of the same package. The call to `resolve.deps` above
@ -326,7 +350,7 @@ fn compute_deps<'a, 'cfg>(
t.is_bin() &&
// Skip binaries with required features that have not been selected.
t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
unit.features.contains(&f.as_str())
unit.features.contains(&InternedString::new(f.as_str()))
})
})
.map(|t| {
@ -353,6 +377,7 @@ fn compute_deps<'a, 'cfg>(
/// the returned set of units must all be run before `unit` is run.
fn compute_deps_custom_build<'a, 'cfg>(
unit: &Unit<'a>,
dep_kind: DepKind,
state: &mut State<'a, 'cfg>,
) -> CargoResult<Vec<UnitDep<'a>>> {
if let Some(links) = unit.pkg.manifest().links() {
@ -361,6 +386,9 @@ fn compute_deps_custom_build<'a, 'cfg>(
return Ok(Vec::new());
}
}
// All dependencies of this unit should use profiles for custom
// builds.
let unit_for = UnitFor::new_build().with_dep_kind(dep_kind);
// When not overridden, then the dependencies to run a build script are:
//
// 1. Compiling the build script itself.
@ -375,9 +403,7 @@ fn compute_deps_custom_build<'a, 'cfg>(
unit,
unit.pkg,
unit.target,
// All dependencies of this unit should use profiles for custom
// builds.
UnitFor::new_build(),
unit_for,
// Build scripts always compiled for the host.
CompileKind::Host,
CompileMode::Build,
@ -444,7 +470,7 @@ fn compute_deps_doc<'a, 'cfg>(
}
// Be sure to build/run the build script for documented libraries.
ret.extend(dep_build_script(unit, state)?);
ret.extend(dep_build_script(unit, DepKind::Normal, state)?);
// If we document a binary/example, we need the library available.
if unit.target.is_bin() || unit.target.is_example() {
@ -486,6 +512,7 @@ fn maybe_lib<'a>(
/// build script.
fn dep_build_script<'a>(
unit: &Unit<'a>,
dep_kind: DepKind,
state: &State<'a, '_>,
) -> CargoResult<Option<UnitDep<'a>>> {
unit.pkg
@ -499,12 +526,13 @@ fn dep_build_script<'a>(
.bcx
.profiles
.get_profile_run_custom_build(&unit.profile);
let script_unit_for = UnitFor::new_build().with_dep_kind(dep_kind);
new_unit_dep_with_profile(
state,
unit,
unit.pkg,
t,
UnitFor::new_build(),
script_unit_for,
unit.kind,
CompileMode::RunCustomBuild,
profile,
@ -569,7 +597,7 @@ fn new_unit_dep_with_profile<'a>(
let public = state
.resolve()
.is_public_dep(parent.pkg.package_id(), pkg.package_id());
let features = state.resolve().features_sorted(pkg.package_id());
let features = state.activated_features(pkg.package_id(), unit_for.dep_kind(), kind);
let unit = state
.bcx
.units
@ -674,6 +702,20 @@ impl<'a, 'cfg> State<'a, 'cfg> {
}
}
fn activated_features(
&self,
pkg_id: PackageId,
dep_kind: DepKind,
compile_kind: CompileKind,
) -> Vec<InternedString> {
let features = if self.is_std {
self.std_features.unwrap()
} else {
self.usr_features
};
features.activated_features(pkg_id, dep_kind, compile_kind)
}
fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
if let Some(pkg) = self.package_cache.get(&id) {
return Ok(Some(pkg));

View File

@ -92,6 +92,23 @@ pub enum DepKind {
Build,
}
impl DepKind {
/// Convert a DepKind from a package to one of its dependencies.
///
/// The rules here determine how feature decoupling works. This works in
/// conjunction with the new feature resolver.
pub fn sticky_kind(&self, to: DepKind) -> DepKind {
use DepKind::*;
match (self, to) {
(Normal, _) => to,
(Build, _) => Build,
(Development, Normal) => Development,
(Development, Build) => Build,
(Development, Development) => Development,
}
}
}
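// Illustrative sketch (not part of this commit): a self-contained model of the
// stickiness rules above, using a local stand-in enum so it can run on its own.
// A dependency reached through a build dependency is always treated as a build
// dependency, and a normal dependency of a dev dependency stays a dev dependency.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Kind { Normal, Development, Build }

fn sticky(from: Kind, to: Kind) -> Kind {
    use Kind::*;
    match (from, to) {
        (Normal, _) => to,
        (Build, _) => Build,
        (Development, Normal) => Development,
        (Development, Build) => Build,
        (Development, Development) => Development,
    }
}

fn main() {
    // A normal dep of a build dep is grouped with build deps.
    assert_eq!(sticky(Kind::Build, Kind::Normal), Kind::Build);
    // A normal dep of a dev dep stays in the dev-dep group.
    assert_eq!(sticky(Kind::Development, Kind::Normal), Kind::Development);
    // A build dep of a dev dep switches to the build-dep group.
    assert_eq!(sticky(Kind::Development, Kind::Build), Kind::Build);
}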
fn parse_req_with_deprecated(
name: InternedString,
req: &str,

View File

@ -341,6 +341,7 @@ pub struct CliUnstable {
pub doctest_xcompile: bool,
pub panic_abort_tests: bool,
pub jobserver_per_rustc: bool,
pub features: Option<Vec<String>>,
}
impl CliUnstable {
@ -380,6 +381,13 @@ impl CliUnstable {
}
}
fn parse_features(value: Option<&str>) -> Vec<String> {
match value {
None => Vec::new(),
Some(v) => v.split(',').map(|s| s.to_string()).collect(),
}
}
// Asserts that there is no argument to the flag.
fn parse_empty(key: &str, value: Option<&str>) -> CargoResult<bool> {
if let Some(v) = value {
@ -409,6 +417,7 @@ impl CliUnstable {
"doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?,
"panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?,
"jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?,
"features" => self.features = Some(parse_features(v)),
_ => bail!("unknown `-Z` flag specified: {}", k),
}

View File

@ -1,4 +1,5 @@
use crate::core::compiler::CompileMode;
use crate::core::dependency::DepKind;
use crate::core::interning::InternedString;
use crate::core::{Feature, Features, PackageId, PackageIdSpec, Resolve, Shell};
use crate::util::errors::CargoResultExt;
@ -767,6 +768,8 @@ pub struct UnitFor {
/// any of its dependencies. This enables `build-override` profiles for
/// these targets.
build: bool,
/// The dependency kind (normal, dev, build).
dep_kind: DepKind,
/// How Cargo processes the `panic` setting or profiles. This is done to
/// handle test/benches inheriting from dev/release, as well as forcing
/// `for_host` units to always unwind.
@ -794,6 +797,7 @@ impl UnitFor {
pub fn new_normal() -> UnitFor {
UnitFor {
build: false,
dep_kind: DepKind::Normal,
panic_setting: PanicSetting::ReadProfile,
}
}
@ -802,6 +806,7 @@ impl UnitFor {
pub fn new_build() -> UnitFor {
UnitFor {
build: true,
dep_kind: DepKind::Normal,
// Force build scripts to always use `panic=unwind` for now to
// maximally share dependencies with procedural macros.
panic_setting: PanicSetting::AlwaysUnwind,
@ -812,6 +817,7 @@ impl UnitFor {
pub fn new_compiler() -> UnitFor {
UnitFor {
build: false,
dep_kind: DepKind::Normal,
// Force plugins to use `panic=abort` so panics in the compiler do
// not abort the process but instead end with a reasonable error
// message that involves catching the panic in the compiler.
@ -819,7 +825,7 @@ impl UnitFor {
}
}
/// A unit for a test/bench target or their dependencies.
/// A unit for a test/bench target.
///
/// Note that `config` is taken here for unstable CLI features to detect
/// whether `panic=abort` is supported for tests. Historical versions of
@ -828,6 +834,7 @@ impl UnitFor {
pub fn new_test(config: &Config) -> UnitFor {
UnitFor {
build: false,
dep_kind: DepKind::Development,
// We're testing out an unstable feature (`-Zpanic-abort-tests`)
// which inherits the panic setting from the dev/release profile
// (basically avoid recompiles) but historical defaults required
@ -850,6 +857,7 @@ impl UnitFor {
pub fn with_for_host(self, for_host: bool) -> UnitFor {
UnitFor {
build: self.build || for_host,
dep_kind: self.dep_kind,
panic_setting: if for_host {
PanicSetting::AlwaysUnwind
} else {
@ -858,6 +866,15 @@ impl UnitFor {
}
}
/// Creates a variant for the given dependency kind.
///
/// This is part of the machinery responsible for handling feature
/// decoupling in the new feature resolver.
pub fn with_dep_kind(mut self, kind: DepKind) -> UnitFor {
self.dep_kind = self.dep_kind.sticky_kind(kind);
self
}
/// Returns `true` if this unit is for a custom build script or one of its
/// dependencies.
pub fn is_build(self) -> bool {
@ -869,23 +886,33 @@ impl UnitFor {
self.panic_setting
}
/// Returns the dependency kind this unit is intended for.
pub fn dep_kind(&self) -> DepKind {
self.dep_kind
}
/// All possible values, used by `clean`.
pub fn all_values() -> &'static [UnitFor] {
// dep_kind isn't needed for profiles, so its value doesn't matter.
static ALL: &[UnitFor] = &[
UnitFor {
build: false,
dep_kind: DepKind::Normal,
panic_setting: PanicSetting::ReadProfile,
},
UnitFor {
build: true,
dep_kind: DepKind::Normal,
panic_setting: PanicSetting::AlwaysUnwind,
},
UnitFor {
build: false,
dep_kind: DepKind::Normal,
panic_setting: PanicSetting::AlwaysUnwind,
},
UnitFor {
build: false,
dep_kind: DepKind::Normal,
panic_setting: PanicSetting::Inherit,
},
];

View File

@ -365,6 +365,7 @@ impl EncodableResolve {
metadata,
unused_patches,
version,
HashMap::new(),
))
}
}

View File

@ -1,8 +1,93 @@
//! Feature resolver.
//!
//! This is a new feature resolver that runs independently of the main
//! dependency resolver. It is intended to make it easier to experiment with
//! new behaviors. When `-Zfeatures` is not used, it will fall back to using
//! the original `Resolve` feature computation. With `-Zfeatures` enabled,
//! this will walk the dependency graph and compute the features using a
//! different algorithm. One of its key characteristics is that it can avoid
//! unifying features for shared dependencies in some situations.
//!
//! The preferred way to engage this new resolver is via
//! `resolve_ws_with_opts`.
//!
//! This resolver makes several assumptions: it expects the feature maps to
//! have already been validated (it performs no validation of its own), and it
//! assumes dev-dependencies within a dependency have been removed.
use crate::core::compiler::{CompileKind, RustcTargetData};
use crate::core::dependency::{DepKind, Dependency};
use crate::core::resolver::types::FeaturesSet;
use crate::core::InternedString;
use std::collections::BTreeSet;
use crate::core::resolver::Resolve;
use crate::core::{FeatureValue, InternedString, PackageId, PackageIdSpec, Workspace};
use crate::util::{CargoResult, Config};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::rc::Rc;
type ActivateMap = HashMap<(PackageId, DepKind, CompileKind), BTreeSet<InternedString>>;
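// Illustrative example (not from this commit): with `-Zfeatures=build_dep`, a
// dependency listed under both [dependencies] and [build-dependencies] can end
// up with two independent entries in this map, for example:
//   ("log 0.4", DepKind::Normal, CompileKind::Host) -> {}
//   ("log 0.4", DepKind::Build,  CompileKind::Host) -> {"std"}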
/// Set of all activated features for all packages in the resolve graph.
pub struct ResolvedFeatures {
activated_features: ActivateMap,
/// This is only here for legacy support when `-Zfeatures` is not enabled.
legacy: Option<HashMap<PackageId, Vec<InternedString>>>,
opts: FeatureOpts,
}
/// Options for how the feature resolver works.
#[derive(Default)]
struct FeatureOpts {
/// -Zpackage-features, changes behavior of feature flags in a workspace.
package_features: bool,
/// -Zfeatures is enabled, use new resolver.
new_resolver: bool,
/// Build deps will not share features with other dep kinds.
decouple_build_deps: bool,
/// Dev dep features will not be activated unless needed.
decouple_dev_deps: bool,
/// Targets that are not in use will not activate features.
ignore_inactive_targets: bool,
/// If enabled, compare against old resolver (for testing).
compare: bool,
}
impl FeatureOpts {
fn new(config: &Config) -> CargoResult<FeatureOpts> {
let mut opts = FeatureOpts::default();
let unstable_flags = config.cli_unstable();
opts.package_features = unstable_flags.package_features;
let mut enable = |feat_opts: &Vec<String>| {
opts.new_resolver = true;
for opt in feat_opts {
match opt.as_ref() {
"build_dep" => opts.decouple_build_deps = true,
"dev_dep" => opts.decouple_dev_deps = true,
"itarget" => opts.ignore_inactive_targets = true,
"compare" => opts.compare = true,
"ws" => unimplemented!(),
"host" => unimplemented!(),
s => anyhow::bail!("-Zfeatures flag `{}` is not supported", s),
}
}
Ok(())
};
if let Some(feat_opts) = unstable_flags.features.as_ref() {
enable(feat_opts)?;
}
// This env var is intended for testing only.
if let Ok(env_opts) = std::env::var("__CARGO_FORCE_NEW_FEATURES") {
if env_opts == "1" {
opts.new_resolver = true;
} else {
let env_opts = env_opts.split(',').map(|s| s.to_string()).collect();
enable(&env_opts)?;
}
}
Ok(opts)
}
}
/// Feature flags requested for a package.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct RequestedFeatures {
@ -44,3 +129,421 @@ impl RequestedFeatures {
.collect::<BTreeSet<InternedString>>()
}
}
impl ResolvedFeatures {
/// Returns the list of features that are enabled for the given package.
pub fn activated_features(
&self,
pkg_id: PackageId,
dep_kind: DepKind,
compile_kind: CompileKind,
) -> Vec<InternedString> {
if let Some(legacy) = &self.legacy {
legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone())
} else {
// TODO: Remove panic, return empty set.
let dep_kind = if (!self.opts.decouple_build_deps && dep_kind == DepKind::Build)
|| (!self.opts.decouple_dev_deps && dep_kind == DepKind::Development)
{
// Decoupling disabled, everything is unified under "Normal".
DepKind::Normal
} else {
dep_kind
};
let fs = self
.activated_features
.get(&(pkg_id, dep_kind, compile_kind))
.unwrap_or_else(|| panic!("features did not find {:?} {:?}", pkg_id, dep_kind));
fs.iter().cloned().collect()
}
}
}
pub struct FeatureResolver<'a, 'cfg> {
ws: &'a Workspace<'cfg>,
target_data: &'a RustcTargetData,
/// The platform to build for, requested by the user.
requested_target: CompileKind,
resolve: &'a Resolve,
/// Features requested by the user on the command-line.
requested_features: &'a RequestedFeatures,
/// Packages to build, requested on the command-line.
specs: &'a [PackageIdSpec],
/// Options that change how the feature resolver operates.
opts: FeatureOpts,
/// Map of features activated for each package.
activated_features: ActivateMap,
/// Keeps track of which packages have had their dependencies processed.
/// Used to avoid cycles, and to speed up processing.
processed_deps: HashSet<(PackageId, DepKind, CompileKind)>,
}
impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
/// Runs the resolution algorithm and returns a new `ResolvedFeatures`
/// with the result.
pub fn resolve(
ws: &Workspace<'cfg>,
target_data: &RustcTargetData,
resolve: &Resolve,
requested_features: &RequestedFeatures,
specs: &[PackageIdSpec],
requested_target: CompileKind,
) -> CargoResult<ResolvedFeatures> {
use crate::util::profile;
let _p = profile::start("resolve features");
let opts = FeatureOpts::new(ws.config())?;
if !opts.new_resolver {
// Legacy mode.
return Ok(ResolvedFeatures {
activated_features: HashMap::new(),
legacy: Some(resolve.features_clone()),
opts,
});
}
let mut r = FeatureResolver {
ws,
target_data,
requested_target,
resolve,
requested_features,
specs,
opts,
activated_features: HashMap::new(),
processed_deps: HashSet::new(),
};
r.do_resolve()?;
log::debug!("features={:#?}", r.activated_features);
if r.opts.compare {
r.compare();
}
Ok(ResolvedFeatures {
activated_features: r.activated_features,
legacy: None,
opts: r.opts,
})
}
/// Performs the process of resolving all features for the resolve graph.
fn do_resolve(&mut self) -> CargoResult<()> {
if self.opts.package_features {
let mut found = false;
for member in self.ws.members() {
let member_id = member.package_id();
if self.specs.iter().any(|spec| spec.matches(member_id)) {
found = true;
self.activate_member(member_id, self.requested_features)?;
}
}
if !found {
// -p for a non-member. Just resolve all with defaults.
let default = RequestedFeatures::new_all(false);
for member in self.ws.members() {
self.activate_member(member.package_id(), &default)?;
}
}
} else {
for member in self.ws.members() {
let member_id = member.package_id();
match self.ws.current_opt() {
Some(current) if member_id == current.package_id() => {
// The "current" member gets activated with the flags
// from the command line.
self.activate_member(member_id, self.requested_features)?;
}
_ => {
// Ignore members that are not enabled on the command-line.
if self.specs.iter().any(|spec| spec.matches(member_id)) {
// -p for a workspace member that is not the
// "current" one, don't use the local `--features`.
let not_current_requested =
RequestedFeatures::new_all(self.requested_features.all_features);
self.activate_member(member_id, &not_current_requested)?;
}
}
}
}
}
Ok(())
}
/// Enable the given features on the given workspace member.
fn activate_member(
&mut self,
pkg_id: PackageId,
requested_features: &RequestedFeatures,
) -> CargoResult<()> {
let fvs = self.fvs_from_requested(pkg_id, CompileKind::Host, requested_features);
self.activate_member_fvs(pkg_id, CompileKind::Host, &fvs)?;
if let CompileKind::Target(_) = self.requested_target {
let fvs = self.fvs_from_requested(pkg_id, self.requested_target, requested_features);
self.activate_member_fvs(pkg_id, self.requested_target, &fvs)?;
}
Ok(())
}
fn activate_member_fvs(
&mut self,
pkg_id: PackageId,
compile_kind: CompileKind,
fvs: &[FeatureValue],
) -> CargoResult<()> {
self.activate_with_platform(pkg_id, DepKind::Normal, compile_kind, &fvs)?;
if self.opts.decouple_dev_deps {
// Activate the member as a dev dep, assuming it has at least one
// test, bench, or example. This ensures the member's normal deps get
// unified with its dev deps.
self.activate_with_platform(pkg_id, DepKind::Development, compile_kind, &fvs)?;
}
Ok(())
}
fn activate_with_platform(
&mut self,
pkg_id: PackageId,
dep_kind: DepKind,
compile_kind: CompileKind,
fvs: &[FeatureValue],
) -> CargoResult<()> {
// Add an empty entry to ensure everything is covered. This is intended for
// finding bugs where the resolver missed something it should have visited.
// Remove this in the future if `activated_features` uses an empty default.
self.activated_features
.entry((pkg_id, dep_kind, compile_kind))
.or_insert_with(BTreeSet::new);
for fv in fvs {
self.activate_fv(pkg_id, dep_kind, compile_kind, fv)?;
}
if !self.processed_deps.insert((pkg_id, dep_kind, compile_kind)) {
// Already processed dependencies.
return Ok(());
}
// Activate any of its dependencies.
for (dep_pkg_id, deps) in self.deps(pkg_id, compile_kind) {
for dep in deps {
if dep.is_optional() {
continue;
}
// Recurse into the dependency.
let fvs = self.fvs_from_dependency(dep_pkg_id, dep);
self.activate_with_platform(
dep_pkg_id,
self.sticky_dep_kind(dep_kind, dep.kind()),
compile_kind,
&fvs,
)?;
}
}
return Ok(());
}
fn activate_fv(
&mut self,
pkg_id: PackageId,
dep_kind: DepKind,
compile_kind: CompileKind,
fv: &FeatureValue,
) -> CargoResult<()> {
match fv {
FeatureValue::Feature(f) => {
self.activate_rec(pkg_id, dep_kind, compile_kind, *f)?;
}
FeatureValue::Crate(dep_name) => {
// Activate the feature name on self.
self.activate_rec(pkg_id, dep_kind, compile_kind, *dep_name)?;
// Activate the optional dep.
for (dep_pkg_id, deps) in self.deps(pkg_id, compile_kind) {
for dep in deps {
if dep.name_in_toml() == *dep_name {
let fvs = self.fvs_from_dependency(dep_pkg_id, dep);
self.activate_with_platform(
dep_pkg_id,
self.sticky_dep_kind(dep_kind, dep.kind()),
compile_kind,
&fvs,
)?;
}
}
}
}
FeatureValue::CrateFeature(dep_name, dep_feature) => {
// Activate a feature within a dependency.
for (dep_pkg_id, deps) in self.deps(pkg_id, compile_kind) {
for dep in deps {
if dep.name_in_toml() == *dep_name {
if dep.is_optional() {
// Activate the crate on self.
let fv = FeatureValue::Crate(*dep_name);
self.activate_fv(pkg_id, dep_kind, compile_kind, &fv)?;
}
// Activate the feature on the dependency.
let summary = self.resolve.summary(dep_pkg_id);
let fv = FeatureValue::new(*dep_feature, summary);
self.activate_fv(
dep_pkg_id,
self.sticky_dep_kind(dep_kind, dep.kind()),
compile_kind,
&fv,
)?;
}
}
}
}
}
Ok(())
}
/// Activate the given feature for the given package, and then recursively
/// activate any other features that feature enables.
fn activate_rec(
&mut self,
pkg_id: PackageId,
dep_kind: DepKind,
compile_kind: CompileKind,
feature_to_enable: InternedString,
) -> CargoResult<()> {
let enabled = self
.activated_features
.entry((pkg_id, dep_kind, compile_kind))
.or_insert_with(BTreeSet::new);
if !enabled.insert(feature_to_enable) {
// Already enabled.
return Ok(());
}
let summary = self.resolve.summary(pkg_id);
let feature_map = summary.features();
let fvs = match feature_map.get(&feature_to_enable) {
Some(fvs) => fvs,
None => {
// TODO: this should only happen for optional dependencies.
// Other cases should be validated by Summary's `build_feature_map`.
// Figure out some way to validate this assumption.
log::debug!(
"pkg {:?} does not define feature {}",
pkg_id,
feature_to_enable
);
return Ok(());
}
};
for fv in fvs {
self.activate_fv(pkg_id, dep_kind, compile_kind, fv)?;
}
Ok(())
}
/// Returns Vec of FeatureValues from a Dependency definition.
fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec<FeatureValue> {
let summary = self.resolve.summary(dep_id);
let feature_map = summary.features();
let mut result: Vec<FeatureValue> = dep
.features()
.iter()
.map(|f| FeatureValue::new(*f, summary))
.collect();
let default = InternedString::new("default");
if dep.uses_default_features() && feature_map.contains_key(&default) {
result.push(FeatureValue::Feature(default));
}
result
}
/// Returns Vec of FeatureValues from a set of command-line features.
fn fvs_from_requested(
&self,
pkg_id: PackageId,
compile_kind: CompileKind,
requested_features: &RequestedFeatures,
) -> Vec<FeatureValue> {
let summary = self.resolve.summary(pkg_id);
let feature_map = summary.features();
if requested_features.all_features {
let mut fvs: Vec<FeatureValue> = feature_map
.keys()
.map(|k| FeatureValue::Feature(*k))
.collect();
// Add optional deps.
for (_dep_pkg_id, deps) in self.deps(pkg_id, compile_kind) {
for dep in deps {
if dep.is_optional() {
// This may result in duplicates, but that should be ok.
fvs.push(FeatureValue::Crate(dep.name_in_toml()));
}
}
}
fvs
} else {
let mut result: Vec<FeatureValue> = requested_features
.features
.as_ref()
.iter()
.map(|f| FeatureValue::new(*f, summary))
.collect();
let default = InternedString::new("default");
if requested_features.uses_default_features && feature_map.contains_key(&default) {
result.push(FeatureValue::Feature(default));
}
result
}
}
/// Returns the dependencies for a package, filtering out inactive targets.
fn deps(
&self,
pkg_id: PackageId,
compile_kind: CompileKind,
) -> Vec<(PackageId, Vec<&'a Dependency>)> {
self.resolve
.deps(pkg_id)
.map(|(dep_id, deps)| {
let deps = deps
.iter()
.filter(|dep| {
!dep.platform().is_some()
|| !self.opts.ignore_inactive_targets
|| self.target_data.dep_platform_activated(dep, compile_kind)
})
.collect::<Vec<_>>();
(dep_id, deps)
})
.collect()
}
/// Convert a DepKind from a package to one of its dependencies.
///
/// The rules here determine how decoupling works.
fn sticky_dep_kind(&self, from: DepKind, to: DepKind) -> DepKind {
if self.opts.decouple_build_deps {
if from == DepKind::Build || to == DepKind::Build {
return DepKind::Build;
}
}
if self.opts.decouple_dev_deps {
if to == DepKind::Development {
return DepKind::Development;
}
if from == DepKind::Development && to != DepKind::Build {
return DepKind::Development;
}
}
return DepKind::Normal;
}
/// Compare the activated features to the resolver. Used for testing.
fn compare(&self) {
let mut found = false;
for ((pkg_id, dep_kind, compile_kind), features) in &self.activated_features {
let r_features = self.resolve.features(*pkg_id);
if !r_features.iter().eq(features.iter()) {
eprintln!(
"{}/{:?}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n",
pkg_id, dep_kind, compile_kind, r_features, features
);
found = true;
}
}
if found {
panic!("feature mismatch");
}
}
}

View File

@ -108,9 +108,6 @@ mod types;
/// * `config` - a location to print warnings and such, or `None` if no warnings
/// should be printed
///
/// * `print_warnings` - whether or not to print backwards-compatibility
/// warnings and such
///
/// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions
/// introduced in the "public & private dependencies" RFC (1977). The current implementation
/// makes sure that there is only one version of each name visible to each package.
@ -143,17 +140,27 @@ pub fn resolve(
let cksum = summary.checksum().map(|s| s.to_string());
cksums.insert(summary.package_id(), cksum);
}
let graph = cx.graph();
let replacements = cx.resolve_replacements(&registry);
let features = cx
.resolve_features
.iter()
.map(|(k, v)| (*k, v.iter().cloned().collect()))
.collect();
let summaries = cx
.activations
.into_iter()
.map(|(_key, (summary, _age))| (summary.package_id(), summary))
.collect();
let resolve = Resolve::new(
cx.graph(),
cx.resolve_replacements(&registry),
cx.resolve_features
.iter()
.map(|(k, v)| (*k, v.iter().map(|x| x.to_string()).collect()))
.collect(),
graph,
replacements,
features,
cksums,
BTreeMap::new(),
Vec::new(),
ResolveVersion::default_for_new_lockfiles(),
summaries,
);
check_cycles(&resolve)?;
@ -163,11 +170,11 @@ pub fn resolve(
Ok(resolve)
}
/// Recursively activates the dependencies for `top`, in depth-first order,
/// Recursively activates the dependencies for `summaries`, in depth-first order,
/// backtracking across possible candidates for each dependency as necessary.
///
/// If all dependencies can be activated and resolved to a version in the
/// dependency graph, cx.resolve is returned.
/// dependency graph, `cx` is returned.
fn activate_deps_loop(
mut cx: Context,
registry: &mut RegistryQueryer<'_>,

View File

@ -1,15 +1,13 @@
use super::encode::Metadata;
use crate::core::dependency::DepKind;
use crate::core::interning::InternedString;
use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
use crate::util::errors::CargoResult;
use crate::util::Graph;
use std::borrow::Borrow;
use std::cmp;
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::iter::FromIterator;
use crate::core::dependency::DepKind;
use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
use crate::util::errors::CargoResult;
use crate::util::Graph;
use super::encode::Metadata;
/// Represents a fully-resolved package dependency graph. Each node in the graph
/// is a package and edges represent dependencies between packages.
@ -28,9 +26,9 @@ pub struct Resolve {
/// An empty `HashSet` to avoid creating a new `HashSet` for every package
/// that does not have any features, and to avoid using `Option` to
/// simplify the API.
empty_features: HashSet<String>,
empty_features: Vec<InternedString>,
/// Features enabled for a given package.
features: HashMap<PackageId, HashSet<String>>,
features: HashMap<PackageId, Vec<InternedString>>,
/// Checksum for each package. A SHA256 hash of the `.crate` file used to
/// validate the correct crate file is used. This is `None` for sources
/// that do not use `.crate` files, like path or git dependencies.
@ -50,6 +48,7 @@ pub struct Resolve {
/// Version of the `Cargo.lock` format, see
/// `cargo::core::resolver::encode` for more.
version: ResolveVersion,
summaries: HashMap<PackageId, Summary>,
}
/// A version to indicate how a `Cargo.lock` should be serialized. Currently
@ -73,11 +72,12 @@ impl Resolve {
pub fn new(
graph: Graph<PackageId, Vec<Dependency>>,
replacements: HashMap<PackageId, PackageId>,
features: HashMap<PackageId, HashSet<String>>,
features: HashMap<PackageId, Vec<InternedString>>,
checksums: HashMap<PackageId, Option<String>>,
metadata: Metadata,
unused_patches: Vec<PackageId>,
version: ResolveVersion,
summaries: HashMap<PackageId, Summary>,
) -> Resolve {
let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect();
let public_dependencies = graph
@ -103,10 +103,11 @@ impl Resolve {
checksums,
metadata,
unused_patches,
empty_features: HashSet::new(),
empty_features: Vec::new(),
reverse_replacements,
public_dependencies,
version,
summaries,
}
}
@ -285,10 +286,16 @@ unable to verify that `{0}` is the same as when the lockfile was generated
&self.replacements
}
pub fn features(&self, pkg: PackageId) -> &HashSet<String> {
pub fn features(&self, pkg: PackageId) -> &[InternedString] {
self.features.get(&pkg).unwrap_or(&self.empty_features)
}
/// This is only here for legacy support; it will be removed when
/// switching to the new feature resolver.
pub fn features_clone(&self) -> HashMap<PackageId, Vec<InternedString>> {
self.features.clone()
}
pub fn is_public_dep(&self, pkg: PackageId, dep: PackageId) -> bool {
self.public_dependencies
.get(&pkg)
@ -296,12 +303,6 @@ unable to verify that `{0}` is the same as when the lockfile was generated
.unwrap_or_else(|| panic!("Unknown dependency {:?} for package {:?}", dep, pkg))
}
pub fn features_sorted(&self, pkg: PackageId) -> Vec<&str> {
let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref()));
v.sort_unstable();
v
}
pub fn query(&self, spec: &str) -> CargoResult<PackageId> {
PackageIdSpec::query_str(spec, self.iter())
}
@ -374,6 +375,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated
pub fn version(&self) -> &ResolveVersion {
&self.version
}
pub fn summary(&self, pkg_id: PackageId) -> &Summary {
&self.summaries[&pkg_id]
}
}
impl PartialEq for Resolve {
@ -388,7 +393,7 @@ impl PartialEq for Resolve {
compare! {
// fields to compare
graph replacements reverse_replacements empty_features features
checksums metadata unused_patches public_dependencies
checksums metadata unused_patches public_dependencies summaries
|
// fields to ignore
version

View File

@ -6,8 +6,10 @@ use std::path::Path;
use crate::core::compiler::unit_dependencies;
use crate::core::compiler::{BuildConfig, BuildContext, CompileKind, CompileMode, Context};
use crate::core::compiler::{RustcTargetData, UnitInterner};
use crate::core::dependency::DepKind;
use crate::core::profiles::{Profiles, UnitFor};
use crate::core::Workspace;
use crate::core::resolver::features::{FeatureResolver, RequestedFeatures};
use crate::core::{PackageIdSpec, Workspace};
use crate::ops;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::paths;
@ -72,6 +74,20 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
HashMap::new(),
target_data,
)?;
let requested_features = RequestedFeatures::new_all(true);
let specs = opts
.spec
.iter()
.map(|spec| PackageIdSpec::parse(spec))
.collect::<CargoResult<Vec<_>>>()?;
let features = FeatureResolver::resolve(
ws,
&bcx.target_data,
&resolve,
&requested_features,
&specs,
bcx.build_config.requested_kind,
)?;
let mut units = Vec::new();
for spec in opts.spec.iter() {
@ -100,10 +116,13 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
*mode,
)
};
let features = resolve.features_sorted(pkg.package_id());
units.push(bcx.units.intern(
pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
));
for dep_kind in &[DepKind::Normal, DepKind::Development, DepKind::Build] {
let features =
features.activated_features(pkg.package_id(), *dep_kind, *kind);
units.push(bcx.units.intern(
pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
));
}
}
}
}
@ -111,7 +130,7 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
}
let unit_dependencies =
unit_dependencies::build_unit_dependencies(&bcx, &resolve, None, &units, &[])?;
unit_dependencies::build_unit_dependencies(&bcx, &resolve, &features, None, &units, &[])?;
let mut cx = Context::new(config, &bcx, unit_dependencies, build_config.requested_kind)?;
cx.prepare_units(None, &units)?;

View File

@ -33,7 +33,9 @@ use crate::core::compiler::unit_dependencies::build_unit_dependencies;
use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
use crate::core::dependency::DepKind;
use crate::core::profiles::{Profiles, UnitFor};
use crate::core::resolver::features;
use crate::core::resolver::{Resolve, ResolveOpts};
use crate::core::{LibKind, Package, PackageSet, Target};
use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace};
@ -311,14 +313,16 @@ pub fn compile_ws<'a>(
let specs = spec.to_package_id_specs(ws)?;
let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode);
let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features);
let resolve = ops::resolve_ws_with_opts(ws, &opts, &specs)?;
let resolve =
ops::resolve_ws_with_opts(ws, &target_data, build_config.requested_kind, &opts, &specs)?;
let WorkspaceResolve {
mut pkg_set,
workspace_resolve,
targeted_resolve: resolve,
resolved_features,
} = resolve;
let std_resolve = if let Some(crates) = &config.cli_unstable().build_std {
let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std {
if build_config.build_plan {
config
.shell()
@ -330,10 +334,11 @@ pub fn compile_ws<'a>(
// requested_target to an enum, or some other approach.
anyhow::bail!("-Zbuild-std requires --target");
}
let (mut std_package_set, std_resolve) = standard_lib::resolve_std(ws, crates)?;
let (mut std_package_set, std_resolve, std_features) =
standard_lib::resolve_std(ws, &target_data, build_config.requested_kind, crates)?;
remove_dylib_crate_type(&mut std_package_set)?;
pkg_set.add_set(std_package_set);
Some(std_resolve)
Some((std_resolve, std_features))
} else {
None
};
@ -407,6 +412,7 @@ pub fn compile_ws<'a>(
filter,
build_config.requested_kind,
&resolve,
&resolved_features,
&bcx,
)?;
@ -423,10 +429,12 @@ pub fn compile_ws<'a>(
crates.push("test".to_string());
}
}
let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
standard_lib::generate_std_roots(
&bcx,
&crates,
std_resolve.as_ref().unwrap(),
std_resolve,
std_features,
build_config.requested_kind,
)?
} else {
@ -463,8 +471,14 @@ pub fn compile_ws<'a>(
}
}
let unit_dependencies =
build_unit_dependencies(&bcx, &resolve, std_resolve.as_ref(), &units, &std_roots)?;
let unit_dependencies = build_unit_dependencies(
&bcx,
&resolve,
&resolved_features,
std_resolve_features.as_ref(),
&units,
&std_roots,
)?;
let ret = {
let _p = profile::start("compiling");
@ -661,6 +675,7 @@ fn generate_targets<'a>(
filter: &CompileFilter,
default_arch_kind: CompileKind,
resolve: &'a Resolve,
resolved_features: &features::ResolvedFeatures,
bcx: &BuildContext<'a, '_>,
) -> CargoResult<Vec<Unit<'a>>> {
// Helper for creating a `Unit` struct.
@ -723,7 +738,12 @@ fn generate_targets<'a>(
let profile =
bcx.profiles
.get_profile(pkg.package_id(), ws.is_member(pkg), unit_for, target_mode);
let features = resolve.features_sorted(pkg.package_id());
let features = Vec::from(resolved_features.activated_features(
pkg.package_id(),
DepKind::Normal,
kind,
));
bcx.units.intern(
pkg,
target,
@ -857,6 +877,10 @@ fn generate_targets<'a>(
// Only include targets that are libraries or have all required
// features available.
//
// `features_map` is a map of &Package -> enabled_features
// It is computed by the set of enabled features for the package plus
// every enabled feature of every enabled dependency.
let mut features_map = HashMap::new();
let mut units = HashSet::new();
for Proposal {
@ -868,9 +892,14 @@ fn generate_targets<'a>(
{
let unavailable_features = match target.required_features() {
Some(rf) => {
let features = features_map
.entry(pkg)
.or_insert_with(|| resolve_all_features(resolve, pkg.package_id()));
let features = features_map.entry(pkg).or_insert_with(|| {
resolve_all_features(
resolve,
resolved_features,
pkg.package_id(),
default_arch_kind,
)
});
rf.iter().filter(|f| !features.contains(*f)).collect()
}
None => Vec::new(),
@ -900,16 +929,24 @@ fn generate_targets<'a>(
fn resolve_all_features(
resolve_with_overrides: &Resolve,
resolved_features: &features::ResolvedFeatures,
package_id: PackageId,
default_arch_kind: CompileKind,
) -> HashSet<String> {
let mut features = resolve_with_overrides.features(package_id).clone();
let mut features: HashSet<String> = resolved_features
.activated_features(package_id, DepKind::Normal, default_arch_kind)
.iter()
.map(|s| s.to_string())
.collect();
// Include features enabled for use by dependencies so targets can also use them with the
// required-features field when deciding whether to be built or skipped.
for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
for feature in resolve_with_overrides.features(dep_id) {
for feature in
resolved_features.activated_features(dep_id, DepKind::Normal, default_arch_kind)
{
for dep in deps {
features.insert(dep.name_in_toml().to_string() + "/" + feature);
features.insert(dep.name_in_toml().to_string() + "/" + &feature);
}
}
}

View File

@ -1,3 +1,4 @@
use crate::core::compiler::RustcTargetData;
use crate::core::resolver::ResolveOpts;
use crate::core::{Shell, Workspace};
use crate::ops;
@ -24,7 +25,9 @@ pub fn doc(ws: &Workspace<'_>, options: &DocOptions<'_>) -> CargoResult<()> {
options.compile_opts.all_features,
!options.compile_opts.no_default_features,
);
let ws_resolve = ops::resolve_ws_with_opts(ws, &opts, &specs)?;
let requested_kind = options.compile_opts.build_config.requested_kind;
let target_data = RustcTargetData::new(ws, requested_kind)?;
let ws_resolve = ops::resolve_ws_with_opts(ws, &target_data, requested_kind, &opts, &specs)?;
let ids = specs
.iter()

View File

@ -7,7 +7,7 @@ use anyhow::{bail, format_err};
use tempfile::Builder as TempFileBuilder;
use crate::core::compiler::Freshness;
use crate::core::compiler::{CompileKind, DefaultExecutor, Executor};
use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, RustcTargetData};
use crate::core::resolver::ResolveOpts;
use crate::core::{Edition, Package, PackageId, PackageIdSpec, Source, SourceId, Workspace};
use crate::ops;
@ -492,10 +492,21 @@ fn check_yanked_install(ws: &Workspace<'_>) -> CargoResult<()> {
return Ok(());
}
let specs = vec![PackageIdSpec::from_package_id(ws.current()?.package_id())];
// CompileKind here doesn't really matter, it's only needed for features.
let target_data = RustcTargetData::new(ws, CompileKind::Host)?;
// It would be best if `source` could be passed in here to avoid a
// duplicate "Updating", but since `source` is taken by value, then it
// wouldn't be available for `compile_ws`.
let ws_resolve = ops::resolve_ws_with_opts(ws, &ResolveOpts::everything(), &specs)?;
// TODO: It would be easier to use resolve_ws, but it does not honor
// require_optional_deps to avoid writing the lock file. It might be good
// to try to fix that.
let ws_resolve = ops::resolve_ws_with_opts(
ws,
&target_data,
CompileKind::Host,
&ResolveOpts::everything(),
&specs,
)?;
let mut sources = ws_resolve.pkg_set.sources_mut();
// Checking the yanked status involves taking a look at the registry and

View File

@ -1,6 +1,8 @@
use crate::core::compiler::{CompileKind, CompileTarget, TargetInfo};
use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData};
use crate::core::resolver::{Resolve, ResolveOpts};
use crate::core::{dependency, Dependency, Package, PackageId, Workspace};
use crate::core::dependency::DepKind;
use crate::core::{Dependency, InternedString, Package, PackageId, Workspace};
use crate::ops::{self, Packages};
use crate::util::CargoResult;
use cargo_platform::Platform;
@ -34,13 +36,7 @@ pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> Cargo
let packages = ws.members().cloned().collect();
(packages, None)
} else {
let resolve_opts = ResolveOpts::new(
/*dev_deps*/ true,
&opt.features,
opt.all_features,
!opt.no_default_features,
);
let (packages, resolve) = build_resolve_graph(ws, resolve_opts, &opt.filter_platform)?;
let (packages, resolve) = build_resolve_graph(ws, opt)?;
(packages, Some(resolve))
};
@ -78,7 +74,7 @@ struct MetadataResolveNode {
id: PackageId,
dependencies: Vec<PackageId>,
deps: Vec<Dep>,
features: Vec<String>,
features: Vec<InternedString>,
}
#[derive(Serialize)]
@ -90,7 +86,7 @@ struct Dep {
#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
struct DepKindInfo {
kind: dependency::DepKind,
kind: DepKind,
target: Option<Platform>,
}
@ -106,23 +102,25 @@ impl From<&Dependency> for DepKindInfo {
/// Builds the resolve graph as it will be displayed to the user.
fn build_resolve_graph(
ws: &Workspace<'_>,
resolve_opts: ResolveOpts,
target: &Option<String>,
metadata_opts: &OutputMetadataOptions,
) -> CargoResult<(Vec<Package>, MetadataResolve)> {
let target_info = match target {
Some(target) => {
let config = ws.config();
let ct = CompileTarget::new(target)?;
let short_name = ct.short_name().to_string();
let kind = CompileKind::Target(ct);
let rustc = config.load_global_rustc(Some(ws))?;
Some((short_name, TargetInfo::new(config, kind, &rustc, kind)?))
}
None => None,
// TODO: Without --filter-platform, features are being resolved for `host` only.
// How should this work?
let requested_kind = match &metadata_opts.filter_platform {
Some(t) => CompileKind::Target(CompileTarget::new(t)?),
None => CompileKind::Host,
};
let target_data = RustcTargetData::new(ws, requested_kind)?;
// Resolve entire workspace.
let specs = Packages::All.to_package_id_specs(ws)?;
let ws_resolve = ops::resolve_ws_with_opts(ws, &resolve_opts, &specs)?;
let resolve_opts = ResolveOpts::new(
/*dev_deps*/ true,
&metadata_opts.features,
metadata_opts.all_features,
!metadata_opts.no_default_features,
);
let ws_resolve =
ops::resolve_ws_with_opts(ws, &target_data, requested_kind, &resolve_opts, &specs)?;
// Download all Packages. This is needed to serialize the information
// for every package. In theory this could honor target filtering,
// but that would be somewhat complex.
@ -132,6 +130,7 @@ fn build_resolve_graph(
.into_iter()
.map(|pkg| (pkg.package_id(), pkg.clone()))
.collect();
// Start from the workspace roots, and recurse through filling out the
// map, filtering targets as necessary.
let mut node_map = HashMap::new();
@ -141,7 +140,8 @@ fn build_resolve_graph(
member_pkg.package_id(),
&ws_resolve.targeted_resolve,
&package_map,
target_info.as_ref(),
&target_data,
requested_kind,
);
}
// Get a Vec of Packages.
@ -161,27 +161,22 @@ fn build_resolve_graph_r(
pkg_id: PackageId,
resolve: &Resolve,
package_map: &HashMap<PackageId, Package>,
target: Option<&(String, TargetInfo)>,
target_data: &RustcTargetData,
requested_kind: CompileKind,
) {
if node_map.contains_key(&pkg_id) {
return;
}
let features = resolve
.features_sorted(pkg_id)
.into_iter()
.map(|s| s.to_string())
.collect();
let features = resolve.features(pkg_id).into_iter().cloned().collect();
let deps: Vec<Dep> = resolve
.deps(pkg_id)
.filter(|(_dep_id, deps)| match target {
Some((short_name, info)) => deps.iter().any(|dep| {
let platform = match dep.platform() {
Some(p) => p,
None => return true,
};
platform.matches(short_name, info.cfg())
}),
None => true,
.filter(|(_dep_id, deps)| match requested_kind {
CompileKind::Target(_) => deps
.iter()
.any(|dep| target_data.dep_platform_activated(dep, requested_kind)),
// No --filter-platform is interpreted as "all platforms".
CompileKind::Host => true,
})
.filter_map(|(dep_id, deps)| {
let mut dep_kinds: Vec<_> = deps.iter().map(DepKindInfo::from).collect();
@ -210,6 +205,13 @@ fn build_resolve_graph_r(
};
node_map.insert(pkg_id, node);
for dep_id in to_visit {
build_resolve_graph_r(node_map, dep_id, resolve, package_map, target);
build_resolve_graph_r(
node_map,
dep_id,
resolve,
package_map,
target_data,
requested_kind,
);
}
}

View File

@ -16,10 +16,10 @@ use serde_json::{self, json};
use tar::{Archive, Builder, EntryType, Header};
use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
use crate::core::resolver::ResolveOpts;
use crate::core::Feature;
use crate::core::{
Package, PackageId, PackageIdSpec, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace,
Package, PackageId, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace,
};
use crate::ops;
use crate::sources::PathSource;
@ -152,21 +152,15 @@ fn build_lock(ws: &Workspace<'_>) -> CargoResult<String> {
let new_pkg = Package::new(manifest, orig_pkg.manifest_path());
// Regenerate Cargo.lock using the old one as a guide.
let specs = vec![PackageIdSpec::from_package_id(new_pkg.package_id())];
let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
let new_resolve = ops::resolve_ws_with_opts(&tmp_ws, &ResolveOpts::everything(), &specs)?;
let (pkg_set, new_resolve) = ops::resolve_ws(&tmp_ws)?;
if let Some(orig_resolve) = orig_resolve {
compare_resolve(
config,
tmp_ws.current()?,
&orig_resolve,
&new_resolve.targeted_resolve,
)?;
compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
}
check_yanked(config, &new_resolve.pkg_set, &new_resolve.targeted_resolve)?;
check_yanked(config, &pkg_set, &new_resolve)?;
ops::resolve_to_string(&tmp_ws, &new_resolve.targeted_resolve)
ops::resolve_to_string(&tmp_ws, &new_resolve)
}
// Checks that the package has some piece of metadata that a human can

View File

@ -10,8 +10,9 @@
//! - `resolve_with_previous`: A low-level function for running the resolver,
//! providing the most power and flexibility.
use crate::core::compiler::{CompileKind, RustcTargetData};
use crate::core::registry::PackageRegistry;
use crate::core::resolver::features::RequestedFeatures;
use crate::core::resolver::features::{FeatureResolver, RequestedFeatures, ResolvedFeatures};
use crate::core::resolver::{self, Resolve, ResolveOpts};
use crate::core::Feature;
use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
@ -34,6 +35,8 @@ pub struct WorkspaceResolve<'a> {
/// The narrowed resolve, with the specific features enabled, and only the
/// given package specs requested.
pub targeted_resolve: Resolve,
/// The features activated per package.
pub resolved_features: ResolvedFeatures,
}
const UNUSED_PATCH_WARNING: &str = "\
@ -70,6 +73,8 @@ pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolv
/// members. In this case, `opts.all_features` must be `true`.
pub fn resolve_ws_with_opts<'a>(
ws: &Workspace<'a>,
target_data: &RustcTargetData,
requested_target: CompileKind,
opts: &ResolveOpts,
specs: &[PackageIdSpec],
) -> CargoResult<WorkspaceResolve<'a>> {
@ -119,10 +124,20 @@ pub fn resolve_ws_with_opts<'a>(
let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?;
let resolved_features = FeatureResolver::resolve(
ws,
target_data,
&resolved_with_overrides,
&opts.features,
specs,
requested_target,
)?;
Ok(WorkspaceResolve {
pkg_set,
workspace_resolve: resolve,
targeted_resolve: resolved_with_overrides,
resolved_features,
})
}

View File

@ -473,3 +473,65 @@ cargo +nightly -Zunstable-options -Zconfig-include --config somefile.toml build
```
CLI paths are relative to the current working directory.
## Features
The `-Zfeatures` option causes Cargo to use a new feature resolver that can
resolve features differently from before. It takes a comma-separated list of
options to indicate which new behaviors to enable. With no options, it should
behave the same as without the flag.
```console
cargo +nightly -Zfeatures=itarget,build_dep
```
The available options are:
* `itarget`: Ignores features for target-specific dependencies for targets
that don't match the current compile target. For example:
```toml
[dependency.common]
version = "1.0"
features = ["f1"]
[target.'cfg(windows)'.dependencies.common]
version = "1.0"
features = ["f2"]
```
When building this example for a non-Windows platform, the `f2` feature will
*not* be enabled.
* `build_dep`: Prevents features enabled on build dependencies from being
enabled for normal dependencies. For example:
```toml
[dependencies]
log = "0.4"
[build-dependencies]
log = {version = "0.4", features=['std']}
```
When building the build script, the `log` crate will be built with the `std`
feature. When building the library of your package, it will not enable the
feature.
* `dev_dep`: Prevents features enabled on dev dependencies from being enabled
for normal dependencies. For example:
```toml
[dependencies]
serde = {version = "1.0", default-features = false}
[dev-dependencies]
serde = {version = "1.0", features = ["std"]}
```
In this example, the library will normally link against `serde` without the
`std` feature. However, when built as a test or example, it will include the
`std` feature.
* `compare`: Compares the resolved features against the old resolver, and
prints any differences; see the example below.
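For example, a hypothetical invocation like `cargo +nightly -Zfeatures=itarget,compare`
runs the new resolver with `itarget` enabled and reports (and fails on) any packages
whose resolved features differ from the old resolver's result.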

View File

@ -2142,3 +2142,53 @@ fn all_features_virtual_ws() {
.with_stdout("f1\nf2\nf3\n")
.run();
}
#[cargo_test]
fn slash_optional_enables() {
// --features dep/feat will enable `dep` and set its feature.
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
dep = {path="dep", optional=true}
"#,
)
.file(
"src/lib.rs",
r#"
#[cfg(not(feature="dep"))]
compile_error!("dep not set");
"#,
)
.file(
"dep/Cargo.toml",
r#"
[package]
name = "dep"
version = "0.1.0"
[features]
feat = []
"#,
)
.file(
"dep/src/lib.rs",
r#"
#[cfg(not(feature="feat"))]
compile_error!("feat not set");
"#,
)
.build();
p.cargo("check")
.with_status(101)
.with_stderr_contains("[..]dep not set[..]")
.run();
p.cargo("check --features dep/feat").run();
}

View File

@ -0,0 +1,396 @@
//! Tests for the new feature resolver.
use cargo_test_support::project;
use cargo_test_support::registry::{Dependency, Package};
#[cargo_test]
fn inactivate_targets() {
// Basic test of `itarget`. A shared dependency where an inactive [target]
// changes the features.
Package::new("common", "1.0.0")
.feature("f1", &[])
.file(
"src/lib.rs",
r#"
#[cfg(feature = "f1")]
compile_error!("f1 should not activate");
"#,
)
.publish();
Package::new("bar", "1.0.0")
.add_dep(
Dependency::new("common", "1.0")
.target("cfg(whatever)")
.enable_features(&["f1"]),
)
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
common = "1.0"
bar = "1.0"
"#,
)
.file("src/lib.rs", "")
.build();
p.cargo("check")
.with_status(101)
.with_stderr_contains("[..]f1 should not activate[..]")
.run();
p.cargo("check -Zfeatures=itarget")
.masquerade_as_nightly_cargo()
.run();
}
#[cargo_test]
fn inactive_target_optional() {
// Activating optional [target] dependencies for an inactive target.
Package::new("common", "1.0.0")
.feature("f1", &[])
.feature("f2", &[])
.feature("f3", &[])
.feature("f4", &[])
.file(
"src/lib.rs",
r#"
pub fn f() {
if cfg!(feature="f1") { println!("f1"); }
if cfg!(feature="f2") { println!("f2"); }
if cfg!(feature="f3") { println!("f3"); }
if cfg!(feature="f4") { println!("f4"); }
}
"#,
)
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
edition = "2018"
[dependencies]
common = "1.0"
[target.'cfg(whatever)'.dependencies]
dep1 = {path='dep1', optional=true}
dep2 = {path='dep2', optional=true, features=["f3"]}
common = {version="1.0", optional=true, features=["f4"]}
[features]
foo1 = ["dep1/f2"]
"#,
)
.file(
"src/main.rs",
r#"
fn main() {
if cfg!(feature="foo1") { println!("foo1"); }
if cfg!(feature="dep1") { println!("dep1"); }
if cfg!(feature="dep2") { println!("dep2"); }
if cfg!(feature="common") { println!("common"); }
common::f();
}
"#,
)
.file(
"dep1/Cargo.toml",
r#"
[package]
name = "dep1"
version = "0.1.0"
[dependencies]
common = {version="1.0", features=["f1"]}
[features]
f2 = ["common/f2"]
"#,
)
.file(
"dep1/src/lib.rs",
r#"compile_error!("dep1 should not build");"#,
)
.file(
"dep2/Cargo.toml",
r#"
[package]
name = "dep2"
version = "0.1.0"
[dependencies]
common = "1.0"
[features]
f3 = ["common/f3"]
"#,
)
.file(
"dep2/src/lib.rs",
r#"compile_error!("dep2 should not build");"#,
)
.build();
p.cargo("run --all-features")
.with_stdout("foo1\ndep1\ndep2\ncommon\nf1\nf2\nf3\nf4\n")
.run();
p.cargo("run --features dep1")
.with_stdout("dep1\nf1\n")
.run();
p.cargo("run --features foo1")
.with_stdout("foo1\ndep1\nf1\nf2\n")
.run();
p.cargo("run --features dep2")
.with_stdout("dep2\nf3\n")
.run();
p.cargo("run --features common")
.with_stdout("common\nf4\n")
.run();
p.cargo("run -Zfeatures=itarget --all-features")
.masquerade_as_nightly_cargo()
.with_stdout("foo1\n")
.run();
p.cargo("run -Zfeatures=itarget --features dep1")
.masquerade_as_nightly_cargo()
.with_stdout("dep1\n")
.run();
p.cargo("run -Zfeatures=itarget --features foo1")
.masquerade_as_nightly_cargo()
.with_stdout("foo1\n")
.run();
p.cargo("run -Zfeatures=itarget --features dep2")
.masquerade_as_nightly_cargo()
.with_stdout("dep2\n")
.run();
p.cargo("run -Zfeatures=itarget --features common")
.masquerade_as_nightly_cargo()
.with_stdout("common")
.run();
}
#[cargo_test]
fn decouple_build_deps() {
// Basic test for `build_dep` decouple.
Package::new("common", "1.0.0")
.feature("f1", &[])
.file(
"src/lib.rs",
r#"
#[cfg(feature = "f1")]
pub fn foo() {}
#[cfg(not(feature = "f1"))]
pub fn bar() {}
"#,
)
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
edition = "2018"
[build-dependencies]
common = {version="1.0", features=["f1"]}
[dependencies]
common = "1.0"
"#,
)
.file(
"build.rs",
r#"
use common::foo;
fn main() {}
"#,
)
.file("src/lib.rs", "use common::bar;")
.build();
p.cargo("check")
.with_status(101)
.with_stderr_contains("[..]unresolved import `common::bar`[..]")
.run();
p.cargo("check -Zfeatures=build_dep")
.masquerade_as_nightly_cargo()
.run();
}
#[cargo_test]
fn decouple_build_deps_nested() {
// `build_dep` decouple of transitive dependencies.
Package::new("common", "1.0.0")
.feature("f1", &[])
.file(
"src/lib.rs",
r#"
#[cfg(feature = "f1")]
pub fn foo() {}
#[cfg(not(feature = "f1"))]
pub fn bar() {}
"#,
)
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
edition = "2018"
[build-dependencies]
bdep = {path="bdep"}
[dependencies]
common = "1.0"
"#,
)
.file(
"build.rs",
r#"
use bdep::foo;
fn main() {}
"#,
)
.file("src/lib.rs", "use common::bar;")
.file(
"bdep/Cargo.toml",
r#"
[package]
name = "bdep"
version = "0.1.0"
edition = "2018"
[dependencies]
common = {version="1.0", features=["f1"]}
"#,
)
.file("bdep/src/lib.rs", "pub use common::foo;")
.build();
p.cargo("check")
.with_status(101)
.with_stderr_contains("[..]unresolved import `common::bar`[..]")
.run();
p.cargo("check -Zfeatures=build_dep")
.masquerade_as_nightly_cargo()
.run();
}
#[cargo_test]
fn decouple_dev_deps() {
// Basic test for `dev_dep` decouple.
Package::new("common", "1.0.0")
.feature("f1", &[])
.feature("f2", &[])
.file(
"src/lib.rs",
r#"
pub fn foo() -> u32 {
let mut res = 0;
if cfg!(feature = "f1") {
res |= 1;
}
if cfg!(feature = "f2") {
res |= 2;
}
res
}
"#,
)
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
edition = "2018"
[dependencies]
common = {version="1.0", features=["f1"]}
[dev-dependencies]
common = {version="1.0", features=["f2"]}
"#,
)
.file(
"src/main.rs",
r#"
fn main() {
assert_eq!(foo::foo(), 1);
assert_eq!(common::foo(), 1);
}
#[test]
fn test_bin() {
assert_eq!(foo::foo(), 3);
assert_eq!(common::foo(), 3);
}
"#,
)
.file(
"src/lib.rs",
r#"
pub fn foo() -> u32 {
common::foo()
}
#[test]
fn test_lib() {
assert_eq!(foo(), 3);
assert_eq!(common::foo(), 3);
}
"#,
)
.file(
"tests/t1.rs",
r#"
#[test]
fn test_t1() {
assert_eq!(foo::foo(), 3);
assert_eq!(common::foo(), 3);
}
"#,
)
.build();
p.cargo("run")
.with_status(101)
.with_stderr_contains("[..]assertion failed[..]")
.run();
p.cargo("run -Zfeatures=dev_dep")
.masquerade_as_nightly_cargo()
.run();
p.cargo("test").run();
p.cargo("test -Zfeatures=dev_dep")
.masquerade_as_nightly_cargo()
.run();
}

View File

@ -44,6 +44,7 @@ mod doc;
mod edition;
mod error;
mod features;
mod features2;
mod fetch;
mod fix;
mod freshness;