Merge remote-tracking branch 'origin/master' into custom-profile-pr-rfc
commit 33d5c837e2
@@ -27,7 +27,7 @@ crossbeam-utils = "0.6"
 crypto-hash = "0.3.1"
 curl = { version = "0.4.21", features = ['http2'] }
 curl-sys = "0.4.18"
-env_logger = "0.6.0"
+env_logger = "0.7.0"
 pretty_env_logger = { version = "0.3", optional = true }
 failure = "0.1.5"
 filetime = "0.2"
@@ -6,6 +6,7 @@ use std::env;
 use std::fs;
 use std::io::{self, ErrorKind};
 use std::path::{Path, PathBuf};
+use std::process::Command;
 use std::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::Mutex;
 
@@ -252,3 +253,14 @@ pub fn get_lib_extension(kind: &str) -> &str {
         _ => unreachable!(),
     }
 }
+
+/// Returns the sysroot as queried from rustc.
+pub fn sysroot() -> String {
+    let output = Command::new("rustc")
+        .arg("--print=sysroot")
+        .output()
+        .expect("rustc to run");
+    assert!(output.status.success());
+    let sysroot = String::from_utf8(output.stdout).unwrap();
+    sysroot.trim().to_string()
+}
@@ -203,7 +203,13 @@ impl<'cfg> Compilation<'cfg> {
             super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output);
         search_path.push(self.deps_output.clone());
         search_path.push(self.root_output.clone());
-        search_path.push(self.target_dylib_path.clone());
+        // For build-std, we don't want to accidentally pull in any shared
+        // libs from the sysroot that ships with rustc. This may not be
+        // required (at least I cannot craft a situation where it
+        // matters), but is here to be safe.
+        if self.config.cli_unstable().build_std.is_none() {
+            search_path.push(self.target_dylib_path.clone());
+        }
         search_path
     };
 
@@ -18,6 +18,7 @@ use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
 use super::fingerprint::Fingerprint;
 use super::job_queue::JobQueue;
 use super::layout::Layout;
+use super::standard_lib;
 use super::unit_dependencies::{UnitDep, UnitGraph};
 use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind};
 
@@ -301,7 +302,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         let dest = self.bcx.profiles.get_dir_name(profile_kind);
         let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
         let target_layout = match self.bcx.build_config.requested_target.as_ref() {
-            Some(target) => Some(Layout::new(self.bcx.ws, Some(target), &dest)?),
+            Some(target) => {
+                let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
+                standard_lib::prepare_sysroot(&layout)?;
+                Some(layout)
+            }
             None => None,
         };
         self.primary_packages
@@ -17,6 +17,7 @@ use super::job::{
     Freshness::{self, Dirty, Fresh},
     Job,
 };
+use super::standard_lib;
 use super::timings::Timings;
 use super::{BuildContext, BuildPlan, CompileMode, Context, Unit};
 use crate::core::compiler::ProfileKind;
@@ -608,7 +609,7 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
         id: u32,
         unit: &Unit<'a>,
         artifact: Artifact,
-        cx: &mut Context<'_, '_>,
+        cx: &mut Context<'a, '_>,
     ) -> CargoResult<()> {
         if unit.mode.is_run_custom_build() && cx.bcx.show_warnings(unit.pkg.package_id()) {
             self.emit_warnings(None, unit, cx)?;
@@ -618,6 +619,23 @@ impl<'a, 'cfg> JobQueue<'a, 'cfg> {
             Artifact::All => self.timings.unit_finished(id, unlocked),
             Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked),
         }
+        if unit.is_std && unit.kind == super::Kind::Target && !cx.bcx.build_config.build_plan {
+            // This is a bit of an unusual place to copy files around, and
+            // ideally this would be somewhere like the Work closure
+            // (`link_targets`). The tricky issue is handling rmeta files for
+            // pipelining. Since those are emitted asynchronously, the code
+            // path (like `on_stderr_line`) does not have enough information
+            // to know where the sysroot is, and that it is an std unit. If
+            // possible, it might be nice to eventually move this to the
+            // worker thread, but may be tricky to have the paths available.
+            // Another possibility is to disable pipelining between std ->
+            // non-std. The pipelining opportunities are small, and are not a
+            // huge win (in a full build, only proc_macro overlaps for 2
+            // seconds out of a 90s build on my system). Care must also be
+            // taken to properly copy these artifacts for Fresh units.
+            let rmeta = artifact == Artifact::Metadata;
+            standard_lib::add_sysroot_artifact(cx, unit, rmeta)?;
+        }
         Ok(())
     }
 
@@ -12,6 +12,10 @@
 //! .rustc-info.json
 //!
 //! # All final artifacts are linked into this directory from `deps`.
+//! # Note that named profiles will soon be included as separate directories
+//! # here. They have a restricted format, similar to Rust identifiers, so
+//! # Cargo-specific directories added in the future should use some prefix
+//! # like `.` to avoid name collisions.
 //! debug/  # or release/
 //!
 //! # File used to lock the directory to prevent multiple cargo processes
@@ -46,6 +50,11 @@
 //! # incremental is enabled.
 //! incremental/
 //!
+//! # The sysroot for -Zbuild-std builds. This only appears in
+//! # target-triple directories (not host), and only if -Zbuild-std is
+//! # enabled.
+//! .sysroot/
+//!
 //! # This is the location at which the output of all custom build
 //! # commands are rooted.
 //! build/
@@ -116,6 +125,10 @@ pub struct Layout {
     examples: PathBuf,
     /// The directory for rustdoc output: `$root/doc`
     doc: PathBuf,
+    /// The local sysroot for the build-std feature.
+    sysroot: Option<PathBuf>,
+    /// The "lib" directory within `sysroot`.
+    sysroot_libdir: Option<PathBuf>,
    /// The lockfile for a build (`.cargo-lock`). Will be unlocked when this
     /// struct is `drop`ped.
     _lock: FileLock,
@@ -139,18 +152,21 @@ impl Layout {
         // Flexible target specifications often point at json files, so interpret
         // the target triple as a Path and then just use the file stem as the
         // component for the directory name in that case.
-        if let Some(triple) = triple {
-            let triple = Path::new(triple);
-            if triple.extension().and_then(|s| s.to_str()) == Some("json") {
-                root.push(
-                    triple
-                        .file_stem()
-                        .ok_or_else(|| failure::format_err!("invalid target"))?,
-                );
-            } else {
-                root.push(triple);
-            }
-        }
+        let triple_path = if let Some(s) = triple {
+            let p = Path::new(s);
+            let tp = if p.extension().and_then(|s| s.to_str()) == Some("json") {
+                Path::new(
+                    p.file_stem()
+                        .ok_or_else(|| failure::format_err!("invalid target"))?,
+                )
+            } else {
+                p
+            };
+            root.push(tp);
+            Some(tp)
+        } else {
+            None
+        };
         let dest = root.join(dest);
         // If the root directory doesn't already exist go ahead and create it
         // here. Use this opportunity to exclude it from backups as well if the
@@ -167,6 +183,17 @@ impl Layout {
         let root = root.into_path_unlocked();
         let dest = dest.into_path_unlocked();
 
+        // Compute the sysroot path for the build-std feature.
+        let build_std = ws.config().cli_unstable().build_std.as_ref();
+        let (sysroot, sysroot_libdir) = if let Some(tp) = build_std.and(triple_path) {
+            // This uses a leading dot to avoid collision with named profiles.
+            let sysroot = dest.join(".sysroot");
+            let sysroot_libdir = sysroot.join("lib").join("rustlib").join(tp).join("lib");
+            (Some(sysroot), Some(sysroot_libdir))
+        } else {
+            (None, None)
+        };
+
         Ok(Layout {
             deps: dest.join("deps"),
             build: dest.join("build"),
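An illustrative aside (not part of the commit): a minimal, standalone sketch of the `.sysroot` path computation performed by `Layout::new` in the hunk above. The `dest` directory and target triple below are made-up example values.

```rust
use std::path::PathBuf;

fn main() {
    // Hypothetical inputs: `dest` stands in for the per-profile output
    // directory and `tp` for the target-triple path component used above.
    let dest = PathBuf::from("target/x86_64-unknown-linux-gnu/debug");
    let tp = "x86_64-unknown-linux-gnu";

    // The leading dot keeps the directory from colliding with a named profile.
    let sysroot = dest.join(".sysroot");
    let sysroot_libdir = sysroot.join("lib").join("rustlib").join(tp).join("lib");

    println!("sysroot        = {}", sysroot.display());
    println!("sysroot libdir = {}", sysroot_libdir.display());
}
```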
@@ -176,6 +203,8 @@ impl Layout {
             doc: root.join("doc"),
             root,
             dest,
+            sysroot,
+            sysroot_libdir,
             _lock: lock,
         })
     }
@@ -223,6 +252,16 @@ impl Layout {
     pub fn build(&self) -> &Path {
         &self.build
     }
+    /// The local sysroot for the build-std feature.
+    ///
+    /// Returns None if build-std is not enabled or this is the Host layout.
+    pub fn sysroot(&self) -> Option<&Path> {
+        self.sysroot.as_ref().map(|p| p.as_ref())
+    }
+    /// The "lib" directory within `sysroot`.
+    pub fn sysroot_libdir(&self) -> Option<&Path> {
+        self.sysroot_libdir.as_ref().map(|p| p.as_ref())
+    }
 }
 
 #[cfg(not(target_os = "macos"))]
@@ -50,7 +50,7 @@ use crate::util::paths;
 use crate::util::{self, machine_message, ProcessBuilder};
 use crate::util::{internal, join_paths, profile};
 
-/// Indicates whether an object is for the host architcture or the target architecture.
+/// Indicates whether an object is for the host architecture or the target architecture.
 ///
 /// These will be the same unless cross-compiling.
 #[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)]
@@ -915,6 +915,16 @@ fn build_base_args<'a, 'cfg>(
         let dir = cx.files().layout(unit.kind).incremental().as_os_str();
         opt(cmd, "-C", "incremental=", Some(dir));
     }
+
+    if unit.is_std {
+        // -Zforce-unstable-if-unmarked prevents the accidental use of
+        // unstable crates within the sysroot (such as "extern crate libc" or
+        // any non-public crate in the sysroot).
+        //
+        // RUSTC_BOOTSTRAP allows unstable features on stable.
+        cmd.arg("-Zforce-unstable-if-unmarked")
+            .env("RUSTC_BOOTSTRAP", "1");
+    }
     Ok(())
 }
 
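For illustration only (not part of the commit): a runnable sketch of what the two std-unit settings added above look like on a plain `rustc` invocation. The crate name and source file are placeholders.

```rust
use std::process::Command;

fn main() {
    // Placeholder crate name and file, purely for demonstration.
    let mut cmd = Command::new("rustc");
    cmd.arg("--crate-name").arg("mock_std_shim").arg("lib.rs");
    // For standard-library units: prevent accidental use of unstable sysroot
    // crates unless marked, and allow unstable features on a stable toolchain.
    cmd.arg("-Zforce-unstable-if-unmarked").env("RUSTC_BOOTSTRAP", "1");
    println!("{:?}", cmd);
}
```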
@@ -968,7 +978,17 @@ fn build_deps_args<'a, 'cfg>(
 
     let mut unstable_opts = false;
 
+    if let Some(sysroot) = cx.files().layout(Kind::Target).sysroot() {
+        if unit.kind == Kind::Target {
+            cmd.arg("--sysroot").arg(sysroot);
+        }
+    }
+
     for dep in deps {
+        if !unit.is_std && dep.unit.is_std {
+            // Dependency to sysroot crate uses --sysroot.
+            continue;
+        }
         if dep.unit.mode.is_run_custom_build() {
             cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit));
         }
@@ -1,11 +1,13 @@
 //! Code for building the standard library.
 
-use crate::core::compiler::{BuildContext, CompileMode, Kind, Unit};
+use super::layout::Layout;
+use crate::core::compiler::{BuildContext, CompileMode, Context, FileFlavor, Kind, Unit};
 use crate::core::profiles::UnitFor;
 use crate::core::resolver::ResolveOpts;
 use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace};
 use crate::ops::{self, Packages};
 use crate::util::errors::CargoResult;
+use crate::util::paths;
 use std::collections::{HashMap, HashSet};
 use std::env;
 use std::path::PathBuf;
@@ -141,9 +143,15 @@ pub fn generate_std_roots<'a>(
                 bcx.build_config.profile_kind.clone(),
             );
             let features = std_resolve.features_sorted(pkg.package_id());
-            Ok(bcx
-                .units
-                .intern(pkg, lib, profile, Kind::Target, mode, features))
+            Ok(bcx.units.intern(
+                pkg,
+                lib,
+                profile,
+                Kind::Target,
+                mode,
+                features,
+                /*is_std*/ true,
+            ))
         })
         .collect::<CargoResult<Vec<_>>>()
 }
@@ -173,3 +181,33 @@ fn detect_sysroot_src_path(ws: &Workspace<'_>) -> CargoResult<PathBuf> {
     }
     Ok(src_path)
 }
+
+/// Prepare the output directory for the local sysroot.
+pub fn prepare_sysroot(layout: &Layout) -> CargoResult<()> {
+    if let Some(libdir) = layout.sysroot_libdir() {
+        if libdir.exists() {
+            paths::remove_dir_all(libdir)?;
+        }
+        paths::create_dir_all(libdir)?;
+    }
+    Ok(())
+}
+
+/// Copy an artifact to the sysroot.
+pub fn add_sysroot_artifact<'a>(
+    cx: &Context<'a, '_>,
+    unit: &Unit<'a>,
+    rmeta: bool,
+) -> CargoResult<()> {
+    let outputs = cx.outputs(unit)?;
+    let outputs = outputs
+        .iter()
+        .filter(|output| output.flavor == FileFlavor::Linkable { rmeta })
+        .map(|output| &output.path);
+    for path in outputs {
+        let libdir = cx.files().layout(Kind::Target).sysroot_libdir().unwrap();
+        let dst = libdir.join(path.file_name().unwrap());
+        paths::link_or_copy(path, dst)?;
+    }
+    Ok(())
+}
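As an aside (not part of the commit): a standalone sketch of the prepare-then-copy flow that `prepare_sysroot` and `add_sysroot_artifact` implement above, written against plain `std::fs` instead of cargo's internal `paths` helpers. All paths are placeholders.

```rust
use std::fs;
use std::path::Path;

fn main() -> std::io::Result<()> {
    // Placeholder location mirroring `<dest>/.sysroot/lib/rustlib/<triple>/lib`.
    let libdir = Path::new("target/debug/.sysroot/lib/rustlib/example-triple/lib");

    // prepare_sysroot: start each build from a clean local sysroot lib dir.
    if libdir.exists() {
        fs::remove_dir_all(libdir)?;
    }
    fs::create_dir_all(libdir)?;

    // add_sysroot_artifact: hard-link (or fall back to copying) a finished
    // artifact into the local sysroot lib dir.
    let artifact = Path::new("target/debug/deps/libcore-0123456789abcdef.rlib");
    if artifact.exists() {
        let dst = libdir.join(artifact.file_name().unwrap());
        fs::hard_link(artifact, &dst).or_else(|_| fs::copy(artifact, &dst).map(|_| ()))?;
    }
    Ok(())
}
```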
@@ -163,9 +163,8 @@ function render_timing_graph() {
   for (c of CONCURRENCY_DATA) {
     max_v = Math.max(max_v, c.active, c.waiting, c.inactive);
   }
-  let [step, top] = split_ticks(max_v, GRAPH_HEIGHT / MIN_TICK_DIST);
-  let num_ticks = top / step;
-  let tick_dist = GRAPH_HEIGHT / num_ticks;
+  const px_per_v = GRAPH_HEIGHT / max_v;
+  const {step, tick_dist, num_ticks} = split_ticks(max_v, px_per_v, GRAPH_HEIGHT);
   ctx.textAlign = 'end';
   for (n=0; n<num_ticks; n++) {
     let y = HEIGHT - Y_LINE - ((n + 1) * tick_dist);
@@ -299,7 +298,7 @@ function draw_graph_axes(id, graph_height) {
   // 4096 is still ridiculously large, and probably won't render on mobile
   // browsers, but should be ok for many desktop environments.
   const graph_width = Math.min(scale * DURATION, 4096);
-  const px_per_sec = Math.floor(graph_width / DURATION);
+  const px_per_sec = graph_width / DURATION;
   const canvas_width = Math.max(graph_width + X_LINE + 30, X_LINE + 250);
   const canvas_height = graph_height + MARGIN + Y_LINE;
   let ctx = setup_canvas(id, canvas_width, canvas_height);
@@ -318,9 +317,7 @@ function draw_graph_axes(id, graph_height) {
   ctx.stroke();
 
   // Draw X tick marks.
-  const [step, top] = split_ticks(DURATION, graph_width / MIN_TICK_DIST);
-  const num_ticks = top / step;
-  const tick_dist = graph_width / num_ticks;
+  const {step, tick_dist, num_ticks} = split_ticks(DURATION, px_per_sec, graph_width);
   ctx.fillStyle = '#303030';
   for (let n=0; n<num_ticks; n++) {
     const x = X_LINE + ((n + 1) * tick_dist);
@@ -347,40 +344,39 @@ function draw_graph_axes(id, graph_height) {
   return {canvas_width, canvas_height, graph_width, graph_height, ctx, px_per_sec};
 }
 
-function round_up(n, step) {
-  if (n % step == 0) {
-    return n;
-  } else {
-    return (step - n % step) + n;
-  }
-}
-
-// Determine the `(step, max_value)` of the number of ticks along an axis.
-function split_ticks(n, max_ticks) {
-  max_ticks = Math.ceil(max_ticks);
-  if (n <= max_ticks) {
-    return [1, n];
-  } else if (n <= max_ticks * 2) {
-    return [2, round_up(n, 2)];
-  } else if (n <= max_ticks * 4) {
-    return [4, round_up(n, 4)];
-  } else if (n <= max_ticks * 5) {
-    return [5, round_up(n, 5)];
-  } else {
-    let step = 10;
-    let count = 0;
-    while (true) {
-      if (count > 100) {
-        throw Error("tick loop too long");
-      }
-      count += 1;
-      let top = round_up(n, step);
-      if (top <= max_ticks * step) {
-        return [step, top];
-      }
-      step += 10;
-    }
-  }
-}
+// Determine the spacing and number of ticks along an axis.
+function split_ticks(max_value, px_per_v, max_px) {
+  const max_ticks = Math.floor(max_px / MIN_TICK_DIST);
+  if (max_ticks <= 1) {
+    // Graph is too small for even 1 tick.
+    return {step: max_value, tick_dist: max_px, num_ticks: 1};
+  }
+  let step;
+  if (max_value <= max_ticks) {
+    step = 1;
+  } else if (max_value <= max_ticks * 2) {
+    step = 2;
+  } else if (max_value <= max_ticks * 4) {
+    step = 4;
+  } else if (max_value <= max_ticks * 5) {
+    step = 5;
+  } else {
+    step = 10;
+    let count = 0;
+    while (true) {
+      if (count > 100) {
+        throw Error("tick loop too long");
+      }
+      count += 1;
+      if (max_value <= max_ticks * step) {
+        break;
+      }
+      step += 10;
+    }
+  }
+  const tick_dist = px_per_v * step;
+  const num_ticks = Math.floor(max_value / step);
+  return {step, tick_dist, num_ticks};
+}
 
 function codegen_time(unit) {
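For illustration only (not part of the commit): a standalone Rust transliteration of the rewritten `split_ticks` above, so the tick-selection rule can be run and inspected outside the HTML report. `MIN_TICK_DIST` and the sample inputs are made-up values, and the iteration-limit guard from the JavaScript version is omitted for brevity.

```rust
// Minimum pixel distance between ticks; illustrative value only.
const MIN_TICK_DIST: f64 = 75.0;

fn split_ticks(max_value: f64, px_per_v: f64, max_px: f64) -> (f64, f64, u32) {
    let max_ticks = (max_px / MIN_TICK_DIST).floor();
    if max_ticks <= 1.0 {
        // Graph is too small for even one tick.
        return (max_value, max_px, 1);
    }
    // Pick the smallest "nice" step so that at most `max_ticks` ticks are drawn.
    let step = if max_value <= max_ticks {
        1.0
    } else if max_value <= max_ticks * 2.0 {
        2.0
    } else if max_value <= max_ticks * 4.0 {
        4.0
    } else if max_value <= max_ticks * 5.0 {
        5.0
    } else {
        let mut s = 10.0;
        while max_value > max_ticks * s {
            s += 10.0;
        }
        s
    };
    let tick_dist = px_per_v * step;
    let num_ticks = (max_value / step).floor() as u32;
    (step, tick_dist, num_ticks)
}

fn main() {
    // e.g. a 400px-tall graph whose largest value is 37 units.
    let (graph_height, max_v) = (400.0_f64, 37.0_f64);
    let px_per_v = graph_height / max_v;
    let (step, tick_dist, num_ticks) = split_ticks(max_v, px_per_v, graph_height);
    println!("step = {step}, tick_dist = {tick_dist:.1}px, num_ticks = {num_ticks}");
}
```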
@@ -133,7 +133,7 @@ impl<'a, 'cfg> Timings<'a, 'cfg> {
             unit_times: Vec::new(),
             active: HashMap::new(),
             concurrency: Vec::new(),
-            last_cpu_state: State::current().ok(),
+            last_cpu_state: if enabled { State::current().ok() } else { None },
             last_cpu_recording: Instant::now(),
             cpu_usage: Vec::new(),
         }
@@ -296,7 +296,7 @@ impl<'a, 'cfg> Timings<'a, 'cfg> {
 
     /// Save HTML report to disk.
     fn report_html(&self, bcx: &BuildContext<'_, '_>) -> CargoResult<()> {
-        let duration = self.start.elapsed().as_secs() as u32 + 1;
+        let duration = d_as_f64(self.start.elapsed());
         let timestamp = self.start_str.replace(&['-', ':'][..], "");
         let filename = format!("cargo-timing-{}.html", timestamp);
         let mut f = BufWriter::new(File::create(&filename)?);
@@ -309,11 +309,12 @@ impl<'a, 'cfg> Timings<'a, 'cfg> {
         self.write_summary_table(&mut f, duration, bcx)?;
         f.write_all(HTML_CANVAS.as_bytes())?;
         self.write_unit_table(&mut f)?;
+        // It helps with pixel alignment to use whole numbers.
         writeln!(
             f,
             "<script>\n\
             DURATION = {};",
-            duration
+            f64::ceil(duration) as u32
         )?;
         self.write_js_data(&mut f)?;
         write!(
@@ -344,7 +345,7 @@ impl<'a, 'cfg> Timings<'a, 'cfg> {
     fn write_summary_table(
         &self,
         f: &mut impl Write,
-        duration: u32,
+        duration: f64,
         bcx: &BuildContext<'_, '_>,
     ) -> CargoResult<()> {
         let targets: Vec<String> = self
@@ -353,12 +354,12 @@ impl<'a, 'cfg> Timings<'a, 'cfg> {
             .map(|(name, targets)| format!("{} ({})", name, targets.join(", ")))
             .collect();
         let targets = targets.join("<br>");
-        let time_human = if duration > 60 {
-            format!(" ({}m {:02}s)", duration / 60, duration % 60)
+        let time_human = if duration > 60.0 {
+            format!(" ({}m {:.1}s)", duration as u32 / 60, duration % 60.0)
         } else {
             "".to_string()
         };
-        let total_time = format!("{}s{}", duration, time_human);
+        let total_time = format!("{:.1}s{}", duration, time_human);
         let max_concurrency = self.concurrency.iter().map(|c| c.active).max().unwrap();
         let rustc_info = render_rustc_info(bcx);
         write!(
@@ -51,6 +51,8 @@ pub struct UnitInner<'a> {
     /// The `cfg` features to enable for this unit.
     /// This must be sorted.
     pub features: Vec<&'a str>,
+    /// Whether this is a standard library unit.
+    pub is_std: bool,
 }
 
 impl UnitInner<'_> {
@@ -144,6 +146,7 @@ impl<'a> UnitInterner<'a> {
         kind: Kind,
         mode: CompileMode,
         features: Vec<&'a str>,
+        is_std: bool,
     ) -> Unit<'a> {
         let inner = self.intern_inner(&UnitInner {
             pkg,
@@ -152,6 +155,7 @@ impl<'a> UnitInterner<'a> {
             kind,
             mode,
             features,
+            is_std,
         });
         Unit { inner }
     }
@@ -573,7 +573,7 @@ fn new_unit_dep_with_profile<'a>(
     let unit = state
         .bcx
         .units
-        .intern(pkg, target, profile, kind, mode, features);
+        .intern(pkg, target, profile, kind, mode, features, state.is_std);
     Ok(UnitDep {
         unit,
         unit_for,
@@ -390,6 +390,7 @@ impl CliUnstable {
             "advanced-env" => self.advanced_env = true,
             "config-profile" => self.config_profile = true,
             "dual-proc-macros" => self.dual_proc_macros = true,
+            // can also be set in .cargo/config or with an ENV
             "mtime-on-use" => self.mtime_on_use = true,
             "install-upgrade" => self.install_upgrade = true,
             "cache-messages" => self.cache_messages = true,
@@ -103,10 +103,9 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
                     )
                 };
                 let features = resolve.features_sorted(pkg.package_id());
-                units.push(
-                    bcx.units
-                        .intern(pkg, target, profile, *kind, *mode, features),
-                );
+                units.push(bcx.units.intern(
+                    pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
+                ));
             }
         }
     }
@@ -312,6 +312,11 @@ pub fn compile_ws<'a>(
     let (mut packages, resolve_with_overrides) = resolve;
 
     let std_resolve = if let Some(crates) = &config.cli_unstable().build_std {
+        if build_config.build_plan {
+            config
+                .shell()
+                .warn("-Zbuild-std does not currently fully support --build-plan")?;
+        }
         if build_config.requested_target.is_none() {
             // TODO: This should eventually be fixed. Unfortunately it is not
             // easy to get the host triple in BuildConfig. Consider changing
@@ -706,8 +711,15 @@ fn generate_targets<'a>(
             bcx.build_config.profile_kind.clone(),
         );
         let features = resolve.features_sorted(pkg.package_id());
-        bcx.units
-            .intern(pkg, target, profile, kind, target_mode, features)
+        bcx.units.intern(
+            pkg,
+            target,
+            profile,
+            kind,
+            target_mode,
+            features,
+            /*is_std*/ false,
+        )
     };
 
     // Create a list of proposed targets.
@@ -100,11 +100,11 @@ pub trait AppExt: Sized {
     }
 
     fn arg_features(self) -> Self {
-        self._arg(
-            opt("features", "Space-separated list of features to activate")
-                .multiple(true)
-                .value_name("FEATURES"),
-        )
+        self._arg(multi_opt(
+            "features",
+            "FEATURES",
+            "Space-separated list of features to activate",
+        ))
         ._arg(opt("all-features", "Activate all available features"))
         ._arg(opt(
             "no-default-features",
@@ -23,7 +23,7 @@ use url::Url;
 use self::ConfigValue as CV;
 use crate::core::profiles::ConfigProfiles;
 use crate::core::shell::Verbosity;
-use crate::core::{CliUnstable, Shell, SourceId, Workspace};
+use crate::core::{nightly_features_allowed, CliUnstable, Shell, SourceId, Workspace};
 use crate::ops;
 use crate::util::errors::{self, internal, CargoResult, CargoResultExt};
 use crate::util::toml as cargo_toml;
@@ -626,6 +626,12 @@ impl Config {
         self.target_dir = cli_target_dir;
         self.cli_flags.parse(unstable_flags)?;
 
+        if nightly_features_allowed() {
+            if let Some(val) = self.get::<Option<bool>>("unstable.mtime_on_use")? {
+                self.cli_flags.mtime_on_use |= val;
+            }
+        }
+
         Ok(())
     }
 
@@ -12,12 +12,24 @@ Some unstable features will require you to specify the `cargo-features` key in
 
 ### no-index-update
 * Original Issue: [#3479](https://github.com/rust-lang/cargo/issues/3479)
+* Tracking Issue: [#7404](https://github.com/rust-lang/cargo/issues/7404)
 
 The `-Z no-index-update` flag ensures that Cargo does not attempt to update
 the registry index. This is intended for tools such as Crater that issue many
 Cargo commands, and you want to avoid the network latency for updating the
 index each time.
 
+### mtime-on-use
+* Original Issue: [#6477](https://github.com/rust-lang/cargo/pull/6477)
+* Cache usage meta tracking issue: [#7150](https://github.com/rust-lang/cargo/issues/7150)
+
+The `-Z mtime-on-use` flag is an experiment to have Cargo update the mtime of
+used files to make it easier for tools like cargo-sweep to detect which files
+are stale. For many workflows this needs to be set on *all* invocations of cargo.
+To make this more practical, setting the `unstable.mtime_on_use` flag in `.cargo/config`
+or the corresponding ENV variable will apply `-Z mtime-on-use` to all
+invocations of nightly cargo. (The config flag is ignored by stable.)
+
 ### avoid-dev-deps
 * Original Issue: [#4988](https://github.com/rust-lang/cargo/issues/4988)
 * Stabilization Issue: [#5133](https://github.com/rust-lang/cargo/issues/5133)
@@ -412,6 +424,7 @@ the [issue tracker](https://github.com/rust-lang/wg-cargo-std-aware/issues) of
 the tracking repository, and if it's not there please file a new issue!
 
 ### timings
+* Tracking Issue: [#7405](https://github.com/rust-lang/cargo/issues/7405)
 
 The `timings` feature gives some information about how long each compilation
 takes, and tracks concurrency information over time.
@@ -470,3 +483,14 @@ Tips for addressing compile times:
 - Split large crates into smaller pieces.
 - If there are a large number of crates bottlenecked on a single crate, focus
   your attention on improving that one crate to improve parallelism.
+
+### binary-dep-depinfo
+* Tracking rustc issue: [#63012](https://github.com/rust-lang/rust/issues/63012)
+
+The `-Z binary-dep-depinfo` flag causes Cargo to forward the same flag to
+`rustc` which will then cause `rustc` to include the paths of all binary
+dependencies in the "dep info" file (with the `.d` extension). Cargo then uses
+that information for change-detection (if any binary dependency changes, then
+the crate will be rebuilt). The primary use case is for building the compiler
+itself, which has implicit dependencies on the standard library that would
+otherwise be untracked for change-detection.
@@ -19,6 +19,8 @@
 //! Otherwise the tests are skipped.
 
 use cargo_test_support::*;
+use std::env;
+use std::path::Path;
 
 fn enable_build_std(e: &mut Execs, arg: Option<&str>) {
     e.env_remove("CARGO_HOME");
@@ -174,7 +176,21 @@ fn custom_test_framework() {
         )
         .build();
 
+    // This is a bit of a hack to use the rust-lld that ships with most toolchains.
+    let sysroot = paths::sysroot();
+    let sysroot = Path::new(&sysroot);
+    let sysroot_bin = sysroot
+        .join("lib")
+        .join("rustlib")
+        .join(rustc_host())
+        .join("bin");
+    let path = env::var_os("PATH").unwrap_or_default();
+    let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+    paths.insert(0, sysroot_bin);
+    let new_path = env::join_paths(paths).unwrap();
+
     p.cargo("test --target target.json --no-run -v")
+        .env("PATH", new_path)
         .build_std_arg("core")
         .run();
 }
@@ -1959,3 +1959,32 @@ fn multi_multi_features() {
 
     p.cargo("build --features a --features").arg("b c").run();
 }
+
+#[cargo_test]
+fn cli_parse_ok() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+            "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            #[cfg(feature = "a")]
+            fn main() {
+                assert_eq!(std::env::args().nth(1).unwrap(), "b");
+            }
+            "#,
+        )
+        .build();
+
+    p.cargo("run --features a b").run();
+}
@@ -1,9 +1,8 @@
 [workspace]
 members = [
+  "src/liballoc",
+  "src/libcore",
+  "src/libproc_macro",
+  "src/libstd",
   "src/libtest",
 ]
-
-[patch.crates-io]
-rustc-std-workspace-std = { path = 'src/tools/rustc-std-workspace-std' }
-rustc-std-workspace-core = { path = 'src/tools/rustc-std-workspace-core' }
-rustc-std-workspace-alloc = { path = 'src/tools/rustc-std-workspace-alloc' }
@@ -8,4 +8,4 @@ edition = "2018"
 path = "lib.rs"
 
 [dependencies]
-core = { path = "../libcore" }
+registry-dep-using-core = { version = "*", features = ['mockbuild'] }
@@ -1 +1,11 @@
-pub fn custom_api() {}
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+extern crate alloc;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub use alloc::*;
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {
+}
@@ -6,6 +6,3 @@ edition = "2018"
 
 [lib]
 path = "lib.rs"
-
-[dependencies]
-core = { path = "../libcore" }
@@ -1,23 +0,0 @@
-//! This build script is basically the whole hack that makes this entire "mock
-//! std" feature work. Here we print out `rustc-link-search` pointing to the
-//! sysroot of the actual compiler itself, and that way we can indeed implicitly
-//! pull in those crates, but only via `extern crate`. That means that we can
-//! build tiny shim core/std/etc crates while they actually load all the various
-//! language/library details from the actual crates, meaning that instead of
-//! literally compiling libstd we compile just our own tiny shims.
-
-use std::process::Command;
-use std::env;
-
-fn main() {
-    let output = Command::new("rustc")
-        .arg("--print")
-        .arg("sysroot")
-        .output()
-        .unwrap();
-    assert!(output.status.success());
-    let stdout = String::from_utf8(output.stdout).unwrap();
-    let stdout = stdout.trim();
-    let host = env::var("HOST").unwrap();
-    println!("cargo:rustc-link-search={}/lib/rustlib/{}/lib", stdout, host);
-}
@@ -1,4 +1,9 @@
-#![no_std]
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+#[stable(since = "1.0.0", feature = "dummy")]
 pub use core::*;
 
-pub fn custom_api() {}
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {
+}
@@ -6,6 +6,3 @@ edition = "2018"
 
 [lib]
 path = "lib.rs"
-
-[dependencies]
-core = { path = "../libcore" }
@@ -6,6 +6,3 @@ edition = "2018"
 
 [lib]
 path = "lib.rs"
-
-[dependencies]
-std = { path = "../libstd" }
@@ -1,3 +1,11 @@
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
 extern crate proc_macro;
+
+#[stable(since = "1.0.0", feature = "dummy")]
 pub use proc_macro::*;
-pub fn custom_api() {}
+
+#[stable(since = "1.0.0", feature = "dummy")]
+pub fn custom_api() {
+}
@@ -8,8 +8,4 @@ edition = "2018"
 path = "lib.rs"
 
 [dependencies]
-core = { path = "../libcore" }
-compiler_builtins = { path = "../libcompiler_builtins" }
-panic_unwind = { path = "../libpanic_unwind" }
-registry-dep-using-core = { version = "1.0", features = ['mockbuild'] }
-registry-dep-using-alloc = { version = "1.0", features = ['mockbuild'] }
+registry-dep-using-alloc = { version = "*", features = ['mockbuild'] }
@@ -1,6 +1,9 @@
+#![feature(staged_api)]
+#![stable(since = "1.0.0", feature = "dummy")]
+
+#[stable(since = "1.0.0", feature = "dummy")]
 pub use std::*;
 
+#[stable(since = "1.0.0", feature = "dummy")]
 pub fn custom_api() {
-    registry_dep_using_core::custom_api();
-    registry_dep_using_alloc::custom_api();
 }
@@ -9,8 +9,9 @@ path = "lib.rs"
 
 [dependencies]
 proc_macro = { path = "../libproc_macro" }
-registry-dep-using-std = { version = "1.0", features = ['mockbuild'] }
-std = { path = "../libstd" }
+panic_unwind = { path = "../libpanic_unwind" }
+compiler_builtins = { path = "../libcompiler_builtins" }
+registry-dep-using-std = { version = "*", features = ['mockbuild'] }
 
 [features]
 panic-unwind = []
@@ -1,9 +1,10 @@
+#![feature(staged_api)]
 #![feature(test)]
+#![unstable(feature = "test", issue = "0")]
 
 extern crate test;
 
 pub use test::*;
 
 pub fn custom_api() {
-    registry_dep_using_std::custom_api();
 }
@@ -5,7 +5,6 @@ authors = ["Alex Crichton <alex@alexcrichton.com>"]
 edition = "2018"
 
 [lib]
-name = "alloc"
 path = "lib.rs"
 
 [dependencies]
@@ -5,7 +5,6 @@ authors = ["Alex Crichton <alex@alexcrichton.com>"]
 edition = "2018"
 
 [lib]
-name = "core"
 path = "lib.rs"
 
 [dependencies]
@@ -5,7 +5,6 @@ authors = ["Alex Crichton <alex@alexcrichton.com>"]
 edition = "2018"
 
 [lib]
-name = "std"
 path = "lib.rs"
 
 [dependencies]
@@ -1,11 +1,18 @@
 use cargo_test_support::registry::{Dependency, Package};
+use cargo_test_support::ProjectBuilder;
 use cargo_test_support::{is_nightly, paths, project, rustc_host, Execs};
+use std::path::PathBuf;
 
-fn setup() -> bool {
+struct Setup {
+    rustc_wrapper: PathBuf,
+    real_sysroot: String,
+}
+
+fn setup() -> Option<Setup> {
     if !is_nightly() {
         // -Zbuild-std is nightly
         // We don't want these tests to run on rust-lang/rust.
-        return false;
+        return None;
     }
 
     // Our mock sysroot requires a few packages from crates.io, so make sure
@@ -19,7 +26,6 @@ fn setup() -> bool {
 
             #[cfg(feature = \"mockbuild\")]
             pub fn custom_api() {
-                core::custom_api();
             }
 
             #[cfg(not(feature = \"mockbuild\"))]
|
|||||||
|
|
||||||
#[cfg(feature = \"mockbuild\")]
|
#[cfg(feature = \"mockbuild\")]
|
||||||
pub fn custom_api() {
|
pub fn custom_api() {
|
||||||
core::custom_api();
|
|
||||||
alloc::custom_api();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(feature = \"mockbuild\"))]
|
#[cfg(not(feature = \"mockbuild\"))]
|
||||||
@ -65,7 +69,6 @@ fn setup() -> bool {
|
|||||||
"
|
"
|
||||||
#[cfg(feature = \"mockbuild\")]
|
#[cfg(feature = \"mockbuild\")]
|
||||||
pub fn custom_api() {
|
pub fn custom_api() {
|
||||||
std::custom_api();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(feature = \"mockbuild\"))]
|
#[cfg(not(feature = \"mockbuild\"))]
|
||||||
@@ -77,10 +80,37 @@ fn setup() -> bool {
         .add_dep(Dependency::new("rustc-std-workspace-std", "*").optional(true))
         .feature("mockbuild", &["rustc-std-workspace-std"])
         .publish();
-    return true;
+
+    let p = ProjectBuilder::new(paths::root().join("rustc-wrapper"))
+        .file(
+            "src/main.rs",
+            r#"
+            use std::process::Command;
+            use std::env;
+            fn main() {
+                let mut args = env::args().skip(1).collect::<Vec<_>>();
+
+                let is_sysroot_crate = env::var_os("RUSTC_BOOTSTRAP").is_some();
+                if is_sysroot_crate {
+                    let arg = args.iter().position(|a| a == "--sysroot").unwrap();
+                    args[arg + 1] = env::var("REAL_SYSROOT").unwrap();
+                }
+
+                let ret = Command::new(&args[0]).args(&args[1..]).status().unwrap();
+                std::process::exit(ret.code().unwrap_or(1));
+            }
+            "#,
+        )
+        .build();
+    p.cargo("build").run();
+
+    return Some(Setup {
+        rustc_wrapper: p.bin("foo"),
+        real_sysroot: paths::sysroot(),
+    });
 }
 
-fn enable_build_std(e: &mut Execs, arg: Option<&str>) {
+fn enable_build_std(e: &mut Execs, setup: &Setup, arg: Option<&str>) {
     // First up, force Cargo to use our "mock sysroot" which mimics what
     // libstd looks like upstream.
     let root = paths::root();
|
|||||||
.join("tests/testsuite/mock-std");
|
.join("tests/testsuite/mock-std");
|
||||||
e.env("__CARGO_TESTS_ONLY_SRC_ROOT", &root);
|
e.env("__CARGO_TESTS_ONLY_SRC_ROOT", &root);
|
||||||
|
|
||||||
// Next, make sure it doesn't have implicit access to the host's sysroot
|
// Actually enable `-Zbuild-std` for now
|
||||||
e.env("RUSTFLAGS", "--sysroot=/path/to/nowhere");
|
|
||||||
|
|
||||||
// And finally actually enable `build-std` for now
|
|
||||||
let arg = match arg {
|
let arg = match arg {
|
||||||
Some(s) => format!("-Zbuild-std={}", s),
|
Some(s) => format!("-Zbuild-std={}", s),
|
||||||
None => "-Zbuild-std".to_string(),
|
None => "-Zbuild-std".to_string(),
|
||||||
};
|
};
|
||||||
e.arg(arg);
|
e.arg(arg);
|
||||||
e.masquerade_as_nightly_cargo();
|
e.masquerade_as_nightly_cargo();
|
||||||
|
|
||||||
|
// We do various shenanigans to ensure our "mock sysroot" actually links
|
||||||
|
// with the real sysroot, so we don't have to actually recompile std for
|
||||||
|
// each test. Perform all that logic here, namely:
|
||||||
|
//
|
||||||
|
// * RUSTC_WRAPPER - uses our shim executable built above to control rustc
|
||||||
|
// * REAL_SYSROOT - used by the shim executable to swap out to the real
|
||||||
|
// sysroot temporarily for some compilations
|
||||||
|
// * RUST{,DOC}FLAGS - an extra `-L` argument to ensure we can always load
|
||||||
|
// crates from the sysroot, but only indirectly through other crates.
|
||||||
|
e.env("RUSTC_WRAPPER", &setup.rustc_wrapper);
|
||||||
|
e.env("REAL_SYSROOT", &setup.real_sysroot);
|
||||||
|
let libdir = format!("/lib/rustlib/{}/lib", rustc_host());
|
||||||
|
e.env(
|
||||||
|
"RUSTFLAGS",
|
||||||
|
format!("-Ldependency={}{}", setup.real_sysroot, libdir),
|
||||||
|
);
|
||||||
|
e.env(
|
||||||
|
"RUSTDOCFLAGS",
|
||||||
|
format!("-Ldependency={}{}", setup.real_sysroot, libdir),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper methods used in the tests below
|
// Helper methods used in the tests below
|
||||||
trait BuildStd: Sized {
|
trait BuildStd: Sized {
|
||||||
fn build_std(&mut self) -> &mut Self;
|
fn build_std(&mut self, setup: &Setup) -> &mut Self;
|
||||||
fn build_std_arg(&mut self, arg: &str) -> &mut Self;
|
fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self;
|
||||||
fn target_host(&mut self) -> &mut Self;
|
fn target_host(&mut self) -> &mut Self;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BuildStd for Execs {
|
impl BuildStd for Execs {
|
||||||
fn build_std(&mut self) -> &mut Self {
|
fn build_std(&mut self, setup: &Setup) -> &mut Self {
|
||||||
enable_build_std(self, None);
|
enable_build_std(self, setup, None);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_std_arg(&mut self, arg: &str) -> &mut Self {
|
fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self {
|
||||||
enable_build_std(self, Some(arg));
|
enable_build_std(self, setup, Some(arg));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -132,9 +180,10 @@ impl BuildStd for Execs {
 
 #[cargo_test]
 fn basic() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
 
     let p = project()
         .file(
@@ -187,27 +236,28 @@ fn basic() {
         )
         .build();
 
-    p.cargo("check").build_std().target_host().run();
-    p.cargo("build").build_std().target_host().run();
-    p.cargo("run").build_std().target_host().run();
-    p.cargo("test").build_std().target_host().run();
+    p.cargo("check").build_std(&setup).target_host().run();
+    p.cargo("build").build_std(&setup).target_host().run();
+    p.cargo("run").build_std(&setup).target_host().run();
+    p.cargo("test").build_std(&setup).target_host().run();
 }
 
 #[cargo_test]
 fn simple_lib_std() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
     let p = project().file("src/lib.rs", "").build();
     p.cargo("build -v")
-        .build_std()
+        .build_std(&setup)
         .target_host()
-        .with_stderr_contains("[RUNNING] `rustc [..]--crate-name std [..]")
+        .with_stderr_contains("[RUNNING] `[..]--crate-name std [..]`")
         .run();
     // Check freshness.
     p.change_file("src/lib.rs", " ");
     p.cargo("build -v")
-        .build_std()
+        .build_std(&setup)
         .target_host()
         .with_stderr_contains("[FRESH] std[..]")
         .run();
@@ -215,18 +265,20 @@ fn simple_lib_std() {
 
 #[cargo_test]
 fn simple_bin_std() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
     let p = project().file("src/main.rs", "fn main() {}").build();
-    p.cargo("run -v").build_std().target_host().run();
+    p.cargo("run -v").build_std(&setup).target_host().run();
 }
 
 #[cargo_test]
 fn lib_nostd() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
     let p = project()
         .file(
             "src/lib.rs",
let p = project()
|
let p = project()
|
||||||
.file(
|
.file(
|
||||||
"src/lib.rs",
|
"src/lib.rs",
|
||||||
@ -239,7 +291,7 @@ fn lib_nostd() {
|
|||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
p.cargo("build -v --lib")
|
p.cargo("build -v --lib")
|
||||||
.build_std_arg("core")
|
.build_std_arg(&setup, "core")
|
||||||
.target_host()
|
.target_host()
|
||||||
.with_stderr_does_not_contain("[..]libstd[..]")
|
.with_stderr_does_not_contain("[..]libstd[..]")
|
||||||
.run();
|
.run();
|
||||||
@@ -247,15 +299,16 @@ fn lib_nostd() {
 
 #[cargo_test]
 fn check_core() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
     let p = project()
         .file("src/lib.rs", "#![no_std] fn unused_fn() {}")
         .build();
 
     p.cargo("check -v")
-        .build_std_arg("core")
+        .build_std_arg(&setup, "core")
         .target_host()
         .with_stderr_contains("[WARNING] [..]unused_fn[..]`")
         .run();
@@ -263,9 +316,10 @@ fn check_core() {
 
 #[cargo_test]
 fn depend_same_as_std() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
 
     let p = project()
         .file(
@@ -294,14 +348,15 @@ fn depend_same_as_std() {
         )
         .build();
 
-    p.cargo("build -v").build_std().target_host().run();
+    p.cargo("build -v").build_std(&setup).target_host().run();
 }
 
 #[cargo_test]
 fn test() {
-    if !setup() {
-        return;
-    }
+    let setup = match setup() {
+        Some(s) => s,
+        None => return,
+    };
     let p = project()
         .file(
             "src/lib.rs",
@ -318,7 +373,7 @@ fn test() {
|
|||||||
.build();
|
.build();
|
||||||
|
|
||||||
p.cargo("test -v")
|
p.cargo("test -v")
|
||||||
.build_std()
|
.build_std(&setup)
|
||||||
.target_host()
|
.target_host()
|
||||||
.with_stdout_contains("test tests::it_works ... ok")
|
.with_stdout_contains("test tests::it_works ... ok")
|
||||||
.run();
|
.run();
|
||||||
@ -326,9 +381,10 @@ fn test() {
|
|||||||
|
|
||||||
#[cargo_test]
|
#[cargo_test]
|
||||||
fn target_proc_macro() {
|
fn target_proc_macro() {
|
||||||
if !setup() {
|
let setup = match setup() {
|
||||||
return;
|
Some(s) => s,
|
||||||
}
|
None => return,
|
||||||
|
};
|
||||||
let p = project()
|
let p = project()
|
||||||
.file(
|
.file(
|
||||||
"src/lib.rs",
|
"src/lib.rs",
|
||||||
@ -341,14 +397,15 @@ fn target_proc_macro() {
|
|||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
p.cargo("build -v").build_std().target_host().run();
|
p.cargo("build -v").build_std(&setup).target_host().run();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cargo_test]
|
#[cargo_test]
|
||||||
fn bench() {
|
fn bench() {
|
||||||
if !setup() {
|
let setup = match setup() {
|
||||||
return;
|
Some(s) => s,
|
||||||
}
|
None => return,
|
||||||
|
};
|
||||||
let p = project()
|
let p = project()
|
||||||
.file(
|
.file(
|
||||||
"src/lib.rs",
|
"src/lib.rs",
|
||||||
@ -364,14 +421,15 @@ fn bench() {
|
|||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
p.cargo("bench -v").build_std().target_host().run();
|
p.cargo("bench -v").build_std(&setup).target_host().run();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cargo_test]
|
#[cargo_test]
|
||||||
fn doc() {
|
fn doc() {
|
||||||
if !setup() {
|
let setup = match setup() {
|
||||||
return;
|
Some(s) => s,
|
||||||
}
|
None => return,
|
||||||
|
};
|
||||||
let p = project()
|
let p = project()
|
||||||
.file(
|
.file(
|
||||||
"src/lib.rs",
|
"src/lib.rs",
|
||||||
@ -382,14 +440,15 @@ fn doc() {
|
|||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
p.cargo("doc -v").build_std().target_host().run();
|
p.cargo("doc -v").build_std(&setup).target_host().run();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cargo_test]
|
#[cargo_test]
|
||||||
fn check_std() {
|
fn check_std() {
|
||||||
if !setup() {
|
let setup = match setup() {
|
||||||
return;
|
Some(s) => s,
|
||||||
}
|
None => return,
|
||||||
|
};
|
||||||
let p = project()
|
let p = project()
|
||||||
.file(
|
.file(
|
||||||
"src/lib.rs",
|
"src/lib.rs",
|
||||||
@ -413,32 +472,89 @@ fn check_std() {
|
|||||||
.build();
|
.build();
|
||||||
|
|
||||||
p.cargo("check -v --all-targets")
|
p.cargo("check -v --all-targets")
|
||||||
.build_std()
|
.build_std(&setup)
|
||||||
.target_host()
|
.target_host()
|
||||||
.run();
|
.run();
|
||||||
p.cargo("check -v --all-targets --profile=test")
|
p.cargo("check -v --all-targets --profile=test")
|
||||||
.build_std()
|
.build_std(&setup)
|
||||||
.target_host()
|
.target_host()
|
||||||
.run();
|
.run();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cargo_test]
|
#[cargo_test]
|
||||||
fn doctest() {
|
fn doctest() {
|
||||||
if !setup() {
|
let setup = match setup() {
|
||||||
return;
|
Some(s) => s,
|
||||||
}
|
None => return,
|
||||||
|
};
|
||||||
let p = project()
|
let p = project()
|
||||||
.file(
|
.file(
|
||||||
"src/lib.rs",
|
"src/lib.rs",
|
||||||
r#"
|
r#"
|
||||||
/// Doc
|
/// Doc
|
||||||
/// ```
|
/// ```
|
||||||
/// assert_eq!(1, 1);
|
/// std::custom_api();
|
||||||
/// ```
|
/// ```
|
||||||
pub fn f() {}
|
pub fn f() {}
|
||||||
"#,
|
"#,
|
||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
p.cargo("test --doc -v").build_std().target_host().run();
|
p.cargo("test --doc -v")
|
||||||
|
.build_std(&setup)
|
||||||
|
.with_stdout_contains("test src/lib.rs - f [..] ... ok")
|
||||||
|
.target_host()
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cargo_test]
|
||||||
|
fn no_implicit_alloc() {
|
||||||
|
// Demonstrate that alloc is not implicitly in scope.
|
||||||
|
let setup = match setup() {
|
||||||
|
Some(s) => s,
|
||||||
|
None => return,
|
||||||
|
};
|
||||||
|
let p = project()
|
||||||
|
.file(
|
||||||
|
"src/lib.rs",
|
||||||
|
r#"
|
||||||
|
pub fn f() {
|
||||||
|
let _: Vec<i32> = alloc::vec::Vec::new();
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
p.cargo("build -v")
|
||||||
|
.build_std(&setup)
|
||||||
|
.target_host()
|
||||||
|
.with_stderr_contains("[..]use of undeclared [..]`alloc`")
|
||||||
|
.with_status(101)
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cargo_test]
|
||||||
|
fn macro_expanded_shadow() {
|
||||||
|
// This tests a bug caused by the previous use of `--extern` to directly
|
||||||
|
// load sysroot crates. This necessitated the switch to `--sysroot` to
|
||||||
|
// retain existing behavior. See
|
||||||
|
// https://github.com/rust-lang/wg-cargo-std-aware/issues/40 for more
|
||||||
|
// detail.
|
||||||
|
let setup = match setup() {
|
||||||
|
Some(s) => s,
|
||||||
|
None => return,
|
||||||
|
};
|
||||||
|
let p = project()
|
||||||
|
.file(
|
||||||
|
"src/lib.rs",
|
||||||
|
r#"
|
||||||
|
macro_rules! a {
|
||||||
|
() => (extern crate std as alloc;)
|
||||||
|
}
|
||||||
|
a!();
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
p.cargo("build -v").build_std(&setup).target_host().run();
|
||||||
}
|
}
|
||||||
|
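A side note on the two tests added at the end of this file (the sketch below is not part of the diff itself): `no_implicit_alloc` relies on the fact that, unlike `std`, the `alloc` crate is never injected into scope automatically, so a bare `alloc::` path only resolves once the crate is declared explicitly. A minimal sketch of the working counterpart, with an illustrative function name:

    // Declaring the crate explicitly is what makes `alloc::` paths resolve;
    // without this line rustc reports a "use of undeclared [..] `alloc`"
    // error, which is exactly what `no_implicit_alloc` asserts on.
    extern crate alloc;

    pub fn f() -> alloc::vec::Vec<i32> {
        alloc::vec::Vec::new()
    }

`macro_expanded_shadow`, per its own comment, covers the related case where `extern crate std as alloc;` comes out of a macro expansion: loading sysroot crates via `--sysroot` rather than individual `--extern` flags retains the resolution behavior that existed before.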
@@ -1,4 +1,4 @@
-use std::fs::File;
+use std::fs::{self, File};
 use std::io::prelude::*;
 
 use cargo;
@@ -3665,6 +3665,7 @@ fn test_dep_with_dev() {
 #[cargo_test]
 fn cargo_test_doctest_xcompile_ignores() {
     if !is_nightly() {
+        // -Zdoctest-xcompile is unstable
         return;
     }
     let p = project()
@@ -3686,18 +3687,14 @@ fn cargo_test_doctest_xcompile_ignores() {
     #[cfg(not(target_arch = "x86_64"))]
     p.cargo("test")
         .with_stdout_contains(
-            "\
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out\
-",
+            "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out",
         )
         .run();
     #[cfg(target_arch = "x86_64")]
     p.cargo("test")
         .with_status(101)
         .with_stdout_contains(
-            "\
-test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out\
-",
+            "test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out",
         )
         .run();
 
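The string-literal change in this hunk (and the matching ones below) is purely cosmetic: a trailing `\` inside a Rust string literal consumes the newline and the next line's leading whitespace, so the old multi-line form and the new single-line form spell the same expected output. A small self-contained check, written here only for illustration:

    fn main() {
        let old = "\
    test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out\
    ";
        let new = "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out";
        // The `\` line continuations strip the newlines and leading whitespace,
        // leaving a single line identical to the new literal.
        assert_eq!(old, new);
    }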
@@ -3705,9 +3702,7 @@ fn cargo_test_doctest_xcompile_ignores() {
     p.cargo("test -Zdoctest-xcompile")
         .masquerade_as_nightly_cargo()
         .with_stdout_contains(
-            "\
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out\
-",
+            "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out",
         )
         .run();
 
@@ -3715,16 +3710,18 @@ fn cargo_test_doctest_xcompile_ignores() {
     p.cargo("test -Zdoctest-xcompile")
         .masquerade_as_nightly_cargo()
         .with_stdout_contains(
-            "\
-test result: ok. 0 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out\
-",
+            "test result: ok. 0 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out",
         )
         .run();
 }
 
 #[cargo_test]
 fn cargo_test_doctest_xcompile() {
+    if cross_compile::disabled() {
+        return;
+    }
     if !is_nightly() {
+        // -Zdoctest-xcompile is unstable
         return;
     }
     let p = project()
@@ -3745,11 +3742,7 @@ fn cargo_test_doctest_xcompile() {
 
     p.cargo("build").run();
     p.cargo(&format!("test --target {}", cross_compile::alternate()))
-        .with_stdout_contains(
-            "\
-running 0 tests\
-",
-        )
+        .with_stdout_contains("running 0 tests")
         .run();
     p.cargo(&format!(
         "test --target {} -Zdoctest-xcompile",
@@ -3757,17 +3750,18 @@ fn cargo_test_doctest_xcompile() {
     ))
     .masquerade_as_nightly_cargo()
     .with_stdout_contains(
-        "\
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out\
-",
+        "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out",
     )
     .run();
 }
 
 #[cargo_test]
 fn cargo_test_doctest_xcompile_runner() {
-    use std::fs;
+    if cross_compile::disabled() {
+        return;
+    }
     if !is_nightly() {
+        // -Zdoctest-xcompile is unstable
         return;
     }
 
@@ -3824,11 +3818,7 @@ runner = "{}"
 
     p.cargo("build").run();
     p.cargo(&format!("test --target {}", cross_compile::alternate()))
-        .with_stdout_contains(
-            "\
-running 0 tests\
-",
-        )
+        .with_stdout_contains("running 0 tests")
         .run();
     p.cargo(&format!(
         "test --target {} -Zdoctest-xcompile",
@@ -3836,21 +3826,19 @@ runner = "{}"
     ))
     .masquerade_as_nightly_cargo()
     .with_stdout_contains(
-        "\
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out\
-",
-    )
-    .with_stderr_contains(
-        "\
-this is a runner\
-",
+        "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out",
     )
+    .with_stderr_contains("this is a runner")
     .run();
 }
 
 #[cargo_test]
 fn cargo_test_doctest_xcompile_no_runner() {
+    if cross_compile::disabled() {
+        return;
+    }
     if !is_nightly() {
+        // -Zdoctest-xcompile is unstable
         return;
     }
 
@@ -3872,11 +3860,7 @@ fn cargo_test_doctest_xcompile_no_runner() {
 
     p.cargo("build").run();
     p.cargo(&format!("test --target {}", cross_compile::alternate()))
-        .with_stdout_contains(
-            "\
-running 0 tests\
-",
-        )
+        .with_stdout_contains("running 0 tests")
         .run();
     p.cargo(&format!(
         "test --target {} -Zdoctest-xcompile",
@@ -3884,9 +3868,7 @@ fn cargo_test_doctest_xcompile_no_runner() {
     ))
     .masquerade_as_nightly_cargo()
     .with_stdout_contains(
-        "\
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out\
-",
+        "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out",
     )
     .run();
 }