Mirror of https://github.com/rust-lang/cargo.git (synced 2025-10-01 11:30:39 +00:00)
auto merge of #157 : rust-lang/cargo/graph-resolve, r=alexcrichton
This incorporates previous bugfixes by @alexcrichton that are still stuck in bors.
Commit 0355cb6584

Makefile
@@ -7,9 +7,11 @@ ifeq ($(wildcard rustc/bin),)
 export RUSTC := rustc
 else
 export RUSTC := $(CURDIR)/rustc/bin/rustc
+export LD_LIBRARY_PATH := $(CURDIR)/rustc/lib:$(LD_LIBRARY_PATH)
+export DYLD_LIBRARY_PATH := $(CURDIR)/rustc/lib:$(DYLD_LIBRARY_PATH)
 endif

-export PATH := $(PATH):$(CURDIR)/rustc/bin
+export PATH := $(CURDIR)/rustc/bin:$(PATH)

 # Link flags to pull in dependencies
 BINS = cargo \
@@ -16,9 +16,9 @@ $ ./cargo-nightly/bin/cargo build
 The current nightlies available are:

-* `cargo-nightly-linux`
-* `cargo-nightly-win`
-* `cargo-nightly-mac`
+* [`cargo-nightly-linux`](http://static.rust-lang.org/cargo-dist/cargo-nightly-linux.tar.gz)
+* [`cargo-nightly-win`](http://static.rust-lang.org/cargo-dist/cargo-nightly-win.tar.gz)
+* [`cargo-nightly-mac`](http://static.rust-lang.org/cargo-dist/cargo-nightly-mac.tar.gz)

 ## Compiling cargo

@@ -1 +1 @@
-Subproject commit a3844d6e0c6b84934078b7ee0e6e702c59cc5242
+Subproject commit 60b649957b556c934929b7b6205ec95e20a2cd9e

@@ -1 +1 @@
-Subproject commit bbb7848676698ec94d186e2c910ab82452d07433
+Subproject commit fa1255d4ba109f82d1b0e8be61dde4ea70047be9

@@ -1 +1 @@
-Subproject commit 624d5398184ccd500c3ce02338006f32f380fcc9
+Subproject commit a0f1ea65fc80379f0c6c095d92fad840004aaa56
@@ -22,11 +22,12 @@ use cargo::util::important_paths::find_project_manifest;
 struct Options {
     manifest_path: Option<String>,
     jobs: Option<uint>,
+    update: bool,
     rest: Vec<String>,
 }

 hammer_config!(Options "Run the package's test suite", |c| {
-    c.short("jobs", 'j')
+    c.short("jobs", 'j').short("update", 'u')
 })

 fn main() {

@@ -45,7 +46,7 @@ fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
     };

     let compile_opts = ops::CompileOptions {
-        update: false,
+        update: options.update,
         env: "test",
         shell: shell,
         jobs: options.jobs

@@ -64,7 +65,7 @@ fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
     for file in walk {
         // TODO: The proper fix is to have target knows its expected
         // output and only run expected executables.
-        if file.display().to_str().as_slice().contains("dSYM") { continue; }
+        if file.display().to_string().as_slice().contains("dSYM") { continue; }
         if !is_executable(&file) { continue; }

         try!(util::process(file).exec().map_err(|e| {
@@ -34,7 +34,7 @@ fn main() {
         }
     };
     let file = Path::new(manifest);
-    let contents = match File::open(&file).read_to_str() {
+    let contents = match File::open(&file).read_to_string() {
         Ok(s) => s,
         Err(e) => return fail("invalid", format!("error reading file: {}",
                                                  e).as_slice())

@@ -81,7 +81,7 @@ fn execute() {

 fn process(args: Vec<String>) -> (String, Vec<String>) {
     let mut args = Vec::from_slice(args.tail());
-    let head = args.shift().unwrap_or("--help".to_str());
+    let head = args.shift().unwrap_or("--help".to_string());

     (head, args)
 }

@@ -156,5 +156,5 @@ fn locate_project(_: NoFlags, _: &mut MultiShell) -> CliResult<Option<ProjectLoc
                                  not representable in Unicode"))
                  .map_err(|e| CliError::from_boxed(e, 1)));

-    Ok(Some(ProjectLocation { root: string.to_str() }))
+    Ok(Some(ProjectLocation { root: string.to_string() }))
 }
@@ -19,7 +19,7 @@ impl Dependency {
         };

         Ok(Dependency {
-            name: name.to_str(),
+            name: name.to_string(),
             namespace: namespace.clone(),
             req: version,
             transitive: true

@@ -67,8 +67,8 @@ pub struct SerializedDependency {
 impl SerializedDependency {
     pub fn from_dependency(dep: &Dependency) -> SerializedDependency {
         SerializedDependency {
-            name: dep.get_name().to_str(),
-            req: dep.get_version_req().to_str()
+            name: dep.get_name().to_string(),
+            req: dep.get_version_req().to_string()
         }
     }
 }
@@ -32,8 +32,8 @@ pub struct CLIError {
 impl CLIError {
     pub fn new<T: Show, U: Show>(msg: T, detail: Option<U>,
                                  exit_code: uint) -> CLIError {
-        let detail = detail.map(|d| d.to_str());
-        CLIError { msg: msg.to_str(), detail: detail, exit_code: exit_code }
+        let detail = detail.map(|d| d.to_string());
+        CLIError { msg: msg.to_string(), detail: detail, exit_code: exit_code }
     }
 }

@@ -84,7 +84,7 @@ impl CargoError {
     }

     pub fn described<T: Show>(description: T) -> CargoError {
-        CargoInternalError(Described(description.to_str()))
+        CargoInternalError(Described(description.to_string()))
     }

     pub fn other() -> CargoError {
@@ -47,14 +47,14 @@ pub struct SerializedManifest {
 impl<E, S: Encoder<E>> Encodable<S, E> for Manifest {
     fn encode(&self, s: &mut S) -> Result<(), E> {
         SerializedManifest {
-            name: self.summary.get_name().to_str(),
-            version: self.summary.get_version().to_str(),
+            name: self.summary.get_name().to_string(),
+            version: self.summary.get_version().to_string(),
             dependencies: self.summary.get_dependencies().iter().map(|d| {
                 SerializedDependency::from_dependency(d)
             }).collect(),
             authors: self.authors.clone(),
             targets: self.targets.clone(),
-            target_dir: self.target_dir.display().to_str(),
+            target_dir: self.target_dir.display().to_string(),
             build: if self.build.len() == 0 { None } else { Some(self.build.clone()) },
         }.encode(s)
     }

@@ -100,7 +100,7 @@ pub enum TargetKind {
     BinTarget
 }

-#[deriving(Encodable, Decodable, Clone, Hash, PartialEq)]
+#[deriving(Encodable, Decodable, Clone, Hash, PartialEq, Show)]
 pub struct Profile {
     env: String, // compile, test, dev, bench, etc.
     opt_level: uint,

@@ -112,7 +112,7 @@ pub struct Profile {
 impl Profile {
     pub fn default_dev() -> Profile {
         Profile {
-            env: "compile".to_str(), // run in the default environment only
+            env: "compile".to_string(), // run in the default environment only
             opt_level: 0,
             debug: true,
             test: false, // whether or not to pass --test

@@ -122,31 +122,31 @@ impl Profile {

     pub fn default_test() -> Profile {
         Profile {
-            env: "test".to_str(), // run in the default environment only
+            env: "test".to_string(), // run in the default environment only
             opt_level: 0,
             debug: true,
             test: true, // whether or not to pass --test
-            dest: Some("test".to_str())
+            dest: Some("test".to_string())
         }
     }

     pub fn default_bench() -> Profile {
         Profile {
-            env: "bench".to_str(), // run in the default environment only
+            env: "bench".to_string(), // run in the default environment only
             opt_level: 3,
             debug: false,
             test: true, // whether or not to pass --test
-            dest: Some("bench".to_str())
+            dest: Some("bench".to_string())
         }
     }

     pub fn default_release() -> Profile {
         Profile {
-            env: "release".to_str(), // run in the default environment only
+            env: "release".to_string(), // run in the default environment only
             opt_level: 3,
             debug: false,
             test: false, // whether or not to pass --test
-            dest: Some("release".to_str())
+            dest: Some("release".to_string())
         }
     }

@@ -218,7 +218,7 @@ impl<E, S: Encoder<E>> Encodable<S, E> for Target {
         SerializedTarget {
             kind: kind,
             name: self.name.clone(),
-            src_path: self.src_path.display().to_str(),
+            src_path: self.src_path.display().to_string(),
             profile: self.profile.clone(),
             metadata: self.metadata.clone()
         }.encode(s)

@@ -227,8 +227,8 @@ impl<E, S: Encoder<E>> Encodable<S, E> for Target {

 impl Show for Target {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        write!(f, "{}(name={}, path={})", self.kind, self.name,
-               self.src_path.display())
+        write!(f, "{}(name={}, path={}, profile={})", self.kind, self.name,
+               self.src_path.display(), self.profile)
     }
 }

@@ -298,6 +298,13 @@ impl Manifest {
 }

 impl Target {
+    pub fn file_stem(&self) -> String {
+        match self.metadata {
+            Some(ref metadata) => format!("{}{}", self.name, metadata.extra_filename),
+            None => self.name.clone()
+        }
+    }
+
     pub fn lib_target(name: &str, crate_targets: Vec<LibKind>,
                       src_path: &Path, profile: &Profile,
                       metadata: &Metadata)

@@ -305,7 +312,7 @@ impl Target {
     {
         Target {
             kind: LibTarget(crate_targets),
-            name: name.to_str(),
+            name: name.to_string(),
             src_path: src_path.clone(),
             profile: profile.clone(),
             metadata: Some(metadata.clone())

@@ -315,7 +322,7 @@ impl Target {
     pub fn bin_target(name: &str, src_path: &Path, profile: &Profile) -> Target {
         Target {
             kind: BinTarget,
-            name: name.to_str(),
+            name: name.to_string(),
             src_path: src_path.clone(),
             profile: profile.clone(),
             metadata: None

@@ -337,6 +344,21 @@ impl Target {
         }
     }

+    pub fn is_dylib(&self) -> bool {
+        match self.kind {
+            LibTarget(ref kinds) => kinds.iter().any(|&k| k == Dylib),
+            _ => false
+        }
+    }
+
+    pub fn is_rlib(&self) -> bool {
+        match self.kind {
+            LibTarget(ref kinds) =>
+                kinds.iter().any(|&k| k == Rlib || k == Lib),
+            _ => false
+        }
+    }
+
     pub fn is_bin(&self) -> bool {
         match self.kind {
             BinTarget => true,
@@ -42,6 +42,7 @@ pub use self::dependency::{
 };

 pub use self::version_req::VersionReq;
+pub use self::resolver::Resolve;

 pub mod errors;
 pub mod source;
@@ -45,14 +45,14 @@ impl<E, S: Encoder<E>> Encodable<S, E> for Package {
         let package_id = summary.get_package_id();

         SerializedPackage {
-            name: package_id.get_name().to_str(),
-            version: package_id.get_version().to_str(),
+            name: package_id.get_name().to_string(),
+            version: package_id.get_version().to_string(),
             dependencies: summary.get_dependencies().iter().map(|d| {
                 SerializedDependency::from_dependency(d)
             }).collect(),
             authors: Vec::from_slice(manifest.get_authors()),
             targets: Vec::from_slice(manifest.get_targets()),
-            manifest_path: self.manifest_path.display().to_str()
+            manifest_path: self.manifest_path.display().to_string()
         }.encode(s)
     }
 }

@@ -123,7 +123,7 @@ impl Package {
         // Sort the sources just to make sure we have a consistent fingerprint.
         sources.sort_by(|a, b| {
             cmp::lexical_ordering(a.kind.cmp(&b.kind),
-                                  a.location.to_str().cmp(&b.location.to_str()))
+                                  a.location.to_string().cmp(&b.location.to_string()))
         });
         let sources = sources.iter().map(|source_id| {
             source_id.load(config)
@@ -1,7 +1,9 @@
 use semver;
 use url::Url;
+use std::hash::Hash;
 use std::fmt;
 use std::fmt::{Show,Formatter};
+use collections::hash;
 use serialize::{
     Encodable,
     Encoder,

@@ -60,6 +62,16 @@ pub struct PackageId {
     source_id: SourceId,
 }

+impl<S: hash::Writer> Hash<S> for PackageId {
+    fn hash(&self, state: &mut S) {
+        self.name.hash(state);
+        self.version.to_string().hash(state);
+        self.source_id.hash(state);
+    }
+}
+
+impl Eq for PackageId {}
+
 #[deriving(Clone, Show, PartialEq)]
 pub enum PackageIdError {
     InvalidVersion(String),

@@ -87,7 +99,7 @@ impl PackageId {
                sid: &SourceId) -> CargoResult<PackageId> {
         let v = try!(version.to_version().map_err(InvalidVersion));
         Ok(PackageId {
-            name: name.to_str(),
+            name: name.to_string(),
             version: v,
             source_id: sid.clone()
         })

@@ -108,9 +120,9 @@ impl PackageId {
     pub fn generate_metadata(&self) -> Metadata {
         let metadata = format!("{}:-:{}:-:{}", self.name, self.version, self.source_id);
         let extra_filename = short_hash(
-            &(self.name.as_slice(), self.version.to_str(), &self.source_id));
+            &(self.name.as_slice(), self.version.to_string(), &self.source_id));

-        Metadata { metadata: metadata, extra_filename: extra_filename }
+        Metadata { metadata: metadata, extra_filename: format!("-{}", extra_filename) }
     }
 }

@@ -120,7 +132,7 @@ impl Show for PackageId {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
         try!(write!(f, "{} v{}", self.name, self.version));

-        if self.source_id.to_str().as_slice() != central_repo {
+        if self.source_id.to_string().as_slice() != central_repo {
             try!(write!(f, " ({})", self.source_id));
         }

@@ -141,7 +153,7 @@ impl<D: Decoder<Box<CargoError + Send>>>

 impl<E, S: Encoder<E>> Encodable<S,E> for PackageId {
     fn encode(&self, e: &mut S) -> Result<(), E> {
-        (self.name.clone(), self.version.to_str(), self.source_id.clone()).encode(e)
+        (self.name.clone(), self.version.to_string(), self.source_id.clone()).encode(e)
     }
 }

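A note on the two new impls in the hunk above (this note is not part of the patch): hashing self.version.to_string() rather than the semver::Version value suggests that Version did not provide a usable Hash impl at this point, so the rendered string is used as a stable stand-in; together with the marker impl Eq, this is what lets PackageId serve as a node key in the resolver's Graph and in HashMaps later in the patch. A minimal sketch of the same pattern, in the same pre-1.0 Rust dialect as the patch and assuming the same extern crates (the Key type and its fields are hypothetical):

    struct Key {
        name: String,
        version: semver::Version, // assumed to lack a Hash impl of its own
    }

    impl<S: hash::Writer> Hash<S> for Key {
        fn hash(&self, state: &mut S) {
            self.name.hash(state);
            // Hash the rendered version string instead of the Version value.
            self.version.to_string().hash(state);
        }
    }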
@@ -1,68 +1,136 @@
 use std::collections::HashMap;
+use util::graph::{Nodes,Edges};

 use core::{
     Dependency,
     PackageId,
-    Summary,
     Registry,
+    SourceId,
 };

-use util::{CargoResult, human, internal};
+use semver;

-/* TODO:
- * - The correct input here is not a registry. Resolves should be performable
- * on package summaries vs. the packages themselves.
- */
-pub fn resolve<R: Registry>(deps: &[Dependency],
-                            registry: &mut R) -> CargoResult<Vec<PackageId>> {
+use util::{CargoResult, Graph, human, internal};
+
+pub struct Resolve {
+    graph: Graph<PackageId>
+}
+
+impl Resolve {
+    fn new() -> Resolve {
+        Resolve { graph: Graph::new() }
+    }
+
+    pub fn iter<'a>(&'a self) -> Nodes<'a, PackageId> {
+        self.graph.iter()
+    }
+
+    pub fn deps<'a>(&'a self, pkg: &PackageId) -> Option<Edges<'a, PackageId>> {
+        self.graph.edges(pkg)
+    }
+}
+
+struct Context<'a, R> {
+    registry: &'a mut R,
+    resolve: Resolve,
+
+    // Eventually, we will have smarter logic for checking for conflicts in the resolve,
+    // but without the registry, conflicts should not exist in practice, so this is just
+    // a sanity check.
+    seen: HashMap<(String, SourceId), semver::Version>
+}
+
+impl<'a, R: Registry> Context<'a, R> {
+    fn new(registry: &'a mut R) -> Context<'a, R> {
+        Context {
+            registry: registry,
+            resolve: Resolve::new(),
+            seen: HashMap::new()
+        }
+    }
+}
+
+pub fn resolve<R: Registry>(root: &PackageId, deps: &[Dependency], registry: &mut R)
+    -> CargoResult<Resolve>
+{
     log!(5, "resolve; deps={}", deps);

-    let mut remaining = Vec::from_slice(deps);
-    let mut resolve = HashMap::<String, Summary>::new();
+    let mut context = Context::new(registry);
+    try!(resolve_deps(root, deps, &mut context));
+    Ok(context.resolve)
+}

-    loop {
-        let curr = match remaining.pop() {
-            Some(curr) => curr,
-            None => {
-                let ret = resolve.values().map(|summary| {
-                    summary.get_package_id().clone()
-                }).collect();
-                log!(5, "resolve complete; ret={}", ret);
-                return Ok(ret);
+fn resolve_deps<'a, R: Registry>(parent: &PackageId,
+                                 deps: &[Dependency],
+                                 ctx: &mut Context<'a, R>)
+    -> CargoResult<()>
+{
+    if deps.is_empty() {
+        return Ok(());
+    }
+
+    for dep in deps.iter() {
+        let pkgs = try!(ctx.registry.query(dep));
+
+        if pkgs.is_empty() {
+            return Err(human(format!("No package named {} found", dep)));
+        }
+
+        if pkgs.len() > 1 {
+            return Err(internal(format!("At the moment, Cargo only supports a \
+                single source for a particular package name ({}).", dep)));
+        }
+
+        let summary = pkgs.get(0).clone();
+        let name = summary.get_name().to_string();
+        let source_id = summary.get_source_id().clone();
+        let version = summary.get_version().clone();
+
+        ctx.resolve.graph.link(parent.clone(), summary.get_package_id().clone());
+
+        let found = {
+            let found = ctx.seen.find(&(name.clone(), source_id.clone()));
+
+            if found.is_some() {
+                if found == Some(&version) { continue; }
+                return Err(human(format!("Cargo found multiple copies of {} in {}. This \
+                    is not currently supported",
+                    summary.get_name(), summary.get_source_id())));
+            } else {
+                false
             }
         };

-        let opts = try!(registry.query(&curr));
-
-        if opts.len() == 0 {
-            return Err(human(format!("No package named {} found", curr.get_name())));
+        if !found {
+            ctx.seen.insert((name, source_id), version);
         }

-        if opts.len() > 1 {
-            return Err(internal(format!("At the moment, Cargo only supports a \
-                single source for a particular package name ({}).", curr.get_name())));
-        }
-
-        let pkg = opts.get(0).clone();
-        resolve.insert(pkg.get_name().to_str(), pkg.clone());
+        ctx.resolve.graph.add(summary.get_package_id().clone(), []);

-        for dep in pkg.get_dependencies().iter() {
-            if !dep.is_transitive() { continue; }
-
-            if !resolve.contains_key_equiv(&dep.get_name()) {
-                remaining.push(dep.clone());
-            }
-        }
+        let deps: Vec<Dependency> = summary.get_dependencies().iter()
+            .filter(|d| d.is_transitive())
+            .map(|d| d.clone())
+            .collect();
+
+        try!(resolve_deps(summary.get_package_id(), deps.as_slice(), ctx));
     }
+
+    Ok(())
 }

 #[cfg(test)]
 mod test {
     use hamcrest::{assert_that, equal_to, contains};

-    use core::source::{SourceId, RegistryKind, Location, Remote};
-    use core::{Dependency, PackageId, Summary};
-    use super::resolve;
+    use core::source::{SourceId, RegistryKind, GitKind, Location, Remote};
+    use core::{Dependency, PackageId, Summary, Registry};
+    use util::CargoResult;
+
+    fn resolve<R: Registry>(pkg: &PackageId, deps: &[Dependency], registry: &mut R)
+        -> CargoResult<Vec<PackageId>>
+    {
+        Ok(try!(super::resolve(pkg, deps, registry)).iter().map(|p| p.clone()).collect())
+    }

     trait ToDep {
         fn to_dep(self) -> Dependency;

@@ -102,8 +170,23 @@ mod test {
     }

     fn pkg(name: &str) -> Summary {
-        Summary::new(&PackageId::new(name, "1.0.0", &registry_loc()).unwrap(),
-                     &[])
+        Summary::new(&pkg_id(name), &[])
+    }
+
+    fn pkg_id(name: &str) -> PackageId {
+        PackageId::new(name, "1.0.0", &registry_loc()).unwrap()
+    }
+
+    fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+        let remote = Location::parse(loc);
+        let source_id = SourceId::new(GitKind("master".to_string()),
+                                      remote.unwrap());
+
+        PackageId::new(name, "1.0.0", &source_id).unwrap()
+    }
+
+    fn pkg_loc(name: &str, loc: &str) -> Summary {
+        Summary::new(&pkg_id_loc(name, loc), &[])
     }

     fn dep(name: &str) -> Dependency {

@@ -112,6 +195,12 @@ mod test {
         Dependency::parse(name, Some("1.0.0"), &source_id).unwrap()
     }

+    fn dep_loc(name: &str, location: &str) -> Dependency {
+        let url = from_str(location).unwrap();
+        let source_id = SourceId::new(GitKind("master".to_string()), Remote(url));
+        Dependency::parse(name, Some("1.0.0"), &source_id).unwrap()
+    }
+
     fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
         pkgs
     }

@@ -122,9 +211,14 @@ mod test {
             .collect()
     }

+    fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+        names.iter()
+            .map(|&(name, loc)| pkg_id_loc(name, loc)).collect()
+    }
+
     #[test]
     pub fn test_resolving_empty_dependency_list() {
-        let res = resolve([], &mut registry(vec!())).unwrap();
+        let res = resolve(&pkg_id("root"), [], &mut registry(vec!())).unwrap();

         assert_that(&res, equal_to(&names([])));
     }

@@ -132,41 +226,60 @@ mod test {
     #[test]
     pub fn test_resolving_only_package() {
         let mut reg = registry(vec!(pkg("foo")));
-        let res = resolve([dep("foo")], &mut reg);
+        let res = resolve(&pkg_id("root"), [dep("foo")], &mut reg);

-        assert_that(&res.unwrap(), equal_to(&names(["foo"])));
+        assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly());
     }

     #[test]
     pub fn test_resolving_one_dep() {
         let mut reg = registry(vec!(pkg("foo"), pkg("bar")));
-        let res = resolve([dep("foo")], &mut reg);
+        let res = resolve(&pkg_id("root"), [dep("foo")], &mut reg);

-        assert_that(&res.unwrap(), equal_to(&names(["foo"])));
+        assert_that(&res.unwrap(), contains(names(["root", "foo"])).exactly());
     }

     #[test]
     pub fn test_resolving_multiple_deps() {
         let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz")));
-        let res = resolve([dep("foo"), dep("baz")], &mut reg).unwrap();
+        let res = resolve(&pkg_id("root"), [dep("foo"), dep("baz")], &mut reg).unwrap();

-        assert_that(&res, contains(names(["foo", "baz"])).exactly());
+        assert_that(&res, contains(names(["root", "foo", "baz"])).exactly());
     }

     #[test]
     pub fn test_resolving_transitive_deps() {
         let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => "foo")));
-        let res = resolve([dep("bar")], &mut reg).unwrap();
+        let res = resolve(&pkg_id("root"), [dep("bar")], &mut reg).unwrap();

-        assert_that(&res, contains(names(["foo", "bar"])));
+        assert_that(&res, contains(names(["root", "foo", "bar"])));
     }

     #[test]
     pub fn test_resolving_common_transitive_deps() {
         let mut reg = registry(vec!(pkg!("foo" => "bar"), pkg!("bar")));
-        let res = resolve([dep("foo"), dep("bar")], &mut reg).unwrap();
+        let res = resolve(&pkg_id("root"), [dep("foo"), dep("bar")], &mut reg).unwrap();

-        assert_that(&res, contains(names(["foo", "bar"])));
+        assert_that(&res, contains(names(["root", "foo", "bar"])));
+    }
+
+    #[test]
+    pub fn test_resolving_with_same_name() {
+        let list = vec!(pkg_loc("foo", "http://first.example.com"),
+                        pkg_loc("foo", "http://second.example.com"));
+
+        let mut reg = registry(list);
+        let res = resolve(&pkg_id("root"),
+                          [dep_loc("foo", "http://first.example.com"),
+                           dep_loc("foo", "http://second.example.com")],
+                          &mut reg);
+
+        let mut names = loc_names([("foo", "http://first.example.com"),
+                                   ("foo", "http://second.example.com")]);
+
+        names.push(pkg_id("root"));
+
+        assert_that(&res.unwrap(), contains(names).exactly());
     }

     #[test]

@@ -178,8 +291,8 @@ mod test {
             pkg!("bat")
         ));

-        let res = resolve([dep("foo"), dep("baz").as_dev()], &mut reg).unwrap();
+        let res = resolve(&pkg_id("root"), [dep("foo"), dep("baz").as_dev()], &mut reg).unwrap();

-        assert_that(&res, contains(names(["foo", "bar", "baz"])));
+        assert_that(&res, contains(names(["root", "foo", "bar", "baz"])));
     }
 }
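To make the new shape of the resolver concrete: callers now pass a root PackageId plus that package's dependency list and get back a Resolve whose graph can be walked with iter() and deps(), with the root itself appearing as a node. A sketch based directly on the helpers defined in the test module above (illustrative only, not an additional test in the patch):

    // Illustrative only: reuses the pkg(), pkg_id(), dep() and registry()
    // helpers from the test module above.
    fn resolve_example() {
        let mut reg = registry(vec!(pkg("foo")));
        let resolve = super::resolve(&pkg_id("root"), [dep("foo")], &mut reg).unwrap();

        // Every package that took part in the resolution, the root included.
        for id in resolve.iter() {
            println!("resolved {}", id);
        }

        // Direct dependency edges recorded for the root package.
        for dep_id in resolve.deps(&pkg_id("root")).unwrap() {
            println!("root -> {}", dep_id);
        }
    }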
@@ -42,7 +42,7 @@ impl MultiShell {
         &mut self.err
     }

-    pub fn say<T: ToStr>(&mut self, message: T, color: Color) -> IoResult<()> {
+    pub fn say<T: ToString>(&mut self, message: T, color: Color) -> IoResult<()> {
         self.out().say(message, color)
     }

@@ -60,11 +60,11 @@ impl MultiShell {
         Ok(())
     }

-    pub fn error<T: ToStr>(&mut self, message: T) -> IoResult<()> {
+    pub fn error<T: ToString>(&mut self, message: T) -> IoResult<()> {
         self.err().say(message, RED)
     }

-    pub fn warn<T: ToStr>(&mut self, message: T) -> IoResult<()> {
+    pub fn warn<T: ToString>(&mut self, message: T) -> IoResult<()> {
         self.err().say(message, YELLOW)
     }
 }

@@ -96,10 +96,10 @@ impl Shell {
         Ok(())
     }

-    pub fn say<T: ToStr>(&mut self, message: T, color: Color) -> IoResult<()> {
+    pub fn say<T: ToString>(&mut self, message: T, color: Color) -> IoResult<()> {
         try!(self.reset());
         if color != BLACK { try!(self.fg(color)); }
-        try!(self.write_line(message.to_str().as_slice()));
+        try!(self.write_line(message.to_string().as_slice()));
         try!(self.reset());
         try!(self.flush());
         Ok(())
@@ -73,7 +73,7 @@ impl<E, D: Decoder<E>> Decodable<D, E> for Location {

 impl<E, S: Encoder<E>> Encodable<S, E> for Location {
     fn encode(&self, e: &mut S) -> Result<(), E> {
-        self.to_str().encode(e)
+        self.to_string().encode(e)
     }
 }

@@ -137,8 +137,8 @@ impl PartialEq for SourceId {
         match (&self.kind, &other.kind, &self.location, &other.location) {
             (&GitKind(..), &GitKind(..),
              &Remote(ref u1), &Remote(ref u2)) => {
-                git::canonicalize_url(u1.to_str().as_slice()) ==
-                    git::canonicalize_url(u2.to_str().as_slice())
+                git::canonicalize_url(u1.to_string().as_slice()) ==
+                    git::canonicalize_url(u2.to_string().as_slice())
             }
             _ => false,
         }

@@ -156,7 +156,7 @@ impl SourceId {
     }

     pub fn for_git(url: &Url, reference: &str) -> SourceId {
-        SourceId::new(GitKind(reference.to_str()), Remote(url.clone()))
+        SourceId::new(GitKind(reference.to_string()), Remote(url.clone()))
     }

     pub fn for_central() -> SourceId {
@@ -48,7 +48,7 @@ pub trait SummaryVec {
 impl SummaryVec for Vec<Summary> {
     // TODO: Move to Registry
     fn names(&self) -> Vec<String> {
-        self.iter().map(|summary| summary.get_name().to_str()).collect()
+        self.iter().map(|summary| summary.get_name().to_string()).collect()
     }

 }

@@ -487,14 +487,14 @@ mod test {
     pub fn test_parsing_exact() {
         let r = req("1.0.0");

-        assert!(r.to_str() == "= 1.0.0".to_str());
+        assert!(r.to_string() == "= 1.0.0".to_string());

         assert_match(&r, ["1.0.0"]);
         assert_not_match(&r, ["1.0.1", "0.9.9", "0.10.0", "0.1.0"]);

         let r = req("0.9.0");

-        assert!(r.to_str() == "= 0.9.0".to_str());
+        assert!(r.to_string() == "= 0.9.0".to_string());

         assert_match(&r, ["0.9.0"]);
         assert_not_match(&r, ["0.9.1", "1.9.0", "0.0.9"]);

@@ -504,7 +504,7 @@ mod test {
     pub fn test_parsing_greater_than() {
         let r = req(">= 1.0.0");

-        assert!(r.to_str() == ">= 1.0.0".to_str());
+        assert!(r.to_string() == ">= 1.0.0".to_string());

         assert_match(&r, ["1.0.0"]);
     }
@@ -2,9 +2,11 @@
 #![crate_type="rlib"]

 #![feature(macro_rules, phase)]
+#![feature(default_type_params)]

 extern crate debug;
 extern crate term;
+extern crate collections;
 extern crate url;
 extern crate serialize;
 extern crate semver;

@@ -204,7 +206,7 @@ pub fn handle_error(err: CliError, shell: &mut MultiShell) {
     if unknown {
         let _ = shell.error("An unknown error occurred");
     } else {
-        let _ = shell.error(error.to_str());
+        let _ = shell.error(error.to_string());
     }

     if error.cause().is_some() {

@@ -246,7 +248,7 @@ fn global_flags() -> CliResult<GlobalFlags> {

 fn json_from_stdin<T: RepresentsJSON>() -> CliResult<T> {
     let mut reader = io::stdin();
-    let input = try!(reader.read_to_str().map_err(|_| {
+    let input = try!(reader.read_to_string().map_err(|_| {
         CliError::new("Standard in did not exist or was not UTF-8", 1)
     }));

@@ -24,7 +24,7 @@

 use std::os;
 use util::config::{Config, ConfigValue};
-use core::{MultiShell, Source, SourceId, PackageSet, Target, resolver};
+use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId, resolver};
 use core::registry::PackageRegistry;
 use ops;
 use sources::{PathSource};

@@ -57,18 +57,22 @@ pub fn compile(manifest_path: &Path, options: CompileOptions) -> CargoResult<()>
     let override_ids = try!(source_ids_from_config());
     let source_ids = package.get_source_ids();

-    let packages = {
+    let (packages, resolve) = {
         let mut config = try!(Config::new(shell, update, jobs));

         let mut registry =
             try!(PackageRegistry::new(source_ids, override_ids, &mut config));

-        let resolved =
-            try!(resolver::resolve(package.get_dependencies(), &mut registry));
+        let resolved = try!(resolver::resolve(package.get_package_id(),
+                                              package.get_dependencies(),
+                                              &mut registry));

-        try!(registry.get(resolved.as_slice()).wrap({
+        let req: Vec<PackageId> = resolved.iter().map(|r| r.clone()).collect();
+        let packages = try!(registry.get(req.as_slice()).wrap({
             human("Unable to get packages from source")
-        }))
+        }));
+
+        (packages, resolved)
     };

     debug!("packages={}", packages);

@@ -78,8 +82,9 @@ pub fn compile(manifest_path: &Path, options: CompileOptions) -> CargoResult<()>
     }).collect::<Vec<&Target>>();

     let mut config = try!(Config::new(shell, update, jobs));
-    try!(ops::compile_targets(targets.as_slice(), &package,
-                              &PackageSet::new(packages.as_slice()), &mut config));
+    try!(ops::compile_targets(env.as_slice(), targets.as_slice(), &package,
+                              &PackageSet::new(packages.as_slice()), &resolve, &mut config));

     Ok(())
 }
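In prose: compile now resolves against the root package's PackageId, flattens the resulting graph into a list of ids to fetch from the registry, and threads both the fetched PackageSet and the Resolve graph into compile_targets. A condensed sketch of that flow, with the error wrapping and configuration of the real code elided (illustrative only, not literal code from the patch):

    let resolve = try!(resolver::resolve(package.get_package_id(),
                                         package.get_dependencies(),
                                         &mut registry));
    let ids: Vec<PackageId> = resolve.iter().map(|r| r.clone()).collect();
    let packages = try!(registry.get(ids.as_slice()));
    try!(ops::compile_targets(env.as_slice(), targets.as_slice(), &package,
                              &PackageSet::new(packages.as_slice()), &resolve, &mut config));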
@@ -6,9 +6,9 @@ use std::os::args;
 use std::str;
 use term::color::YELLOW;

-use core::{Package, PackageSet, Target};
+use core::{Package, PackageSet, Target, Resolve};
 use util;
-use util::{CargoResult, ChainError, ProcessBuilder, internal, human, CargoError};
+use util::{CargoResult, ChainError, ProcessBuilder, CargoError, internal, human};
 use util::{Config, TaskPool, DependencyQueue, Fresh, Dirty, Freshness};

 type Args = Vec<String>;

@@ -18,7 +18,10 @@ struct Context<'a, 'b> {
     deps_dir: &'a Path,
     primary: bool,
     rustc_version: &'a str,
-    config: &'b mut Config<'b>
+    resolve: &'a Resolve,
+    package_set: &'a PackageSet,
+    config: &'b mut Config<'b>,
+    dylib: (String, String)
 }

 type Job = proc():Send -> CargoResult<()>;

@@ -41,9 +44,10 @@ fn uniq_target_dest<'a>(targets: &[&'a Target]) -> Option<&'a str> {
     curr.unwrap()
 }

-pub fn compile_targets<'a>(targets: &[&Target], pkg: &Package, deps: &PackageSet,
-                           config: &'a mut Config<'a>) -> CargoResult<()> {
+pub fn compile_targets<'a>(env: &str, targets: &[&Target], pkg: &Package,
+                           deps: &PackageSet, resolve: &'a Resolve,
+                           config: &'a mut Config<'a>) -> CargoResult<()>
+{
     if targets.is_empty() {
         return Ok(());
     }
|
|||||||
internal(format!("Couldn't create the directory for dependencies for {} at {}",
|
internal(format!("Couldn't create the directory for dependencies for {} at {}",
|
||||||
pkg.get_name(), deps_target_dir.display()))));
|
pkg.get_name(), deps_target_dir.display()))));
|
||||||
|
|
||||||
|
let output = try!(util::process("rustc")
|
||||||
|
.arg("-")
|
||||||
|
.arg("--crate-name").arg("-")
|
||||||
|
.arg("--crate-type").arg("dylib")
|
||||||
|
.arg("--print-file-name")
|
||||||
|
.exec_with_output());
|
||||||
|
|
||||||
|
let output = str::from_utf8(output.output.as_slice()).unwrap();
|
||||||
|
|
||||||
|
let parts: Vec<&str> = output.slice_to(output.len() - 1).split('-').collect();
|
||||||
|
assert!(parts.len() == 2, "rustc --print-file-name output has changed");
|
||||||
|
|
||||||
let mut cx = Context {
|
let mut cx = Context {
|
||||||
dest: &deps_target_dir,
|
dest: &deps_target_dir,
|
||||||
deps_dir: &deps_target_dir,
|
deps_dir: &deps_target_dir,
|
||||||
primary: false,
|
primary: false,
|
||||||
rustc_version: rustc_version.as_slice(),
|
rustc_version: rustc_version.as_slice(),
|
||||||
config: config
|
resolve: resolve,
|
||||||
|
package_set: deps,
|
||||||
|
config: config,
|
||||||
|
dylib: (parts.get(0).to_string(), parts.get(1).to_string())
|
||||||
};
|
};
|
||||||
|
|
||||||
// Build up a list of pending jobs, each of which represent compiling a
|
// Build up a list of pending jobs, each of which represent compiling a
|
||||||
@ -82,13 +101,17 @@ pub fn compile_targets<'a>(targets: &[&Target], pkg: &Package, deps: &PackageSet
|
|||||||
// everything in order with proper parallelism.
|
// everything in order with proper parallelism.
|
||||||
let mut jobs = Vec::new();
|
let mut jobs = Vec::new();
|
||||||
for dep in deps.iter() {
|
for dep in deps.iter() {
|
||||||
|
if dep == pkg { continue; }
|
||||||
|
|
||||||
// Only compile lib targets for dependencies
|
// Only compile lib targets for dependencies
|
||||||
let targets = dep.get_targets().iter().filter(|target| {
|
let targets = dep.get_targets().iter().filter(|target| {
|
||||||
target.is_lib() && target.get_profile().is_compile()
|
target.is_lib() && match env {
|
||||||
|
"test" => target.get_profile().is_compile(),
|
||||||
|
_ => target.get_profile().get_env() == env,
|
||||||
|
}
|
||||||
}).collect::<Vec<&Target>>();
|
}).collect::<Vec<&Target>>();
|
||||||
|
|
||||||
jobs.push((dep,
|
jobs.push((dep, try!(compile(targets.as_slice(), dep, &mut cx))));
|
||||||
try!(compile(targets.as_slice(), dep, &mut cx))));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
cx.primary = true;
|
cx.primary = true;
|
||||||
@ -101,7 +124,7 @@ pub fn compile_targets<'a>(targets: &[&Target], pkg: &Package, deps: &PackageSet
|
|||||||
|
|
||||||
fn compile(targets: &[&Target], pkg: &Package,
|
fn compile(targets: &[&Target], pkg: &Package,
|
||||||
cx: &mut Context) -> CargoResult<(Freshness, Job)> {
|
cx: &mut Context) -> CargoResult<(Freshness, Job)> {
|
||||||
debug!("compile_pkg; pkg={}; targets={}", pkg, pkg.get_targets());
|
debug!("compile_pkg; pkg={}; targets={}", pkg, targets);
|
||||||
|
|
||||||
if targets.is_empty() {
|
if targets.is_empty() {
|
||||||
return Ok((Fresh, proc() Ok(())))
|
return Ok((Fresh, proc() Ok(())))
|
||||||
@ -135,7 +158,7 @@ fn compile(targets: &[&Target], pkg: &Package,
|
|||||||
// After the custom command has run, execute rustc for all targets of our
|
// After the custom command has run, execute rustc for all targets of our
|
||||||
// package.
|
// package.
|
||||||
for &target in targets.iter() {
|
for &target in targets.iter() {
|
||||||
cmds.push(rustc(&pkg.get_root(), target, cx));
|
cmds.push(rustc(pkg, target, cx));
|
||||||
}
|
}
|
||||||
|
|
||||||
cmds.push(proc() {
|
cmds.push(proc() {
|
||||||
@ -169,7 +192,7 @@ fn is_fresh(dep: &Package, loc: &Path,
|
|||||||
Err(..) => return Ok((false, new_fingerprint)),
|
Err(..) => return Ok((false, new_fingerprint)),
|
||||||
};
|
};
|
||||||
|
|
||||||
let old_fingerprint = try!(file.read_to_str());
|
let old_fingerprint = try!(file.read_to_string());
|
||||||
|
|
||||||
log!(5, "old fingerprint: {}", old_fingerprint);
|
log!(5, "old fingerprint: {}", old_fingerprint);
|
||||||
log!(5, "new fingerprint: {}", new_fingerprint);
|
log!(5, "new fingerprint: {}", new_fingerprint);
|
||||||
@ -207,37 +230,42 @@ fn compile_custom(pkg: &Package, cmd: &str,
|
|||||||
proc() p.exec_with_output().map(|_| ()).map_err(|e| e.mark_human())
|
proc() p.exec_with_output().map(|_| ()).map_err(|e| e.mark_human())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rustc(root: &Path, target: &Target, cx: &mut Context) -> Job {
|
fn rustc(package: &Package, target: &Target, cx: &mut Context) -> Job {
|
||||||
let crate_types = target.rustc_crate_types();
|
let crate_types = target.rustc_crate_types();
|
||||||
|
let root = package.get_root();
|
||||||
|
|
||||||
log!(5, "root={}; target={}; crate_types={}; dest={}; deps={}; verbose={}",
|
log!(5, "root={}; target={}; crate_types={}; dest={}; deps={}; verbose={}",
|
||||||
root.display(), target, crate_types, cx.dest.display(),
|
root.display(), target, crate_types, cx.dest.display(),
|
||||||
cx.deps_dir.display(), cx.primary);
|
cx.deps_dir.display(), cx.primary);
|
||||||
|
|
||||||
let primary = cx.primary;
|
let primary = cx.primary;
|
||||||
let rustc = prepare_rustc(root, target, crate_types, cx);
|
let rustc = prepare_rustc(package, target, crate_types, cx);
|
||||||
|
|
||||||
log!(5, "command={}", rustc);
|
log!(5, "command={}", rustc);
|
||||||
|
|
||||||
let _ = cx.config.shell().verbose(|shell| shell.status("Running", rustc.to_str()));
|
let _ = cx.config.shell().verbose(|shell| shell.status("Running", rustc.to_string()));
|
||||||
|
|
||||||
proc() {
|
proc() {
|
||||||
if primary {
|
if primary {
|
||||||
rustc.exec().map_err(|err| human(err.to_str()))
|
log!(5, "executing primary");
|
||||||
|
rustc.exec().map_err(|err| human(err.to_string()))
|
||||||
} else {
|
} else {
|
||||||
|
log!(5, "executing deps");
|
||||||
rustc.exec_with_output().and(Ok(())).map_err(|err| {
|
rustc.exec_with_output().and(Ok(())).map_err(|err| {
|
||||||
human(err.to_str())
|
human(err.to_string())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn prepare_rustc(root: &Path, target: &Target, crate_types: Vec<&str>,
|
fn prepare_rustc(package: &Package, target: &Target, crate_types: Vec<&str>,
|
||||||
cx: &Context) -> ProcessBuilder {
|
cx: &Context) -> ProcessBuilder
|
||||||
|
{
|
||||||
|
let root = package.get_root();
|
||||||
let mut args = Vec::new();
|
let mut args = Vec::new();
|
||||||
|
|
||||||
build_base_args(&mut args, target, crate_types, cx);
|
build_base_args(&mut args, target, crate_types, cx);
|
||||||
build_deps_args(&mut args, cx);
|
build_deps_args(&mut args, package, cx);
|
||||||
|
|
||||||
util::process("rustc")
|
util::process("rustc")
|
||||||
.cwd(root.clone())
|
.cwd(root.clone())
|
||||||
@ -253,57 +281,96 @@ fn build_base_args(into: &mut Args,
|
|||||||
let metadata = target.get_metadata();
|
let metadata = target.get_metadata();
|
||||||
|
|
||||||
// TODO: Handle errors in converting paths into args
|
// TODO: Handle errors in converting paths into args
|
||||||
into.push(target.get_src_path().display().to_str());
|
into.push(target.get_src_path().display().to_string());
|
||||||
|
|
||||||
into.push("--crate-name".to_str());
|
into.push("--crate-name".to_string());
|
||||||
into.push(target.get_name().to_str());
|
into.push(target.get_name().to_string());
|
||||||
|
|
||||||
for crate_type in crate_types.iter() {
|
for crate_type in crate_types.iter() {
|
||||||
into.push("--crate-type".to_str());
|
into.push("--crate-type".to_string());
|
||||||
into.push(crate_type.to_str());
|
into.push(crate_type.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
let out = cx.dest.clone();
|
let out = cx.dest.clone();
|
||||||
let profile = target.get_profile();
|
let profile = target.get_profile();
|
||||||
|
|
||||||
if profile.get_opt_level() != 0 {
|
if profile.get_opt_level() != 0 {
|
||||||
into.push("--opt-level".to_str());
|
into.push("--opt-level".to_string());
|
||||||
into.push(profile.get_opt_level().to_str());
|
into.push(profile.get_opt_level().to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
if profile.get_debug() {
|
// Right now -g is a little buggy, so we're not passing -g just yet
|
||||||
into.push("-g".to_str());
|
// if profile.get_debug() {
|
||||||
}
|
// into.push("-g".to_string());
|
||||||
|
// }
|
||||||
|
|
||||||
if profile.is_test() {
|
if profile.is_test() {
|
||||||
into.push("--test".to_str());
|
into.push("--test".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
match metadata {
|
match metadata {
|
||||||
Some(m) => {
|
Some(m) => {
|
||||||
into.push("-C".to_str());
|
into.push("-C".to_string());
|
||||||
into.push(format!("metadata={}", m.metadata));
|
into.push(format!("metadata={}", m.metadata));
|
||||||
|
|
||||||
into.push("-C".to_str());
|
into.push("-C".to_string());
|
||||||
into.push(format!("extra-filename={}", m.extra_filename));
|
into.push(format!("extra-filename={}", m.extra_filename));
|
||||||
}
|
}
|
||||||
None => {}
|
None => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
if target.is_lib() {
|
if target.is_lib() {
|
||||||
into.push("--out-dir".to_str());
|
into.push("--out-dir".to_string());
|
||||||
into.push(out.display().to_str());
|
into.push(out.display().to_string());
|
||||||
} else {
|
} else {
|
||||||
into.push("-o".to_str());
|
into.push("-o".to_string());
|
||||||
into.push(out.join(target.get_name()).display().to_str());
|
into.push(out.join(target.get_name()).display().to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_deps_args(dst: &mut Args, cx: &Context) {
|
fn build_deps_args(dst: &mut Args, package: &Package, cx: &Context) {
|
||||||
dst.push("-L".to_str());
|
dst.push("-L".to_string());
|
||||||
dst.push(cx.dest.display().to_str());
|
dst.push(cx.dest.display().to_string());
|
||||||
dst.push("-L".to_str());
|
dst.push("-L".to_string());
|
||||||
dst.push(cx.deps_dir.display().to_str());
|
dst.push(cx.deps_dir.display().to_string());
|
||||||
|
|
||||||
|
for target in dep_targets(package, cx).iter() {
|
||||||
|
dst.push("--extern".to_string());
|
||||||
|
dst.push(format!("{}={}/{}",
|
||||||
|
target.get_name(),
|
||||||
|
cx.deps_dir.display(),
|
||||||
|
target_filename(target, cx)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn target_filename(target: &Target, cx: &Context) -> String {
|
||||||
|
let stem = target.file_stem();
|
||||||
|
|
||||||
|
if target.is_dylib() {
|
||||||
|
let (ref prefix, ref suffix) = cx.dylib;
|
||||||
|
format!("{}{}{}", prefix, stem, suffix)
|
||||||
|
} else if target.is_rlib() {
|
||||||
|
format!("lib{}.rlib", stem)
|
||||||
|
} else {
|
||||||
|
unreachable!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn dep_targets(pkg: &Package, cx: &Context) -> Vec<Target> {
|
||||||
|
match cx.resolve.deps(pkg.get_package_id()) {
|
||||||
|
None => vec!(),
|
||||||
|
Some(deps) => deps
|
||||||
|
.map(|pkg_id| {
|
||||||
|
cx.package_set.iter()
|
||||||
|
.find(|pkg| pkg_id == pkg.get_package_id())
|
||||||
|
.expect("Should have found package")
|
||||||
|
})
|
||||||
|
.filter_map(|pkg| {
|
||||||
|
pkg.get_targets().iter().find(|&t| t.is_lib() && t.get_profile().is_compile())
|
||||||
|
})
|
||||||
|
.map(|t| t.clone())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Execute all jobs necessary to build the dependency graph.
|
/// Execute all jobs necessary to build the dependency graph.
|
||||||
|
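Putting the new pieces together: Target::file_stem() (added in the manifest hunks earlier) appends the metadata extra_filename to the crate name, target_filename wraps that stem in either the probed dylib prefix/suffix or the fixed lib*.rlib pattern, and build_deps_args then points --extern at exactly that file in the deps directory. A worked example with a hypothetical hash value (illustrative only):

    // Suppose a dependency "foo" whose metadata extra_filename is "-0418a95b" (hypothetical),
    // and cx.dylib == ("lib", ".so") from the probe earlier in the patch.
    let stem = "foo-0418a95b";                                // Target::file_stem()
    let (prefix, suffix) = ("lib", ".so");                    // cx.dylib
    let dylib_file = format!("{}{}{}", prefix, stem, suffix); // "libfoo-0418a95b.so"
    let rlib_file = format!("lib{}.rlib", stem);              // "libfoo-0418a95b.rlib"
    // build_deps_args then emits, e.g.:  --extern foo=<deps dir>/libfoo-0418a95b.rlib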
@@ -65,11 +65,11 @@ fn ident(location: &Location) -> String {
     let ident = match *location {
         Local(ref path) => {
             let last = path.components().last().unwrap();
-            str::from_utf8(last).unwrap().to_str()
+            str::from_utf8(last).unwrap().to_string()
         }
         Remote(ref url) => {
             let path = canonicalize_url(url.path.path.as_slice());
-            path.as_slice().split('/').last().unwrap().to_str()
+            path.as_slice().split('/').last().unwrap().to_string()
         }
     };

@@ -79,7 +79,7 @@ fn ident(location: &Location) -> String {
         ident
     };

-    let location = canonicalize_url(location.to_str().as_slice());
+    let location = canonicalize_url(location.to_string().as_slice());

     format!("{}-{}", ident, to_hex(hasher.hash(&location.as_slice())))
 }
@@ -19,7 +19,7 @@ impl GitReference {
         if string.as_slice() == "master" {
             Master
         } else {
-            Other(string.as_slice().to_str())
+            Other(string.as_slice().to_string())
         }
     }
 }

@@ -79,7 +79,7 @@ struct EncodableGitRemote {
 impl<E, S: Encoder<E>> Encodable<S, E> for GitRemote {
     fn encode(&self, s: &mut S) -> Result<(), E> {
         EncodableGitRemote {
-            location: self.location.to_str()
+            location: self.location.to_string()
         }.encode(s)
     }
 }

@@ -102,7 +102,7 @@ impl<E, S: Encoder<E>> Encodable<S, E> for GitDatabase {
     fn encode(&self, s: &mut S) -> Result<(), E> {
         EncodableGitDatabase {
             remote: self.remote.clone(),
-            path: self.path.display().to_str()
+            path: self.path.display().to_string()
         }.encode(s)
     }
 }

@@ -129,9 +129,9 @@ impl<E, S: Encoder<E>> Encodable<S, E> for GitCheckout {
     fn encode(&self, s: &mut S) -> Result<(), E> {
         EncodableGitCheckout {
             database: self.database.clone(),
-            location: self.location.display().to_str(),
-            reference: self.reference.to_str(),
-            revision: self.revision.to_str()
+            location: self.location.display().to_string(),
+            reference: self.reference.to_string(),
+            revision: self.revision.to_string()
         }.encode(s)
     }
 }

@@ -182,8 +182,8 @@ impl GitRemote {

     fn fetch_location(&self) -> String {
         match self.location {
-            Local(ref p) => p.display().to_str(),
-            Remote(ref u) => u.to_str(),
+            Local(ref p) => p.display().to_string(),
+            Remote(ref u) => u.to_string(),
         }
     }
 }

@@ -308,10 +308,10 @@ fn git_output(path: &Path, str: String) -> CargoResult<String> {
         .chain_error(||
             human(format!("Executing `git {}` failed", str))));

-    Ok(to_str(output.output.as_slice()).as_slice().trim_right().to_str())
+    Ok(to_str(output.output.as_slice()).as_slice().trim_right().to_string())
 }

 fn to_str(vec: &[u8]) -> String {
-    str::from_utf8_lossy(vec).to_str()
+    str::from_utf8_lossy(vec).to_string()
 }

@@ -102,7 +102,7 @@ impl Source for PathSource {
             let loc = pkg.get_manifest_path().dir_path();
             max = cmp::max(max, try!(walk(&loc, true)));
         }
-        return Ok(max.to_str());
+        return Ok(max.to_string());
 
         fn walk(path: &Path, is_root: bool) -> CargoResult<u64> {
             if !path.is_dir() {
@@ -118,7 +118,7 @@ impl<E, S: Encoder<E>> Encodable<S, E> for ConfigValue {
 impl fmt::Show for ConfigValue {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let paths: Vec<String> = self.path.iter().map(|p| {
-            p.display().to_str()
+            p.display().to_string()
         }).collect();
         write!(f, "{} (from {})", self.value, paths)
     }

@@ -184,16 +184,16 @@ fn walk_tree(pwd: &Path,
 }
 
 fn extract_config(mut file: io::fs::File, key: &str) -> CargoResult<ConfigValue> {
-    let contents = try!(file.read_to_str());
+    let contents = try!(file.read_to_string());
     let toml = try!(cargo_toml::parse(contents.as_slice(),
                                       file.path().filename_display()
-                                          .to_str().as_slice()));
+                                          .to_string().as_slice()));
     let val = try!(toml.find_equiv(&key).require(|| internal("")));
 
     let v = match *val {
         toml::String(ref val) => String(val.clone()),
         toml::Array(ref val) => {
-            List(val.iter().map(|s: &toml::Value| s.to_str()).collect())
+            List(val.iter().map(|s: &toml::Value| s.to_string()).collect())
         }
         _ => return Err(internal(""))
     };

@@ -204,8 +204,8 @@ fn extract_config(mut file: io::fs::File, key: &str) -> CargoResult<ConfigValue>
 fn extract_all_configs(mut file: io::fs::File,
                        map: &mut HashMap<String, ConfigValue>) -> CargoResult<()> {
     let path = file.path().clone();
-    let contents = try!(file.read_to_str());
-    let file = path.filename_display().to_str();
+    let contents = try!(file.read_to_string());
+    let file = path.filename_display().to_string();
     let table = try!(cargo_toml::parse(contents.as_slice(),
                                        file.as_slice()).chain_error(|| {
         internal(format!("could not parse Toml manifest; path={}",
@@ -61,7 +61,7 @@ impl<T> DependencyQueue<T> {
     ///
     /// Only registered packages will be returned from dequeue().
     pub fn register(&mut self, pkg: &Package) {
-        self.reverse_dep_map.insert(pkg.get_name().to_str(), HashSet::new());
+        self.reverse_dep_map.insert(pkg.get_name().to_string(), HashSet::new());
     }
 
     /// Adds a new package to this dependency queue.

@@ -70,10 +70,10 @@ impl<T> DependencyQueue<T> {
     /// be added to the dependency queue.
     pub fn enqueue(&mut self, pkg: &Package, fresh: Freshness, data: T) {
         // ignore self-deps
-        if self.pkgs.contains_key(&pkg.get_name().to_str()) { return }
+        if self.pkgs.contains_key(&pkg.get_name().to_string()) { return }
 
         if fresh == Dirty {
-            self.dirty.insert(pkg.get_name().to_str());
+            self.dirty.insert(pkg.get_name().to_string());
         }
 
         let mut my_dependencies = HashSet::new();

@@ -84,12 +84,12 @@ impl<T> DependencyQueue<T> {
                 continue
             }
 
-            let name = dep.get_name().to_str();
+            let name = dep.get_name().to_string();
             assert!(my_dependencies.insert(name.clone()));
             let rev = self.reverse_dep_map.find_or_insert(name, HashSet::new());
-            assert!(rev.insert(pkg.get_name().to_str()));
+            assert!(rev.insert(pkg.get_name().to_string()));
         }
-        assert!(self.pkgs.insert(pkg.get_name().to_str(),
+        assert!(self.pkgs.insert(pkg.get_name().to_string(),
                                  (my_dependencies, data)));
     }
 

@@ -100,7 +100,7 @@ impl<T> DependencyQueue<T> {
     pub fn dequeue(&mut self) -> Option<(String, Freshness, T)> {
         let pkg = match self.pkgs.iter()
                             .find(|&(_, &(ref deps, _))| deps.len() == 0)
-                            .map(|(ref name, _)| name.to_str()) {
+                            .map(|(ref name, _)| name.to_string()) {
             Some(pkg) => pkg,
             None => return None
         };
@@ -120,20 +120,20 @@ impl<T, E: CargoError + Send> ChainError<T> for Result<T, E> {
 }
 
 impl CargoError for IoError {
-    fn description(&self) -> String { self.to_str() }
+    fn description(&self) -> String { self.to_string() }
 }
 
 from_error!(IoError)
 
 impl CargoError for TomlError {
-    fn description(&self) -> String { self.to_str() }
+    fn description(&self) -> String { self.to_string() }
 }
 
 from_error!(TomlError)
 
 impl CargoError for FormatError {
     fn description(&self) -> String {
-        "formatting failed".to_str()
+        "formatting failed".to_string()
     }
 }
 

@@ -152,8 +152,8 @@ from_error!(ProcessError)
 impl Show for ProcessError {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
         let exit = match self.exit {
-            Some(ExitStatus(i)) | Some(ExitSignal(i)) => i.to_str(),
-            None => "never executed".to_str()
+            Some(ExitStatus(i)) | Some(ExitSignal(i)) => i.to_string(),
+            None => "never executed".to_string()
         };
         try!(write!(f, "{} (status={})", self.msg, exit));
         match self.output {

@@ -178,7 +178,7 @@ impl Show for ProcessError {
 }
 
 impl CargoError for ProcessError {
-    fn description(&self) -> String { self.to_str() }
+    fn description(&self) -> String { self.to_string() }
 
     fn detail(&self) -> Option<String> {
         self.detail.clone()

@@ -248,7 +248,7 @@ pub struct CliError {
 
 impl CargoError for CliError {
     fn description(&self) -> String {
-        self.error.to_str()
+        self.error.to_string()
     }
 }
 

@@ -256,7 +256,7 @@ from_error!(CliError)
 
 impl CliError {
     pub fn new<S: Str>(error: S, code: uint) -> CliError {
-        let error = human(error.as_slice().to_str());
+        let error = human(error.as_slice().to_string());
         CliError::from_boxed(error, code)
     }
 

@@ -276,7 +276,7 @@ pub fn process_error<S: Str>(msg: S,
                              status: Option<&ProcessExit>,
                              output: Option<&ProcessOutput>) -> ProcessError {
     ProcessError {
-        msg: msg.as_slice().to_str(),
+        msg: msg.as_slice().to_string(),
         exit: status.map(|o| o.clone()),
         output: output.map(|o| o.clone()),
         detail: None,

@@ -287,8 +287,8 @@ pub fn process_error<S: Str>(msg: S,
 pub fn internal_error<S1: Str, S2: Str>(error: S1,
                                         detail: S2) -> Box<CargoError + Send> {
     box ConcreteCargoError {
-        description: error.as_slice().to_str(),
-        detail: Some(detail.as_slice().to_str()),
+        description: error.as_slice().to_string(),
+        detail: Some(detail.as_slice().to_string()),
         cause: None,
         is_human: false
     } as Box<CargoError + Send>

@@ -296,7 +296,7 @@ pub fn internal_error<S1: Str, S2: Str>(error: S1,
 
 pub fn internal<S: Show>(error: S) -> Box<CargoError + Send> {
     box ConcreteCargoError {
-        description: error.to_str(),
+        description: error.to_string(),
         detail: None,
         cause: None,
         is_human: false

@@ -305,7 +305,7 @@ pub fn internal<S: Show>(error: S) -> Box<CargoError + Send> {
 
 pub fn human<S: Show>(error: S) -> Box<CargoError + Send> {
     box ConcreteCargoError {
-        description: error.to_str(),
+        description: error.to_string(),
        detail: None,
        cause: None,
        is_human: true
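Aside (not from the commit): the `human` and `internal` constructors above differ only in the `is_human` flag, which controls whether a message is meant to be shown to the user verbatim. Below is a hypothetical, minimal model of that distinction; the type and function names are stand-ins, not the real API.

// Illustrative sketch only; not part of the diff.
struct SimpleError {
    description: String,
    is_human: bool,
}

fn human(msg: &str) -> SimpleError {
    SimpleError { description: msg.to_string(), is_human: true }
}

fn internal(msg: &str) -> SimpleError {
    SimpleError { description: msg.to_string(), is_human: false }
}

fn main() {
    // A human error is shown as-is; an internal one points at a bug in the tool.
    assert!(human("could not find `Cargo.toml`").is_human);
    assert!(!internal("unexpected state in job queue").is_human);
}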
@@ -1,8 +1,9 @@
 use std::hash::Hash;
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
+use std::collections::hashmap::{Keys, SetItems};
 
 pub struct Graph<N> {
-    nodes: HashMap<N, Vec<N>>
+    nodes: HashMap<N, HashSet<N>>
 }
 
 enum Mark {

@@ -10,13 +11,30 @@ enum Mark {
     Done
 }
 
+pub type Nodes<'a, N> = Keys<'a, N, HashSet<N>>;
+pub type Edges<'a, N> = SetItems<'a, N>;
+
 impl<N: Eq + Hash + Clone> Graph<N> {
     pub fn new() -> Graph<N> {
         Graph { nodes: HashMap::new() }
     }
 
     pub fn add(&mut self, node: N, children: &[N]) {
-        self.nodes.insert(node, children.to_owned());
+        self.nodes.insert(node, children.iter().map(|n| n.clone()).collect());
+    }
+
+    pub fn link(&mut self, node: N, child: N) {
+        self.nodes
+            .find_or_insert_with(node, |_| HashSet::new())
+            .insert(child);
+    }
+
+    pub fn get_nodes<'a>(&'a self) -> &'a HashMap<N, HashSet<N>> {
+        &self.nodes
+    }
+
+    pub fn edges<'a>(&'a self, node: &N) -> Option<Edges<'a, N>> {
+        self.nodes.find(node).map(|set| set.iter())
     }
 
     pub fn sort(&self) -> Option<Vec<N>> {

@@ -44,4 +62,8 @@ impl<N: Eq + Hash + Clone> Graph<N> {
             dst.push(node.clone());
             marks.insert(node.clone(), Done);
         }
+
+    pub fn iter<'a>(&'a self) -> Nodes<'a, N> {
+        self.nodes.keys()
+    }
 }
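Aside (not from the commit): the `Graph` change above swaps the `Vec<N>` adjacency list for a `HashSet<N>`, so repeatedly linking the same edge is deduplicated. A small stand-in using only std collections to illustrate that property:

// Illustrative sketch only; not part of the diff. Mimics the
// HashMap<N, HashSet<N>> adjacency representation with plain std types.
use std::collections::{HashMap, HashSet};

fn main() {
    let mut nodes: HashMap<&str, HashSet<&str>> = HashMap::new();

    // add(): the children slice becomes a set.
    nodes.insert("cargo", ["toml", "git"].iter().cloned().collect());

    // link(): inserting an already-present edge is a no-op thanks to the set.
    nodes.entry("cargo").or_insert_with(HashSet::new).insert("git");

    assert_eq!(nodes["cargo"].len(), 2);
}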
@@ -8,6 +8,7 @@ pub use self::paths::realpath;
 pub use self::hex::{to_hex, short_hash};
 pub use self::pool::TaskPool;
 pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness};
+pub use self::graph::Graph;
 
 pub mod graph;
 pub mod process_builder;
@@ -36,12 +36,12 @@ static PATH_SEP : &'static str = ";";
 
 impl ProcessBuilder {
     pub fn arg<T: Str>(mut self, arg: T) -> ProcessBuilder {
-        self.args.push(arg.as_slice().to_str());
+        self.args.push(arg.as_slice().to_string());
         self
     }
 
     pub fn args<T: Str>(mut self, arguments: &[T]) -> ProcessBuilder {
-        self.args = arguments.iter().map(|a| a.as_slice().to_str()).collect();
+        self.args = arguments.iter().map(|a| a.as_slice().to_string()).collect();
         self
     }
 

@@ -51,7 +51,7 @@ impl ProcessBuilder {
 
     pub fn extra_path(mut self, path: Path) -> ProcessBuilder {
         // For now, just convert to a string, but we should do something better
-        self.path.unshift(path.display().to_str());
+        self.path.unshift(path.display().to_string());
         self
     }
 

@@ -63,10 +63,10 @@ impl ProcessBuilder {
     pub fn env(mut self, key: &str, val: Option<&str>) -> ProcessBuilder {
         match val {
             Some(v) => {
-                self.env.insert(key.to_str(), v.to_str());
+                self.env.insert(key.to_string(), v.to_string());
             },
             None => {
-                self.env.remove(&key.to_str());
+                self.env.remove(&key.to_string());
             }
         }
 

@@ -139,7 +139,7 @@ impl ProcessBuilder {
         }
 
         match self.build_path() {
-            Some(path) => ret.push(("PATH".to_str(), path)),
+            Some(path) => ret.push(("PATH".to_string(), path)),
             _ => ()
         }
 
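Aside (not from the commit): the `ProcessBuilder` methods above take `self` by value and return it, so calls chain builder-style. A hypothetical, minimal equivalent of that pattern (`Cmd` is a made-up stand-in, not the real type):

// Illustrative sketch only; not part of the diff.
#[derive(Default)]
struct Cmd {
    args: Vec<String>,
    env: Vec<(String, String)>,
}

impl Cmd {
    fn arg(mut self, a: &str) -> Cmd {
        self.args.push(a.to_string());
        self
    }

    fn env(mut self, key: &str, val: &str) -> Cmd {
        self.env.push((key.to_string(), val.to_string()));
        self
    }
}

fn main() {
    let c = Cmd::default().arg("build").env("HOME", "/tmp/home");
    assert_eq!(c.args, vec!["build".to_string()]);
    assert_eq!(c.env.len(), 1);
}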
@@ -198,9 +198,9 @@ struct Context<'a> {
 fn inferred_lib_target(name: &str, layout: &Layout) -> Option<Vec<TomlTarget>> {
     layout.lib.as_ref().map(|lib| {
         vec![TomlTarget {
-            name: name.to_str(),
+            name: name.to_string(),
             crate_type: None,
-            path: Some(lib.display().to_str()),
+            path: Some(lib.display().to_string()),
             test: None
         }]
     })

@@ -209,16 +209,16 @@ fn inferred_lib_target(name: &str, layout: &Layout) -> Option<Vec<TomlTarget>> {
 fn inferred_bin_targets(name: &str, layout: &Layout) -> Option<Vec<TomlTarget>> {
     Some(layout.bins.iter().filter_map(|bin| {
         let name = if bin.as_str() == Some("src/main.rs") {
-            Some(name.to_str())
+            Some(name.to_string())
         } else {
-            bin.filestem_str().map(|f| f.to_str())
+            bin.filestem_str().map(|f| f.to_string())
         };
 
         name.map(|name| {
             TomlTarget {
                 name: name,
                 crate_type: None,
-                path: Some(bin.display().to_str()),
+                path: Some(bin.display().to_string()),
                 test: None
             }
         })

@@ -252,7 +252,7 @@ impl TomlManifest {
                 TomlTarget {
                     name: t.name.clone(),
                     crate_type: t.crate_type.clone(),
-                    path: layout.lib.as_ref().map(|p| p.display().to_str()),
+                    path: layout.lib.as_ref().map(|p| p.display().to_string()),
                     test: t.test
                 }
             } else {

@@ -271,7 +271,7 @@ impl TomlManifest {
                 TomlTarget {
                     name: t.name.clone(),
                     crate_type: t.crate_type.clone(),
-                    path: bin.as_ref().map(|p| p.display().to_str()),
+                    path: bin.as_ref().map(|p| p.display().to_string()),
                     test: t.test
                 }
             } else {

@@ -336,7 +336,7 @@ fn process_dependencies<'a>(cx: &mut Context<'a>, dev: bool,
         let reference = details.branch.clone()
             .or_else(|| details.tag.clone())
             .or_else(|| details.rev.clone())
-            .unwrap_or_else(|| "master".to_str());
+            .unwrap_or_else(|| "master".to_string());
 
         let new_source_id = match details.git {
             Some(ref git) => {

@@ -386,25 +386,34 @@ fn normalize(lib: Option<&[TomlLibTarget]>,
 {
     log!(4, "normalizing toml targets; lib={}; bin={}", lib, bin);
 
-    fn target_profiles(target: &TomlTarget) -> Vec<Profile> {
+    enum TestDep { Needed, NotNeeded }
+
+    fn target_profiles(target: &TomlTarget,
+                       dep: Option<TestDep>) -> Vec<Profile> {
         let mut ret = vec!(Profile::default_dev(), Profile::default_release());
 
         match target.test {
             Some(true) | None => ret.push(Profile::default_test()),
+            Some(false) => {}
+        }
+
+        match dep {
+            Some(Needed) => ret.push(Profile::default_test().test(false)),
             _ => {}
-        };
+        }
 
         ret
     }
 
-    fn lib_targets(dst: &mut Vec<Target>, libs: &[TomlLibTarget], metadata: &Metadata) {
+    fn lib_targets(dst: &mut Vec<Target>, libs: &[TomlLibTarget],
+                   dep: TestDep, metadata: &Metadata) {
         let l = &libs[0];
         let path = l.path.clone().unwrap_or_else(|| format!("src/{}.rs", l.name));
         let crate_types = l.crate_type.clone().and_then(|kinds| {
             LibKind::from_strs(kinds).ok()
         }).unwrap_or_else(|| vec!(Lib));
 
-        for profile in target_profiles(l).iter() {
+        for profile in target_profiles(l, Some(dep)).iter() {
             dst.push(Target::lib_target(l.name.as_slice(), crate_types.clone(),
                                         &Path::new(path.as_slice()), profile,
                                         metadata));

@@ -416,7 +425,7 @@ fn normalize(lib: Option<&[TomlLibTarget]>,
         for bin in bins.iter() {
             let path = bin.path.clone().unwrap_or_else(|| default(bin));
 
-            for profile in target_profiles(bin).iter() {
+            for profile in target_profiles(bin, None).iter() {
                 dst.push(Target::bin_target(bin.name.as_slice(),
                                             &Path::new(path.as_slice()),
                                             profile));

@@ -428,12 +437,12 @@ fn normalize(lib: Option<&[TomlLibTarget]>,
 
     match (lib, bin) {
         (Some(ref libs), Some(ref bins)) => {
-            lib_targets(&mut ret, libs.as_slice(), metadata);
+            lib_targets(&mut ret, libs.as_slice(), Needed, metadata);
             bin_targets(&mut ret, bins.as_slice(),
                         |bin| format!("src/bin/{}.rs", bin.name));
         },
         (Some(ref libs), None) => {
-            lib_targets(&mut ret, libs.as_slice(), metadata);
+            lib_targets(&mut ret, libs.as_slice(), NotNeeded, metadata);
         },
         (None, Some(ref bins)) => {
             bin_targets(&mut ret, bins.as_slice(),
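Aside (not from the commit): the new `TestDep` flag above appears to add an extra non-harness build of a library when some binary target will need it for testing (the real code pushes `Profile::default_test().test(false)`). Below is a hypothetical sketch of how such a flag changes the resulting profile list; the `Profile` variant names are invented for illustration.

// Illustrative sketch only; not part of the diff.
#[derive(Debug, PartialEq)]
enum Profile {
    Dev,
    Release,
    Test,
    TestDepLib, // built with test settings, but not as a test harness
}

fn profiles(test_enabled: bool, needed_by_tests: bool) -> Vec<Profile> {
    let mut ret = vec![Profile::Dev, Profile::Release];
    if test_enabled {
        ret.push(Profile::Test);
    }
    if needed_by_tests {
        ret.push(Profile::TestDepLib);
    }
    ret
}

fn main() {
    // A library that other targets need while testing gets the extra profile.
    assert_eq!(profiles(true, true).len(), 4);
    // A plain binary target does not.
    assert_eq!(profiles(true, false).len(), 3);
}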
|
@ -30,7 +30,7 @@ struct FileBuilder {
|
|||||||
|
|
||||||
impl FileBuilder {
|
impl FileBuilder {
|
||||||
pub fn new(path: Path, body: &str) -> FileBuilder {
|
pub fn new(path: Path, body: &str) -> FileBuilder {
|
||||||
FileBuilder { path: path, body: body.to_str() }
|
FileBuilder { path: path, body: body.to_string() }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mk(&self) -> Result<(), String> {
|
fn mk(&self) -> Result<(), String> {
|
||||||
@ -86,7 +86,7 @@ pub struct ProjectBuilder {
|
|||||||
impl ProjectBuilder {
|
impl ProjectBuilder {
|
||||||
pub fn new(name: &str, root: Path) -> ProjectBuilder {
|
pub fn new(name: &str, root: Path) -> ProjectBuilder {
|
||||||
ProjectBuilder {
|
ProjectBuilder {
|
||||||
name: name.to_str(),
|
name: name.to_string(),
|
||||||
root: root,
|
root: root,
|
||||||
files: vec!(),
|
files: vec!(),
|
||||||
symlinks: vec!()
|
symlinks: vec!()
|
||||||
@ -108,7 +108,7 @@ impl ProjectBuilder {
|
|||||||
pub fn process<T: ToCStr>(&self, program: T) -> ProcessBuilder {
|
pub fn process<T: ToCStr>(&self, program: T) -> ProcessBuilder {
|
||||||
process(program)
|
process(program)
|
||||||
.cwd(self.root())
|
.cwd(self.root())
|
||||||
.env("HOME", Some(paths::home().display().to_str().as_slice()))
|
.env("HOME", Some(paths::home().display().to_string().as_slice()))
|
||||||
.extra_path(cargo_dir())
|
.extra_path(cargo_dir())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -195,7 +195,7 @@ pub fn main_file<T: Str>(println: T, deps: &[&str]) -> String {
|
|||||||
buf.push_str(println.as_slice());
|
buf.push_str(println.as_slice());
|
||||||
buf.push_str("); }\n");
|
buf.push_str("); }\n");
|
||||||
|
|
||||||
buf.to_str()
|
buf.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
trait ErrMsg<T> {
|
trait ErrMsg<T> {
|
||||||
@ -238,13 +238,13 @@ struct Execs {
|
|||||||
|
|
||||||
impl Execs {
|
impl Execs {
|
||||||
|
|
||||||
pub fn with_stdout<S: ToStr>(mut ~self, expected: S) -> Box<Execs> {
|
pub fn with_stdout<S: ToString>(mut ~self, expected: S) -> Box<Execs> {
|
||||||
self.expect_stdout = Some(expected.to_str());
|
self.expect_stdout = Some(expected.to_string());
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn with_stderr<S: ToStr>(mut ~self, expected: S) -> Box<Execs> {
|
pub fn with_stderr<S: ToString>(mut ~self, expected: S) -> Box<Execs> {
|
||||||
self.expect_stderr = Some(expected.to_str());
|
self.expect_stderr = Some(expected.to_string());
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -310,7 +310,7 @@ impl Execs {
|
|||||||
|
|
||||||
impl ham::SelfDescribing for Execs {
|
impl ham::SelfDescribing for Execs {
|
||||||
fn describe(&self) -> String {
|
fn describe(&self) -> String {
|
||||||
"execs".to_str()
|
"execs".to_string()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -354,13 +354,13 @@ impl<'a> ham::Matcher<&'a [u8]> for ShellWrites {
|
|||||||
{
|
{
|
||||||
println!("{}", actual);
|
println!("{}", actual);
|
||||||
let actual = std::str::from_utf8_lossy(actual);
|
let actual = std::str::from_utf8_lossy(actual);
|
||||||
let actual = actual.to_str();
|
let actual = actual.to_string();
|
||||||
ham::expect(actual == self.expected, actual)
|
ham::expect(actual == self.expected, actual)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn shell_writes<T: Show>(string: T) -> Box<ShellWrites> {
|
pub fn shell_writes<T: Show>(string: T) -> Box<ShellWrites> {
|
||||||
box ShellWrites { expected: string.to_str() }
|
box ShellWrites { expected: string.to_string() }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait ResultTest<T,E> {
|
pub trait ResultTest<T,E> {
|
||||||
@ -397,7 +397,7 @@ impl<T> Tap for T {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn escape_path(p: &Path) -> String {
|
pub fn escape_path(p: &Path) -> String {
|
||||||
p.display().to_str().as_slice().replace("\\", "\\\\")
|
p.display().to_string().as_slice().replace("\\", "\\\\")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn basic_bin_manifest(name: &str) -> String {
|
pub fn basic_bin_manifest(name: &str) -> String {
|
||||||
@ -414,6 +414,7 @@ pub fn basic_bin_manifest(name: &str) -> String {
|
|||||||
"#, name, name)
|
"#, name, name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub static RUNNING: &'static str = " Running";
|
||||||
pub static COMPILING: &'static str = " Compiling";
|
pub static COMPILING: &'static str = " Compiling";
|
||||||
pub static FRESH: &'static str = " Fresh";
|
pub static FRESH: &'static str = " Fresh";
|
||||||
pub static UPDATING: &'static str = " Updating";
|
pub static UPDATING: &'static str = " Updating";
|
||||||
|
@ -1,9 +1,10 @@
|
|||||||
use std::io::fs;
|
use std::io::fs;
|
||||||
use std::os;
|
use std::os;
|
||||||
use std::path;
|
use std::path;
|
||||||
|
use std::str;
|
||||||
|
|
||||||
use support::{ResultTest, project, execs, main_file, escape_path, basic_bin_manifest};
|
use support::{ResultTest, project, execs, main_file, escape_path, basic_bin_manifest};
|
||||||
use support::COMPILING;
|
use support::{COMPILING, RUNNING};
|
||||||
use hamcrest::{assert_that, existing_file};
|
use hamcrest::{assert_that, existing_file};
|
||||||
use cargo;
|
use cargo;
|
||||||
use cargo::util::{process, realpath};
|
use cargo::util::{process, realpath};
|
||||||
@ -92,7 +93,7 @@ test!(cargo_compile_with_invalid_code {
|
|||||||
{filename}:1 invalid rust code!
|
{filename}:1 invalid rust code!
|
||||||
^~~~~~~
|
^~~~~~~
|
||||||
Could not execute process \
|
Could not execute process \
|
||||||
`rustc {filename} --crate-name foo --crate-type bin -g -o {} -L {} -L {}` (status=101)\n",
|
`rustc {filename} --crate-name foo --crate-type bin -o {} -L {} -L {}` (status=101)\n",
|
||||||
target.join("foo").display(),
|
target.join("foo").display(),
|
||||||
target.display(),
|
target.display(),
|
||||||
target.join("deps").display(),
|
target.join("deps").display(),
|
||||||
@ -152,7 +153,7 @@ test!(cargo_compile_with_warnings_in_a_dep_package {
|
|||||||
"#)
|
"#)
|
||||||
.file("bar/src/bar.rs", r#"
|
.file("bar/src/bar.rs", r#"
|
||||||
pub fn gimme() -> String {
|
pub fn gimme() -> String {
|
||||||
"test passed".to_str()
|
"test passed".to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn dead() {}
|
fn dead() {}
|
||||||
@ -229,7 +230,7 @@ test!(cargo_compile_with_nested_deps_inferred {
|
|||||||
"#)
|
"#)
|
||||||
.file("baz/src/lib.rs", r#"
|
.file("baz/src/lib.rs", r#"
|
||||||
pub fn gimme() -> String {
|
pub fn gimme() -> String {
|
||||||
"test passed".to_str()
|
"test passed".to_string()
|
||||||
}
|
}
|
||||||
"#);
|
"#);
|
||||||
|
|
||||||
@ -297,7 +298,7 @@ test!(cargo_compile_with_nested_deps_correct_bin {
|
|||||||
"#)
|
"#)
|
||||||
.file("baz/src/lib.rs", r#"
|
.file("baz/src/lib.rs", r#"
|
||||||
pub fn gimme() -> String {
|
pub fn gimme() -> String {
|
||||||
"test passed".to_str()
|
"test passed".to_string()
|
||||||
}
|
}
|
||||||
"#);
|
"#);
|
||||||
|
|
||||||
@ -373,7 +374,7 @@ test!(cargo_compile_with_nested_deps_shorthand {
|
|||||||
"#)
|
"#)
|
||||||
.file("baz/src/baz.rs", r#"
|
.file("baz/src/baz.rs", r#"
|
||||||
pub fn gimme() -> String {
|
pub fn gimme() -> String {
|
||||||
"test passed".to_str()
|
"test passed".to_string()
|
||||||
}
|
}
|
||||||
"#);
|
"#);
|
||||||
|
|
||||||
@ -449,7 +450,7 @@ test!(cargo_compile_with_nested_deps_longhand {
|
|||||||
"#)
|
"#)
|
||||||
.file("baz/src/baz.rs", r#"
|
.file("baz/src/baz.rs", r#"
|
||||||
pub fn gimme() -> String {
|
pub fn gimme() -> String {
|
||||||
"test passed".to_str()
|
"test passed".to_string()
|
||||||
}
|
}
|
||||||
"#);
|
"#);
|
||||||
|
|
||||||
@ -691,8 +692,8 @@ test!(custom_build_env_vars {
|
|||||||
.file("src/foo.rs", format!(r#"
|
.file("src/foo.rs", format!(r#"
|
||||||
use std::os;
|
use std::os;
|
||||||
fn main() {{
|
fn main() {{
|
||||||
assert_eq!(os::getenv("OUT_DIR").unwrap(), "{}".to_str());
|
assert_eq!(os::getenv("OUT_DIR").unwrap(), "{}".to_string());
|
||||||
assert_eq!(os::getenv("DEPS_DIR").unwrap(), "{}".to_str());
|
assert_eq!(os::getenv("DEPS_DIR").unwrap(), "{}".to_string());
|
||||||
}}
|
}}
|
||||||
"#,
|
"#,
|
||||||
escape_path(&p.root().join("target")),
|
escape_path(&p.root().join("target")),
|
||||||
@ -736,8 +737,8 @@ test!(custom_build_in_dependency {
|
|||||||
.file("src/foo.rs", format!(r#"
|
.file("src/foo.rs", format!(r#"
|
||||||
use std::os;
|
use std::os;
|
||||||
fn main() {{
|
fn main() {{
|
||||||
assert_eq!(os::getenv("OUT_DIR").unwrap(), "{}".to_str());
|
assert_eq!(os::getenv("OUT_DIR").unwrap(), "{}".to_string());
|
||||||
assert_eq!(os::getenv("DEPS_DIR").unwrap(), "{}".to_str());
|
assert_eq!(os::getenv("DEPS_DIR").unwrap(), "{}".to_string());
|
||||||
}}
|
}}
|
||||||
"#,
|
"#,
|
||||||
escape_path(&p.root().join("target/deps")),
|
escape_path(&p.root().join("target/deps")),
|
||||||
@ -807,7 +808,7 @@ test!(many_crate_types_old_style_lib_location {
|
|||||||
match f.filename_str().unwrap() {
|
match f.filename_str().unwrap() {
|
||||||
"deps" => None,
|
"deps" => None,
|
||||||
s if s.contains("fingerprint") || s.contains("dSYM") => None,
|
s if s.contains("fingerprint") || s.contains("dSYM") => None,
|
||||||
s => Some(s.to_str())
|
s => Some(s.to_string())
|
||||||
}
|
}
|
||||||
}).collect();
|
}).collect();
|
||||||
files.sort();
|
files.sort();
|
||||||
@ -845,7 +846,7 @@ test!(many_crate_types_correct {
|
|||||||
match f.filename_str().unwrap() {
|
match f.filename_str().unwrap() {
|
||||||
"deps" => None,
|
"deps" => None,
|
||||||
s if s.contains("fingerprint") || s.contains("dSYM") => None,
|
s if s.contains("fingerprint") || s.contains("dSYM") => None,
|
||||||
s => Some(s.to_str())
|
s => Some(s.to_string())
|
||||||
}
|
}
|
||||||
}).collect();
|
}).collect();
|
||||||
files.sort();
|
files.sort();
|
||||||
@@ -954,3 +955,120 @@ test!(missing_lib_and_bin {
                     .with_stderr("either a [[lib]] or [[bin]] section \
                                   must be present\n"));
 })
+
+test!(verbose_build {
+    let mut p = project("foo");
+    p = p
+        .file("Cargo.toml", r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "");
+    let output = p.cargo_process("cargo-build").arg("-v")
+                  .exec_with_output().assert();
+    let out = str::from_utf8(output.output.as_slice()).assert();
+    let hash = out.slice_from(out.find_str("extra-filename=").unwrap() + 15);
+    let hash = hash.slice_to(17);
+    assert_eq!(out, format!("\
+{} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib \
+        -C metadata=test:-:0.0.0:-:file:{dir} \
+        -C extra-filename={hash} \
+        --out-dir {dir}{sep}target \
+        -L {dir}{sep}target \
+        -L {dir}{sep}target{sep}deps`
+{} test v0.0.0 (file:{dir})\n",
+            RUNNING, COMPILING,
+            dir = p.root().display(),
+            sep = path::SEP,
+            hash = hash).as_slice());
+})
+
+test!(verbose_release_build {
+    let mut p = project("foo");
+    p = p
+        .file("Cargo.toml", r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "");
+    let output = p.cargo_process("cargo-build").arg("-v").arg("--release")
+                  .exec_with_output().assert();
+    let out = str::from_utf8(output.output.as_slice()).assert();
+    let hash = out.slice_from(out.find_str("extra-filename=").unwrap() + 15);
+    let hash = hash.slice_to(17);
+    assert_eq!(out, format!("\
+{} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib \
+        --opt-level 3 \
+        -C metadata=test:-:0.0.0:-:file:{dir} \
+        -C extra-filename={hash} \
+        --out-dir {dir}{sep}target{sep}release \
+        -L {dir}{sep}target{sep}release \
+        -L {dir}{sep}target{sep}release{sep}deps`
+{} test v0.0.0 (file:{dir})\n",
+            RUNNING, COMPILING,
+            dir = p.root().display(),
+            sep = path::SEP,
+            hash = hash).as_slice());
+})
+
+test!(verbose_release_build_deps {
+    let mut p = project("foo");
+    p = p
+        .file("Cargo.toml", r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.foo]
+            path = "foo"
+        "#)
+        .file("src/lib.rs", "")
+        .file("foo/Cargo.toml", r#"
+            [package]
+
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+        "#)
+        .file("foo/src/lib.rs", "");
+    let output = p.cargo_process("cargo-build").arg("-v").arg("--release")
+                  .exec_with_output().assert();
+    let out = str::from_utf8(output.output.as_slice()).assert();
+    let pos1 = out.find_str("extra-filename=").unwrap();
+    let hash1 = out.slice_from(pos1 + 15).slice_to(17);
+    let pos2 = out.slice_from(pos1 + 10).find_str("extra-filename=").unwrap();
+    let hash2 = out.slice_from(pos1 + 10 + pos2 + 15).slice_to(17);
+    assert_eq!(out, format!("\
+{running} `rustc {dir}{sep}foo{sep}src{sep}lib.rs --crate-name foo \
+        --crate-type lib \
+        --opt-level 3 \
+        -C metadata=foo:-:0.0.0:-:file:{dir} \
+        -C extra-filename={hash1} \
+        --out-dir {dir}{sep}target{sep}release{sep}deps \
+        -L {dir}{sep}target{sep}release{sep}deps \
+        -L {dir}{sep}target{sep}release{sep}deps`
+{running} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib \
+        --opt-level 3 \
+        -C metadata=test:-:0.0.0:-:file:{dir} \
+        -C extra-filename={hash2} \
+        --out-dir {dir}{sep}target{sep}release \
+        -L {dir}{sep}target{sep}release \
+        -L {dir}{sep}target{sep}release{sep}deps \
+        --extern foo={dir}{sep}target{sep}release{sep}deps/libfoo{hash1}.rlib`
+{compiling} foo v0.0.0 (file:{dir})
+{compiling} test v0.0.0 (file:{dir})\n",
+            running = RUNNING,
+            compiling = COMPILING,
+            dir = p.root().display(),
+            sep = path::SEP,
+            hash1 = hash1,
+            hash2 = hash2).as_slice());
+})
@@ -66,7 +66,7 @@ test!(cargo_compile_with_nested_deps_shorthand {
         "#)
         .file("bar/baz/src/baz.rs", r#"
             pub fn gimme() -> String {
-                "test passed".to_str()
+                "test passed".to_string()
             }
         "#);
 

@@ -97,32 +97,28 @@ test!(cargo_compile_with_root_dev_deps {
             [dev-dependencies.bar]
 
             version = "0.5.0"
-            path = "bar"
+            path = "../bar"
 
             [[bin]]
 
             name = "foo"
         "#)
-        .file("src/foo.rs",
-              main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice())
-        .file("bar/Cargo.toml", r#"
-            [project]
+        .file("src/main.rs",
+              main_file(r#""{}", bar::gimme()"#, ["bar"]).as_slice());
+    let p2 = project("bar")
+        .file("Cargo.toml", r#"
+            [package]
 
             name = "bar"
             version = "0.5.0"
             authors = ["wycats@example.com"]
-
-            [[lib]]
-
-            name = "bar"
         "#)
-        .file("bar/src/bar.rs", r#"
+        .file("src/lib.rs", r#"
             pub fn gimme() -> &'static str {
                 "zoidberg"
             }
-        "#)
-        ;
+        "#);
 
+    p2.build();
     assert_that(p.cargo_process("cargo-build"),
         execs().with_stdout(format!("{} bar v0.5.0 (file:{})\n\
                                      {} foo v0.5.0 (file:{})\n",
@@ -1,4 +1,7 @@
+use std::str;
+
 use support::{project, execs, basic_bin_manifest, COMPILING, cargo_dir};
+use support::{ResultTest};
 use hamcrest::{assert_that, existing_file};
 use cargo::util::process;
 

@@ -38,3 +41,94 @@ test!(cargo_test_simple {
 
     assert_that(&p.bin("test/foo"), existing_file());
 })
+
+test!(test_with_lib_dep {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/lib.rs", "
+            pub fn foo(){}
+            #[test] fn lib_test() {}
+        ")
+        .file("src/main.rs", "
+            extern crate foo;
+
+            fn main() {}
+
+            #[test]
+            fn bin_test() {}
+        ");
+
+    let output = p.cargo_process("cargo-test")
+                  .exec_with_output().assert();
+    let out = str::from_utf8(output.output.as_slice()).assert();
+
+    let bin = "\
+running 1 test
+test bin_test ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured";
+    let lib = "\
+running 1 test
+test lib_test ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured";
+
+    let head = format!("{compiling} foo v0.0.1 (file:{dir})",
+                       compiling = COMPILING, dir = p.root().display());
+
+    assert!(out == format!("{}\n\n{}\n\n\n{}\n\n", head, bin, lib).as_slice() ||
+            out == format!("{}\n\n{}\n\n\n{}\n\n", head, lib, bin).as_slice());
+})
+
+test!(test_with_deep_lib_dep {
+    let p = project("bar")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.foo]
+            path = "../foo"
+        "#)
+        .file("src/lib.rs", "
+            extern crate foo;
+            #[test]
+            fn bar_test() {
+                foo::foo();
+            }
+        ");
+    let p2 = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/lib.rs", "
+            pub fn foo() {}
+
+            #[test]
+            fn foo_test() {}
+        ");
+
+    p2.build();
+    assert_that(p.cargo_process("cargo-test"),
+                execs().with_status(0)
+                       .with_stdout(format!("\
+{compiling} foo v0.0.1 (file:{dir})
+{compiling} bar v0.0.1 (file:{dir})
+
+running 1 test
+test bar_test ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured\n\n\
+",
+                       compiling = COMPILING,
+                       dir = p.root().display()).as_slice()));
+})
@@ -56,5 +56,5 @@ fn colored_output<S: Str>(string: S, color: color::Color) -> IoResult<String> {
     try!(term.write_str(string.as_slice()));
     try!(term.reset());
     try!(term.flush());
-    Ok(from_utf8_lossy(term.get_ref().get_ref()).to_str())
+    Ok(from_utf8_lossy(term.get_ref().get_ref()).to_string())
 }