-use std::slice;
-use std::fmt;
+use std::cmp;
use std::fmt::{Show,Formatter};
-use std::path::Path;
+use std::fmt;
+use std::slice;
use semver::Version;
+
use core::{
Dependency,
Manifest,
Summary
};
use core::dependency::SerializedDependency;
-use util::{CargoResult, graph};
+use util::{CargoResult, graph, Config};
use serialize::{Encoder,Encodable};
-use core::source::SourceId;
+use core::source::{SourceId, SourceSet, Source};
// TODO: Is manifest_path a relic?
#[deriving(Clone,PartialEq)]
manifest: Manifest,
// The root of the package
manifest_path: Path,
+ // Where this package came from
+ source_id: SourceId,
}
#[deriving(Encodable)]
dependencies: Vec<SerializedDependency>,
authors: Vec<String>,
targets: Vec<Target>,
- manifest_path: String
+ manifest_path: String,
}
impl<E, S: Encoder<E>> Encodable<S, E> for Package {
}
impl Package {
+ /// Constructs a Package from its parsed manifest, the on-disk path of
+ /// that manifest, and the SourceId the package was loaded from (newly
+ /// recorded so the origin survives past construction).
- pub fn new(manifest: Manifest, manifest_path: &Path) -> Package {
+ pub fn new(manifest: Manifest,
+ manifest_path: &Path,
+ source_id: &SourceId) -> Package {
Package {
manifest: manifest,
- manifest_path: manifest_path.clone()
+ manifest_path: manifest_path.clone(),
+ source_id: source_id.clone(),
}
}
}
+ /// Returns every SourceId this package involves: its own recorded
+ /// source first, followed by the source ids declared in the manifest.
pub fn get_source_ids(&self) -> Vec<SourceId> {
- let mut ret = vec!(SourceId::for_path(&self.get_root()));
+ // Use the stored source_id (where this package actually came from)
+ // instead of re-deriving a path-based id from the package root.
+ let mut ret = vec!(self.source_id.clone());
ret.push_all(self.manifest.get_source_ids());
ret
}
+
+ /// Computes this package's fingerprint by loading each of its sources
+ /// (via `SourceId::load`) and combining their fingerprints through
+ /// `SourceSet::fingerprint`. Used to decide whether a rebuild is needed.
+ pub fn get_fingerprint(&self, config: &Config) -> CargoResult<String> {
+ let mut sources = self.get_source_ids();
+ // Sort the sources just to make sure we have a consistent fingerprint.
+ sources.sort_by(|a, b| {
+ cmp::lexical_ordering(a.kind.cmp(&b.kind),
+ a.url.to_str().cmp(&b.url.to_str()))
+ });
+ let sources = sources.iter().map(|source_id| {
+ source_id.load(config)
+ }).collect::<Vec<_>>();
+ SourceSet::new(sources).fingerprint()
+ }
}
impl Show for Package {
/// and that the packages are already locally available on the file
/// system.
fn get(&self, packages: &[PackageId]) -> CargoResult<Vec<Package>>;
+
+ /// Generates a unique string which represents the fingerprint of the
+ /// current state of the source.
+ ///
+ /// This fingerprint is used to determine the "freshness" of the source
+ /// later on. It must be guaranteed that the fingerprint of a source is
+ /// constant if and only if the output product will remain constant.
+ fn fingerprint(&self) -> CargoResult<String>;
}
-#[deriving(Show,Clone,PartialEq)]
+#[deriving(Show, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum SourceKind {
/// GitKind(<git reference>) represents a git repository
GitKind(String),
Ok(ret)
}
+
+ // The fingerprint of a set of sources is the concatenation of each
+ // member's fingerprint, in iteration order. Any source failing to
+ // produce a fingerprint aborts the whole computation via try!.
+ fn fingerprint(&self) -> CargoResult<String> {
+ let mut ret = String::new();
+ for source in self.sources.iter() {
+ ret.push_str(try!(source.fingerprint()).as_slice());
+ }
+ return Ok(ret);
+ }
}
let (manifest, nested) = cargo_try!(read_manifest(data.as_slice(),
source_id));
- Ok((Package::new(manifest, path), nested))
+ Ok((Package::new(manifest, path, source_id), nested))
}
pub fn read_packages(path: &Path, source_id: &SourceId)
use std::os::args;
use std::io;
-use std::path::Path;
-use core::{Package,PackageSet,Target};
+use std::io::File;
+use std::str;
+
+use core::{Package, PackageSet, Target};
use util;
use util::{CargoResult, ChainError, ProcessBuilder, internal, human, CargoError};
+use util::{Config};
type Args = Vec<String>;
+// Per-session compilation state threaded through compile_pkg and friends,
+// replacing the previous long positional parameter lists.
+struct Context<'a> {
+ // Output directory for the package currently being compiled.
+ dest: &'a Path,
+ // Directory holding dependency artifacts (added to rustc's -L paths).
+ deps_dir: &'a Path,
+ // True when compiling the top-level package rather than a dependency.
+ primary: bool,
+ // Output of `rustc -v`; mixed into fingerprints so a compiler upgrade
+ // invalidates earlier builds.
+ rustc_version: &'a str,
+ // Set once anything has been (re)compiled this session; later packages
+ // are then rebuilt unconditionally (see note in compile_pkg).
+ compiled_anything: bool,
+ config: &'a Config,
+}
+
+// Entry point: compiles `pkg` and all of its dependencies in topological
+// order, skipping any package whose fingerprint shows it is still fresh.
pub fn compile_packages(pkg: &Package, deps: &PackageSet) -> CargoResult<()> {
debug!("compile_packages; pkg={}; deps={}", pkg, deps);
+ let config = try!(Config::new());
let target_dir = pkg.get_absolute_target_dir();
let deps_target_dir = target_dir.join("deps");
+ // Capture the compiler's version string so it participates in package
+ // fingerprints (a toolchain change forces recompilation).
+ let output = cargo_try!(util::process("rustc").arg("-v").exec_with_output());
+ let rustc_version = str::from_utf8(output.output.as_slice()).unwrap();
+
// First ensure that the destination directory exists
debug!("creating target dir; path={}", target_dir.display());
try!(mk_target(&target_dir));
try!(mk_target(&deps_target_dir));
+ // Dependencies build into the deps dir; `dest` is switched to the main
+ // target dir below before compiling the primary package.
+ let mut cx = Context {
+ dest: &deps_target_dir,
+ deps_dir: &deps_target_dir,
+ primary: false,
+ rustc_version: rustc_version.as_slice(),
+ compiled_anything: false,
+ config: &config,
+ };
+
// Traverse the dependencies in topological order
for dep in try!(topsort(deps)).iter() {
- println!("Compiling {}", dep);
- try!(compile_pkg(dep, &deps_target_dir, &deps_target_dir, false));
+ try!(compile_pkg(dep, &mut cx));
}
- println!("Compiling {}", pkg);
- try!(compile_pkg(pkg, &target_dir, &deps_target_dir, true));
+ cx.primary = true;
+ cx.dest = &target_dir;
+ try!(compile_pkg(pkg, &mut cx));
Ok(())
}
-fn compile_pkg(pkg: &Package, dest: &Path, deps_dir: &Path,
- primary: bool) -> CargoResult<()> {
+// Compiles one package: checks freshness, runs the optional custom build
+// command, invokes rustc for each relevant target, then records the new
+// fingerprint so an unchanged package can be skipped next time.
+fn compile_pkg(pkg: &Package, cx: &mut Context) -> CargoResult<()> {
debug!("compile_pkg; pkg={}; targets={}", pkg, pkg.get_targets());
+ // First check to see if this package is fresh.
+ //
+ // Note that we're compiling things in topological order, so if nothing has
+ // been built up to this point and we're fresh, then we can safely skip
+ // recompilation. If anything has previously been rebuilt, it may have been
+ // a dependency of ours, so just go ahead and rebuild ourselves.
+ //
+ // This is not quite accurate, we should only trigger forceful
+ // recompilations for downstream dependencies of ourselves, not everyone
+ // compiled afterwards.
+ let fingerprint_loc = cx.dest.join(format!(".{}.fingerprint",
+ pkg.get_name()));
+ let (is_fresh, fingerprint) = try!(is_fresh(pkg, &fingerprint_loc, cx));
+ if !cx.compiled_anything && is_fresh {
+ println!("Skipping fresh {}", pkg);
+ return Ok(())
+ }
+
+ // Alright, so this package is not fresh and we need to compile it. Start
+ // off by printing a nice helpful message and then run the custom build
+ // command if one is present.
+ println!("Compiling {}", pkg);
+
match pkg.get_manifest().get_build() {
- Some(cmd) => try!(compile_custom(pkg, cmd, dest, deps_dir, primary)),
+ Some(cmd) => try!(compile_custom(pkg, cmd, cx)),
None => {}
}
- // compile
+ // After the custom command has run, execute rustc for all targets of our
+ // package.
for target in pkg.get_targets().iter() {
// Only compile lib targets for dependencies
- if primary || target.is_lib() {
- try!(rustc(&pkg.get_root(), target, dest, deps_dir, primary))
+ if cx.primary || target.is_lib() {
+ try!(rustc(&pkg.get_root(), target, cx))
}
}
+ // Now that everything has successfully compiled, write our new fingerprint
+ // to the relevant location to prevent recompilations in the future.
+ cargo_try!(File::create(&fingerprint_loc).write_str(fingerprint.as_slice()));
+ cx.compiled_anything = true;
+
Ok(())
}
+// Computes the package's current fingerprint (rustc version string
+// concatenated with the package's source fingerprint) and compares it with
+// the fingerprint previously saved at `loc`. Returns the freshness flag
+// together with the new fingerprint; a missing or unreadable fingerprint
+// file simply means "not fresh".
+fn is_fresh(dep: &Package, loc: &Path,
+ cx: &Context) -> CargoResult<(bool, String)> {
+ let new_fingerprint = format!("{}{}", cx.rustc_version,
+ try!(dep.get_fingerprint(cx.config)))
+ let mut file = match File::open(loc) {
+ Ok(file) => file,
+ Err(..) => return Ok((false, new_fingerprint)),
+ };
+ let old_fingerprint = cargo_try!(file.read_to_str());
+
+ log!(5, "old fingerprint: {}", old_fingerprint);
+ log!(5, "new fingerprint: {}", new_fingerprint);
+
+ Ok((old_fingerprint == new_fingerprint, new_fingerprint))
+}
+
+// Creates `target` (and any missing parent directories), mapping failure
+// to an internal cargo error.
fn mk_target(target: &Path) -> CargoResult<()> {
io::fs::mkdir_recursive(target, io::UserRWX).chain_error(|| {
internal("could not create target directory")
})
}
-fn compile_custom(pkg: &Package, cmd: &str, dest: &Path, deps_dir: &Path,
- _primary: bool) -> CargoResult<()> {
+// Runs the package's custom build command (manifest `build` key) from the
+// package root, with OUT_DIR/DEPS_DIR set in the environment. Output is
+// captured; failures are surfaced as human-readable errors.
+fn compile_custom(pkg: &Package, cmd: &str, cx: &Context) -> CargoResult<()> {
// FIXME: this needs to be smarter about splitting
let mut cmd = cmd.split(' ');
let mut p = util::process(cmd.next().unwrap())
.cwd(pkg.get_root())
- .env("OUT_DIR", Some(dest.as_str().unwrap()))
- .env("DEPS_DIR", Some(dest.join(deps_dir).as_str().unwrap()));
+ // NOTE(review): `dest.join(deps_dir)` joins two directory paths that
+ // may both be absolute (behavior carried over from the old code) —
+ // confirm this yields the intended DEPS_DIR value.
+ .env("OUT_DIR", Some(cx.dest.as_str().unwrap()))
+ .env("DEPS_DIR", Some(cx.dest.join(cx.deps_dir)
+ .as_str().unwrap()));
for arg in cmd {
p = p.arg(arg);
}
p.exec_with_output().map(|_| ()).map_err(|e| e.mark_human())
}
-fn rustc(root: &Path, target: &Target, dest: &Path, deps: &Path,
- verbose: bool) -> CargoResult<()> {
+fn rustc(root: &Path, target: &Target, cx: &Context) -> CargoResult<()> {
let crate_types = target.rustc_crate_types();
log!(5, "root={}; target={}; crate_types={}; dest={}; deps={}; verbose={}",
- root.display(), target, crate_types, dest.display(), deps.display(),
- verbose);
+ root.display(), target, crate_types, cx.dest.display(),
+ cx.deps_dir.display(), cx.primary);
- let rustc = prepare_rustc(root, target, crate_types, dest, deps);
+ let rustc = prepare_rustc(root, target, crate_types, cx);
- try!(if verbose {
+ try!(if cx.primary {
rustc.exec().map_err(|err| human(err.to_str()))
} else {
rustc.exec_with_output().and(Ok(())).map_err(|err| human(err.to_str()))
}
fn prepare_rustc(root: &Path, target: &Target, crate_types: Vec<&str>,
- dest: &Path, deps: &Path) -> ProcessBuilder {
+ cx: &Context) -> ProcessBuilder {
let mut args = Vec::new();
- build_base_args(&mut args, target, crate_types, dest);
- build_deps_args(&mut args, dest, deps);
+ build_base_args(&mut args, target, crate_types, cx);
+ build_deps_args(&mut args, cx);
util::process("rustc")
.cwd(root.clone())
}
+// Pushes the baseline rustc arguments for `target`: its source file path,
+// the requested crate types, and the output directory.
fn build_base_args(into: &mut Args, target: &Target, crate_types: Vec<&str>,
- dest: &Path) {
+ cx: &Context) {
// TODO: Handle errors in converting paths into args
into.push(target.get_path().display().to_str());
for crate_type in crate_types.iter() {
into.push(crate_type.to_str());
}
into.push("--out-dir".to_str());
- into.push(dest.display().to_str());
+ into.push(cx.dest.display().to_str());
}
-fn build_deps_args(dst: &mut Args, deps: &Path, dest: &Path) {
+// Adds -L library search paths for both the output directory and the
+// dependencies directory so rustc can find previously-built crates.
+fn build_deps_args(dst: &mut Args, cx: &Context) {
dst.push("-L".to_str());
- dst.push(deps.display().to_str());
+ dst.push(cx.dest.display().to_str());
dst.push("-L".to_str());
- dst.push(dest.display().to_str());
+ dst.push(cx.deps_dir.display().to_str());
}
fn topsort(deps: &PackageSet) -> CargoResult<PackageSet> {
.map(|pkg| pkg.clone())
.collect())
}
+
+ // The fingerprint of a git source is the revision that `reference`
+ // currently resolves to in the local database, so a newly fetched
+ // upstream commit changes the fingerprint and triggers a rebuild.
+ fn fingerprint(&self) -> CargoResult<String> {
+ let db = self.remote.db_at(&self.db_path);
+ db.rev_for(self.reference.as_slice())
+ }
}
#[cfg(test)]
Ok(GitDatabase { remote: self.clone(), path: into.clone() })
}
+ // Wraps an already-existing on-disk database directory in a
+ // GitDatabase handle without performing any checkout or fetch.
+ pub fn db_at(&self, db_path: &Path) -> GitDatabase {
+ GitDatabase { remote: self.clone(), path: db_path.clone() }
+ }
+
+ // Force-fetches all branches and tags from this remote into `path`,
+ // suppressing git's output.
fn fetch_into(&self, path: &Path) -> CargoResult<()> {
Ok(git!(*path, "fetch --force --quiet --tags {} \
refs/heads/*:refs/heads/*", self.fetch_location()))
}
pub fn copy_to<S: Str>(&self, reference: S,
- dest: &Path) -> CargoResult<GitCheckout> {
+ dest: &Path) -> CargoResult<GitCheckout> {
let checkout = cargo_try!(GitCheckout::clone_into(dest, self.clone(),
GitReference::for_str(reference.as_slice())));
}
fn fetch(&self) -> CargoResult<()> {
+ // In git 1.8, apparently --tags explicitly *only* fetches tags, it does
+ // not fetch anything else. In git 1.9, however, git apparently fetches
+ // everything when --tags is passed.
+ //
+ // This means that if we want to fetch everything we need to execute
+ // both with and without --tags on 1.8 (apparently), and only with
+ // --tags on 1.9. For simplicity, we execute with and without --tags for
+ // all gits.
+ //
+ // FIXME: This is suspicious. I have been informed that, for example,
+ // bundler does not do this, yet bundler appears to work!
+ git!(self.location, "fetch --force --quiet {}",
+ self.get_source().display());
git!(self.location, "fetch --force --quiet --tags {}",
self.get_source().display());
cargo_try!(self.reset(self.revision.as_slice()));
+use std::cmp;
+use std::fmt::{Show, Formatter};
use std::fmt;
-use std::fmt::{Show,Formatter};
-use core::{Package,PackageId,Summary,SourceId,Source};
+use std::io::fs;
+
+use core::{Package, PackageId, Summary, SourceId, Source};
use ops;
use util::{CargoResult, internal};
.map(|pkg| pkg.clone())
.collect())
}
+
+ // Fingerprint for a path-based source: the maximum modification time
+ // of any file under the source directory, skipping the target/ build
+ // directory. An empty walk fingerprints to the empty string.
+ fn fingerprint(&self) -> CargoResult<String> {
+ let mut max = None;
+ let target_dir = self.path().join("target");
+ for child in cargo_try!(fs::walk_dir(&self.path())) {
+ if target_dir.is_ancestor_of(&child) { continue }
+ let stat = cargo_try!(fs::stat(&child));
+ max = cmp::max(max, Some(stat.modified));
+ }
+ match max {
+ None => Ok(String::new()),
+ Some(time) => Ok(time.to_str()),
+ }
+ }
}
process(program)
.cwd(self.root())
.env("HOME", Some(paths::home().display().to_str().as_slice()))
+ .extra_path(cargo_dir())
}
+ // Builds the project on disk first, then returns a process builder for
+ // `program`; the extra_path(cargo_dir()) call is removed here because
+ // this diff moves that PATH setup into `process` itself.
pub fn cargo_process(&self, program: &str) -> ProcessBuilder {
self.build();
self.process(program)
- .extra_path(cargo_dir())
}
pub fn file<B: BytesContainer, S: Str>(mut self, path: B,
let mut files: Vec<String> = files.iter().filter_map(|f| {
match f.filename_str().unwrap() {
"deps" => None,
+ s if !s.starts_with("lib") => None,
s => Some(s.to_str())
}
}).collect();
files.sort();
let file0 = files.get(0).as_slice();
let file1 = files.get(1).as_slice();
+ println!("{} {}", file0, file1);
assert!(file0.ends_with(".rlib") || file1.ends_with(".rlib"));
assert!(file0.ends_with(os::consts::DLL_SUFFIX) ||
file1.ends_with(os::consts::DLL_SUFFIX));
cargo::util::process("parent").extra_path(p.root().join("target")),
execs().with_stdout("hello world\n"));
})
+
+// Exercises git-based fingerprinting end to end: the first build compiles
+// both packages, an unchanged repo is reported fresh, an uncommitted local
+// edit does NOT trigger a rebuild (the fingerprint is the committed
+// revision), and committing the change does trigger one.
+test!(recompilation {
+ let git_project = git_repo("bar", |project| {
+ project
+ .file("Cargo.toml", r#"
+ [project]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["carlhuda@example.com"]
+
+ [[lib]] name = "bar"
+ "#)
+ .file("src/bar.rs", r#"
+ pub fn bar() {}
+ "#)
+ }).assert();
+
+ let p = project("foo")
+ .file("Cargo.toml", format!(r#"
+ [project]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [dependencies.bar]
+
+ version = "0.5.0"
+ git = "file://{}"
+
+ [[bin]]
+
+ name = "foo"
+ "#, git_project.root().display()))
+ .file("src/foo.rs",
+ main_file(r#""{}", bar::bar()"#, ["bar"]).as_slice());
+
+ // First time around we should compile both foo and bar
+ assert_that(p.cargo_process("cargo-compile"),
+ execs().with_stdout(format!("Updating git repository `file:{}`\n\
+ Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ git_project.root().display(),
+ git_project.root().display(),
+ p.root().display())));
+ // Don't recompile the second time
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Updating git repository `file:{}`\n\
+ Skipping fresh bar v0.5.0 (file:{})\n\
+ Skipping fresh foo v0.5.0 (file:{})\n",
+ git_project.root().display(),
+ git_project.root().display(),
+ p.root().display())));
+ // Modify a file manually, shouldn't trigger a recompile
+ File::create(&git_project.root().join("src/bar.rs")).write_str(r#"
+ pub fn bar() { println!("hello!"); }
+ "#).assert();
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Updating git repository `file:{}`\n\
+ Skipping fresh bar v0.5.0 (file:{})\n\
+ Skipping fresh foo v0.5.0 (file:{})\n",
+ git_project.root().display(),
+ git_project.root().display(),
+ p.root().display())));
+ // Commit the changes and make sure we trigger a recompile
+ File::create(&git_project.root().join("src/bar.rs")).write_str(r#"
+ pub fn bar() { println!("hello!"); }
+ "#).assert();
+ git_project.process("git").args(["add", "."]).exec_with_output().assert();
+ git_project.process("git").args(["commit", "-m", "test"]).exec_with_output()
+ .assert();
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Updating git repository `file:{}`\n\
+ Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ git_project.root().display(),
+ git_project.root().display(),
+ p.root().display())));
+})
+use std::io::File;
+use std::io::timer;
+
use support::{ResultTest,project,execs,main_file};
use hamcrest::{assert_that,existing_file};
use cargo;
cargo::util::process("foo").extra_path(p.root().join("target")),
execs().with_stdout("test passed\n"));
})
+
+// Path-source fingerprinting: a second identical build skips both
+// packages; rewriting the project files on disk (fresh mtimes) forces a
+// full rebuild.
+test!(no_rebuild_dependency {
+ let mut p = project("foo");
+ let bar = p.root().join("bar");
+ p = p
+ .file(".cargo/config", format!(r#"
+ paths = ["{}"]
+ "#, bar.display()).as_slice())
+ .file("Cargo.toml", r#"
+ [project]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[bin]] name = "foo"
+ [dependencies.bar] version = "0.5.0"
+ "#)
+ .file("src/foo.rs", r#"
+ extern crate bar;
+ fn main() { bar::bar() }
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [project]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[lib]] name = "bar"
+ "#)
+ .file("bar/src/bar.rs", r#"
+ pub fn bar() {}
+ "#);
+ // First time around we should compile both foo and bar
+ assert_that(p.cargo_process("cargo-compile"),
+ execs().with_stdout(format!("Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ bar.display(),
+ p.root().display())));
+ // This time we shouldn't compile bar
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Skipping fresh bar v0.5.0 (file:{})\n\
+ Skipping fresh foo v0.5.0 (file:{})\n",
+ bar.display(),
+ p.root().display())));
+
+ p.build(); // rebuild the files (rewriting them in the process)
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ bar.display(),
+ p.root().display())));
+})
+
+// Transitive freshness: touching the deepest dependency (baz) rebuilds the
+// whole chain (the conservative compiled_anything rule), while touching the
+// middle crate (bar) rebuilds bar and foo but leaves baz fresh.
+test!(deep_dependencies_trigger_rebuild {
+ let mut p = project("foo");
+ let bar = p.root().join("bar");
+ let baz = p.root().join("baz");
+ p = p
+ .file(".cargo/config", format!(r#"
+ paths = ["{}", "{}"]
+ "#, bar.display(), baz.display()).as_slice())
+ .file("Cargo.toml", r#"
+ [project]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[bin]] name = "foo"
+ [dependencies.bar] version = "0.5.0"
+ "#)
+ .file("src/foo.rs", r#"
+ extern crate bar;
+ fn main() { bar::bar() }
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [project]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[lib]] name = "bar"
+ [dependencies.baz] version = "0.5.0"
+ "#)
+ .file("bar/src/bar.rs", r#"
+ extern crate baz;
+ pub fn bar() { baz::baz() }
+ "#)
+ .file("baz/Cargo.toml", r#"
+ [project]
+
+ name = "baz"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[lib]] name = "baz"
+ "#)
+ .file("baz/src/baz.rs", r#"
+ pub fn baz() {}
+ "#);
+ assert_that(p.cargo_process("cargo-compile"),
+ execs().with_stdout(format!("Compiling baz v0.5.0 (file:{})\n\
+ Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ baz.display(),
+ bar.display(),
+ p.root().display())));
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Skipping fresh baz v0.5.0 (file:{})\n\
+ Skipping fresh bar v0.5.0 (file:{})\n\
+ Skipping fresh foo v0.5.0 (file:{})\n",
+ baz.display(),
+ bar.display(),
+ p.root().display())));
+
+ // Make sure an update to baz triggers a rebuild of bar
+ //
+ // We base recompilation off mtime, so sleep for at least a second to ensure
+ // that this write will change the mtime.
+ timer::sleep(1000);
+ File::create(&p.root().join("baz/src/baz.rs")).write_str(r#"
+ pub fn baz() { println!("hello!"); }
+ "#).assert();
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Compiling baz v0.5.0 (file:{})\n\
+ Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ baz.display(),
+ bar.display(),
+ p.root().display())));
+
+ // Make sure an update to bar doesn't trigger baz
+ File::create(&p.root().join("bar/src/bar.rs")).write_str(r#"
+ extern crate baz;
+ pub fn bar() { println!("hello!"); baz::baz(); }
+ "#).assert();
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Skipping fresh baz v0.5.0 (file:{})\n\
+ Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ baz.display(),
+ bar.display(),
+ p.root().display())));
+})
+
+// A diamond-ish graph (foo depends on bar and baz; bar also depends on
+// baz): the shared dependency is compiled exactly once and the second,
+// unchanged build skips all three packages.
+test!(no_rebuild_two_deps {
+ let mut p = project("foo");
+ let bar = p.root().join("bar");
+ let baz = p.root().join("baz");
+ p = p
+ .file(".cargo/config", format!(r#"
+ paths = ["{}", "{}"]
+ "#, bar.display(), baz.display()).as_slice())
+ .file("Cargo.toml", r#"
+ [project]
+
+ name = "foo"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[bin]] name = "foo"
+ [dependencies.bar] version = "0.5.0"
+ [dependencies.baz] version = "0.5.0"
+ "#)
+ .file("src/foo.rs", r#"
+ extern crate bar;
+ fn main() { bar::bar() }
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [project]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[lib]] name = "bar"
+ [dependencies.baz] version = "0.5.0"
+ "#)
+ .file("bar/src/bar.rs", r#"
+ pub fn bar() {}
+ "#)
+ .file("baz/Cargo.toml", r#"
+ [project]
+
+ name = "baz"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+
+ [[lib]] name = "baz"
+ "#)
+ .file("baz/src/baz.rs", r#"
+ pub fn baz() {}
+ "#);
+ assert_that(p.cargo_process("cargo-compile"),
+ execs().with_stdout(format!("Compiling baz v0.5.0 (file:{})\n\
+ Compiling bar v0.5.0 (file:{})\n\
+ Compiling foo v0.5.0 (file:{})\n",
+ baz.display(),
+ bar.display(),
+ p.root().display())));
+ assert_that(p.process("cargo-compile"),
+ execs().with_stdout(format!("Skipping fresh baz v0.5.0 (file:{})\n\
+ Skipping fresh bar v0.5.0 (file:{})\n\
+ Skipping fresh foo v0.5.0 (file:{})\n",
+ baz.display(),
+ bar.display(),
+ p.root().display())));
+})