Import cargo_0.32.0.orig.tar.gz
authorVasudev Kamath <vasudev@copyninja.info>
Wed, 12 Dec 2018 16:17:42 +0000 (16:17 +0000)
committerVasudev Kamath <vasudev@copyninja.info>
Wed, 12 Dec 2018 16:17:42 +0000 (16:17 +0000)
[dgit import orig cargo_0.32.0.orig.tar.gz]

295 files changed:
.github/ISSUE_TEMPLATE/bug_report.md [new file with mode: 0644]
.github/ISSUE_TEMPLATE/feature_request.md [new file with mode: 0644]
.github/stale.yml [new file with mode: 0644]
.gitignore [new file with mode: 0644]
.travis.yml [new file with mode: 0644]
ARCHITECTURE.md [new file with mode: 0644]
CONTRIBUTING.md [new file with mode: 0644]
Cargo.toml [new file with mode: 0644]
LICENSE-APACHE [new file with mode: 0644]
LICENSE-MIT [new file with mode: 0644]
LICENSE-THIRD-PARTY [new file with mode: 0644]
README.md [new file with mode: 0644]
appveyor.yml [new file with mode: 0644]
src/bin/cargo/cli.rs [new file with mode: 0644]
src/bin/cargo/command_prelude.rs [new file with mode: 0644]
src/bin/cargo/commands/bench.rs [new file with mode: 0644]
src/bin/cargo/commands/build.rs [new file with mode: 0644]
src/bin/cargo/commands/check.rs [new file with mode: 0644]
src/bin/cargo/commands/clean.rs [new file with mode: 0644]
src/bin/cargo/commands/doc.rs [new file with mode: 0644]
src/bin/cargo/commands/fetch.rs [new file with mode: 0644]
src/bin/cargo/commands/fix.rs [new file with mode: 0644]
src/bin/cargo/commands/generate_lockfile.rs [new file with mode: 0644]
src/bin/cargo/commands/git_checkout.rs [new file with mode: 0644]
src/bin/cargo/commands/init.rs [new file with mode: 0644]
src/bin/cargo/commands/install.rs [new file with mode: 0644]
src/bin/cargo/commands/locate_project.rs [new file with mode: 0644]
src/bin/cargo/commands/login.rs [new file with mode: 0644]
src/bin/cargo/commands/metadata.rs [new file with mode: 0644]
src/bin/cargo/commands/mod.rs [new file with mode: 0644]
src/bin/cargo/commands/new.rs [new file with mode: 0644]
src/bin/cargo/commands/owner.rs [new file with mode: 0644]
src/bin/cargo/commands/package.rs [new file with mode: 0644]
src/bin/cargo/commands/pkgid.rs [new file with mode: 0644]
src/bin/cargo/commands/publish.rs [new file with mode: 0644]
src/bin/cargo/commands/read_manifest.rs [new file with mode: 0644]
src/bin/cargo/commands/run.rs [new file with mode: 0644]
src/bin/cargo/commands/rustc.rs [new file with mode: 0644]
src/bin/cargo/commands/rustdoc.rs [new file with mode: 0644]
src/bin/cargo/commands/search.rs [new file with mode: 0644]
src/bin/cargo/commands/test.rs [new file with mode: 0644]
src/bin/cargo/commands/uninstall.rs [new file with mode: 0644]
src/bin/cargo/commands/update.rs [new file with mode: 0644]
src/bin/cargo/commands/verify_project.rs [new file with mode: 0644]
src/bin/cargo/commands/version.rs [new file with mode: 0644]
src/bin/cargo/commands/yank.rs [new file with mode: 0644]
src/bin/cargo/main.rs [new file with mode: 0644]
src/cargo/core/compiler/build_config.rs [new file with mode: 0644]
src/cargo/core/compiler/build_context/mod.rs [new file with mode: 0644]
src/cargo/core/compiler/build_context/target_info.rs [new file with mode: 0644]
src/cargo/core/compiler/build_plan.rs [new file with mode: 0644]
src/cargo/core/compiler/compilation.rs [new file with mode: 0644]
src/cargo/core/compiler/context/compilation_files.rs [new file with mode: 0644]
src/cargo/core/compiler/context/mod.rs [new file with mode: 0644]
src/cargo/core/compiler/context/unit_dependencies.rs [new file with mode: 0644]
src/cargo/core/compiler/custom_build.rs [new file with mode: 0644]
src/cargo/core/compiler/fingerprint.rs [new file with mode: 0644]
src/cargo/core/compiler/job.rs [new file with mode: 0644]
src/cargo/core/compiler/job_queue.rs [new file with mode: 0644]
src/cargo/core/compiler/layout.rs [new file with mode: 0644]
src/cargo/core/compiler/mod.rs [new file with mode: 0644]
src/cargo/core/compiler/output_depinfo.rs [new file with mode: 0644]
src/cargo/core/dependency.rs [new file with mode: 0644]
src/cargo/core/features.rs [new file with mode: 0644]
src/cargo/core/interning.rs [new file with mode: 0644]
src/cargo/core/manifest.rs [new file with mode: 0644]
src/cargo/core/mod.rs [new file with mode: 0644]
src/cargo/core/package.rs [new file with mode: 0644]
src/cargo/core/package_id.rs [new file with mode: 0644]
src/cargo/core/package_id_spec.rs [new file with mode: 0644]
src/cargo/core/profiles.rs [new file with mode: 0644]
src/cargo/core/registry.rs [new file with mode: 0644]
src/cargo/core/resolver/conflict_cache.rs [new file with mode: 0644]
src/cargo/core/resolver/context.rs [new file with mode: 0644]
src/cargo/core/resolver/encode.rs [new file with mode: 0644]
src/cargo/core/resolver/errors.rs [new file with mode: 0644]
src/cargo/core/resolver/mod.rs [new file with mode: 0644]
src/cargo/core/resolver/resolve.rs [new file with mode: 0644]
src/cargo/core/resolver/types.rs [new file with mode: 0644]
src/cargo/core/shell.rs [new file with mode: 0644]
src/cargo/core/source/mod.rs [new file with mode: 0644]
src/cargo/core/source/source_id.rs [new file with mode: 0644]
src/cargo/core/summary.rs [new file with mode: 0644]
src/cargo/core/workspace.rs [new file with mode: 0644]
src/cargo/lib.rs [new file with mode: 0644]
src/cargo/macros.rs [new file with mode: 0644]
src/cargo/ops/cargo_clean.rs [new file with mode: 0644]
src/cargo/ops/cargo_compile.rs [new file with mode: 0644]
src/cargo/ops/cargo_doc.rs [new file with mode: 0644]
src/cargo/ops/cargo_fetch.rs [new file with mode: 0644]
src/cargo/ops/cargo_generate_lockfile.rs [new file with mode: 0644]
src/cargo/ops/cargo_install.rs [new file with mode: 0644]
src/cargo/ops/cargo_new.rs [new file with mode: 0644]
src/cargo/ops/cargo_output_metadata.rs [new file with mode: 0644]
src/cargo/ops/cargo_package.rs [new file with mode: 0644]
src/cargo/ops/cargo_pkgid.rs [new file with mode: 0644]
src/cargo/ops/cargo_read_manifest.rs [new file with mode: 0644]
src/cargo/ops/cargo_run.rs [new file with mode: 0644]
src/cargo/ops/cargo_test.rs [new file with mode: 0644]
src/cargo/ops/fix.rs [new file with mode: 0644]
src/cargo/ops/lockfile.rs [new file with mode: 0644]
src/cargo/ops/mod.rs [new file with mode: 0644]
src/cargo/ops/registry.rs [new file with mode: 0644]
src/cargo/ops/resolve.rs [new file with mode: 0644]
src/cargo/sources/config.rs [new file with mode: 0644]
src/cargo/sources/directory.rs [new file with mode: 0644]
src/cargo/sources/git/mod.rs [new file with mode: 0644]
src/cargo/sources/git/source.rs [new file with mode: 0644]
src/cargo/sources/git/utils.rs [new file with mode: 0644]
src/cargo/sources/mod.rs [new file with mode: 0644]
src/cargo/sources/path.rs [new file with mode: 0644]
src/cargo/sources/registry/index.rs [new file with mode: 0644]
src/cargo/sources/registry/local.rs [new file with mode: 0644]
src/cargo/sources/registry/mod.rs [new file with mode: 0644]
src/cargo/sources/registry/remote.rs [new file with mode: 0644]
src/cargo/sources/replaced.rs [new file with mode: 0644]
src/cargo/util/cfg.rs [new file with mode: 0644]
src/cargo/util/config.rs [new file with mode: 0644]
src/cargo/util/dependency_queue.rs [new file with mode: 0644]
src/cargo/util/diagnostic_server.rs [new file with mode: 0644]
src/cargo/util/errors.rs [new file with mode: 0644]
src/cargo/util/flock.rs [new file with mode: 0644]
src/cargo/util/graph.rs [new file with mode: 0644]
src/cargo/util/hex.rs [new file with mode: 0644]
src/cargo/util/important_paths.rs [new file with mode: 0644]
src/cargo/util/job.rs [new file with mode: 0644]
src/cargo/util/lev_distance.rs [new file with mode: 0644]
src/cargo/util/lockserver.rs [new file with mode: 0644]
src/cargo/util/machine_message.rs [new file with mode: 0644]
src/cargo/util/mod.rs [new file with mode: 0644]
src/cargo/util/network.rs [new file with mode: 0644]
src/cargo/util/paths.rs [new file with mode: 0644]
src/cargo/util/process_builder.rs [new file with mode: 0644]
src/cargo/util/profile.rs [new file with mode: 0644]
src/cargo/util/progress.rs [new file with mode: 0644]
src/cargo/util/read2.rs [new file with mode: 0644]
src/cargo/util/rustc.rs [new file with mode: 0644]
src/cargo/util/sha256.rs [new file with mode: 0644]
src/cargo/util/to_semver.rs [new file with mode: 0644]
src/cargo/util/to_url.rs [new file with mode: 0644]
src/cargo/util/toml/mod.rs [new file with mode: 0644]
src/cargo/util/toml/targets.rs [new file with mode: 0644]
src/cargo/util/vcs.rs [new file with mode: 0644]
src/crates-io/Cargo.toml [new file with mode: 0644]
src/crates-io/LICENSE-APACHE [new symlink]
src/crates-io/LICENSE-MIT [new symlink]
src/crates-io/lib.rs [new file with mode: 0644]
src/doc/README.md [new file with mode: 0644]
src/doc/book.toml [new file with mode: 0644]
src/doc/src/SUMMARY.md [new file with mode: 0644]
src/doc/src/faq.md [new file with mode: 0644]
src/doc/src/getting-started/first-steps.md [new file with mode: 0644]
src/doc/src/getting-started/index.md [new file with mode: 0644]
src/doc/src/getting-started/installation.md [new file with mode: 0644]
src/doc/src/guide/build-cache.md [new file with mode: 0644]
src/doc/src/guide/cargo-toml-vs-cargo-lock.md [new file with mode: 0644]
src/doc/src/guide/continuous-integration.md [new file with mode: 0644]
src/doc/src/guide/creating-a-new-project.md [new file with mode: 0644]
src/doc/src/guide/dependencies.md [new file with mode: 0644]
src/doc/src/guide/index.md [new file with mode: 0644]
src/doc/src/guide/project-layout.md [new file with mode: 0644]
src/doc/src/guide/tests.md [new file with mode: 0644]
src/doc/src/guide/why-cargo-exists.md [new file with mode: 0644]
src/doc/src/guide/working-on-an-existing-project.md [new file with mode: 0644]
src/doc/src/images/Cargo-Logo-Small.png [new file with mode: 0644]
src/doc/src/images/auth-level-acl.png [new file with mode: 0644]
src/doc/src/images/org-level-acl.png [new file with mode: 0644]
src/doc/src/index.md [new file with mode: 0644]
src/doc/src/reference/build-scripts.md [new file with mode: 0644]
src/doc/src/reference/config.md [new file with mode: 0644]
src/doc/src/reference/environment-variables.md [new file with mode: 0644]
src/doc/src/reference/external-tools.md [new file with mode: 0644]
src/doc/src/reference/index.md [new file with mode: 0644]
src/doc/src/reference/manifest.md [new file with mode: 0644]
src/doc/src/reference/pkgid-spec.md [new file with mode: 0644]
src/doc/src/reference/publishing.md [new file with mode: 0644]
src/doc/src/reference/source-replacement.md [new file with mode: 0644]
src/doc/src/reference/specifying-dependencies.md [new file with mode: 0644]
src/doc/src/reference/unstable.md [new file with mode: 0644]
src/doc/theme/favicon.png [new file with mode: 0644]
src/etc/_cargo [new file with mode: 0644]
src/etc/cargo.bashcomp.sh [new file with mode: 0644]
src/etc/man/cargo-bench.1 [new file with mode: 0644]
src/etc/man/cargo-build.1 [new file with mode: 0644]
src/etc/man/cargo-check.1 [new file with mode: 0644]
src/etc/man/cargo-clean.1 [new file with mode: 0644]
src/etc/man/cargo-doc.1 [new file with mode: 0644]
src/etc/man/cargo-fetch.1 [new file with mode: 0644]
src/etc/man/cargo-generate-lockfile.1 [new file with mode: 0644]
src/etc/man/cargo-init.1 [new file with mode: 0644]
src/etc/man/cargo-install.1 [new file with mode: 0644]
src/etc/man/cargo-login.1 [new file with mode: 0644]
src/etc/man/cargo-metadata.1 [new file with mode: 0644]
src/etc/man/cargo-new.1 [new file with mode: 0644]
src/etc/man/cargo-owner.1 [new file with mode: 0644]
src/etc/man/cargo-package.1 [new file with mode: 0644]
src/etc/man/cargo-pkgid.1 [new file with mode: 0644]
src/etc/man/cargo-publish.1 [new file with mode: 0644]
src/etc/man/cargo-run.1 [new file with mode: 0644]
src/etc/man/cargo-rustc.1 [new file with mode: 0644]
src/etc/man/cargo-rustdoc.1 [new file with mode: 0644]
src/etc/man/cargo-search.1 [new file with mode: 0644]
src/etc/man/cargo-test.1 [new file with mode: 0644]
src/etc/man/cargo-uninstall.1 [new file with mode: 0644]
src/etc/man/cargo-update.1 [new file with mode: 0644]
src/etc/man/cargo-version.1 [new file with mode: 0644]
src/etc/man/cargo-yank.1 [new file with mode: 0644]
src/etc/man/cargo.1 [new file with mode: 0644]
tests/testsuite/alt_registry.rs [new file with mode: 0644]
tests/testsuite/bad_config.rs [new file with mode: 0644]
tests/testsuite/bad_manifest_path.rs [new file with mode: 0644]
tests/testsuite/bench.rs [new file with mode: 0644]
tests/testsuite/build.rs [new file with mode: 0644]
tests/testsuite/build_auth.rs [new file with mode: 0644]
tests/testsuite/build_lib.rs [new file with mode: 0644]
tests/testsuite/build_plan.rs [new file with mode: 0644]
tests/testsuite/build_script.rs [new file with mode: 0644]
tests/testsuite/build_script_env.rs [new file with mode: 0644]
tests/testsuite/cargo_alias_config.rs [new file with mode: 0644]
tests/testsuite/cargo_command.rs [new file with mode: 0644]
tests/testsuite/cargo_features.rs [new file with mode: 0644]
tests/testsuite/cfg.rs [new file with mode: 0644]
tests/testsuite/check-style.sh [new file with mode: 0755]
tests/testsuite/check.rs [new file with mode: 0644]
tests/testsuite/clean.rs [new file with mode: 0644]
tests/testsuite/concurrent.rs [new file with mode: 0644]
tests/testsuite/config.rs [new file with mode: 0644]
tests/testsuite/corrupt_git.rs [new file with mode: 0644]
tests/testsuite/cross_compile.rs [new file with mode: 0644]
tests/testsuite/cross_publish.rs [new file with mode: 0644]
tests/testsuite/custom_target.rs [new file with mode: 0644]
tests/testsuite/death.rs [new file with mode: 0644]
tests/testsuite/dep_info.rs [new file with mode: 0644]
tests/testsuite/directory.rs [new file with mode: 0644]
tests/testsuite/doc.rs [new file with mode: 0644]
tests/testsuite/edition.rs [new file with mode: 0644]
tests/testsuite/features.rs [new file with mode: 0644]
tests/testsuite/fetch.rs [new file with mode: 0644]
tests/testsuite/fix.rs [new file with mode: 0644]
tests/testsuite/freshness.rs [new file with mode: 0644]
tests/testsuite/generate_lockfile.rs [new file with mode: 0644]
tests/testsuite/git.rs [new file with mode: 0644]
tests/testsuite/init.rs [new file with mode: 0644]
tests/testsuite/install.rs [new file with mode: 0644]
tests/testsuite/jobserver.rs [new file with mode: 0644]
tests/testsuite/local_registry.rs [new file with mode: 0644]
tests/testsuite/lockfile_compat.rs [new file with mode: 0644]
tests/testsuite/login.rs [new file with mode: 0644]
tests/testsuite/main.rs [new file with mode: 0644]
tests/testsuite/member_errors.rs [new file with mode: 0644]
tests/testsuite/metabuild.rs [new file with mode: 0644]
tests/testsuite/metadata.rs [new file with mode: 0644]
tests/testsuite/net_config.rs [new file with mode: 0644]
tests/testsuite/new.rs [new file with mode: 0644]
tests/testsuite/out_dir.rs [new file with mode: 0644]
tests/testsuite/overrides.rs [new file with mode: 0644]
tests/testsuite/package.rs [new file with mode: 0644]
tests/testsuite/patch.rs [new file with mode: 0644]
tests/testsuite/path.rs [new file with mode: 0644]
tests/testsuite/plugins.rs [new file with mode: 0644]
tests/testsuite/proc_macro.rs [new file with mode: 0644]
tests/testsuite/profile_config.rs [new file with mode: 0644]
tests/testsuite/profile_overrides.rs [new file with mode: 0644]
tests/testsuite/profile_targets.rs [new file with mode: 0644]
tests/testsuite/profiles.rs [new file with mode: 0644]
tests/testsuite/publish.rs [new file with mode: 0644]
tests/testsuite/read_manifest.rs [new file with mode: 0644]
tests/testsuite/registry.rs [new file with mode: 0644]
tests/testsuite/rename_deps.rs [new file with mode: 0644]
tests/testsuite/required_features.rs [new file with mode: 0644]
tests/testsuite/resolve.rs [new file with mode: 0644]
tests/testsuite/run.rs [new file with mode: 0644]
tests/testsuite/rustc.rs [new file with mode: 0644]
tests/testsuite/rustc_info_cache.rs [new file with mode: 0644]
tests/testsuite/rustdoc.rs [new file with mode: 0644]
tests/testsuite/rustdocflags.rs [new file with mode: 0644]
tests/testsuite/rustflags.rs [new file with mode: 0644]
tests/testsuite/search.rs [new file with mode: 0644]
tests/testsuite/shell_quoting.rs [new file with mode: 0644]
tests/testsuite/small_fd_limits.rs [new file with mode: 0644]
tests/testsuite/support/cross_compile.rs [new file with mode: 0644]
tests/testsuite/support/git.rs [new file with mode: 0644]
tests/testsuite/support/install.rs [new file with mode: 0644]
tests/testsuite/support/mod.rs [new file with mode: 0644]
tests/testsuite/support/paths.rs [new file with mode: 0644]
tests/testsuite/support/publish.rs [new file with mode: 0644]
tests/testsuite/support/registry.rs [new file with mode: 0644]
tests/testsuite/support/resolver.rs [new file with mode: 0644]
tests/testsuite/test.rs [new file with mode: 0644]
tests/testsuite/tool_paths.rs [new file with mode: 0644]
tests/testsuite/update.rs [new file with mode: 0644]
tests/testsuite/verify_project.rs [new file with mode: 0644]
tests/testsuite/version.rs [new file with mode: 0644]
tests/testsuite/warn_on_failure.rs [new file with mode: 0644]
tests/testsuite/workspaces.rs [new file with mode: 0644]

diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644 (file)
index 0000000..1dacf59
--- /dev/null
@@ -0,0 +1,27 @@
+---
+name: Bug report
+about: Create a report to help us improve
+---
+
+<!-- Thanks for filing a 🐛 bug report 😄! -->
+
+**Problem**
+<!-- A clear and concise description of what the bug is. -->
+<!-- including what currently happens and what you expected to happen. -->
+
+**Steps**
+<!-- The steps to reproduce the bug. -->
+1.
+2.
+3.
+
+**Possible Solution(s)**
+<!-- Not obligatory, but suggest a fix/reason for the bug, -->
+<!-- or ideas how to implement the addition or change -->
+
+**Notes**
+
+Output of `cargo version`:
+
+<!-- Also, any additional context or information you feel may be relevant to the issue. -->
+<!-- (e.g. Rust version, OS platform/distribution/version, target toolchain(s), release channel, ...) -->
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644 (file)
index 0000000..173b23b
--- /dev/null
@@ -0,0 +1,15 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+---
+
+<!-- Thanks for filing a 🙋 feature request 😄! -->
+
+**Describe the problem you are trying to solve**
+<!-- A clear and concise description of the problem this feature request is trying to solve. -->
+
+**Describe the solution you'd like**
+<!-- A clear and concise description of what you want to happen. -->
+
+**Notes**
+<!-- Any additional context or information you feel may be relevant to the issue. -->
diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644 (file)
index 0000000..0c77307
--- /dev/null
@@ -0,0 +1,37 @@
+# Default values: https://probot.github.io/apps/stale/#installation
+
+daysUntilStale: 1160 # ~3 yrs 2 months
+daysUntilClose: 30
+
+exemptLabels:
+  - C-tracking-issue      # keep tracking issues open
+  - C-feature-request     # keep feature requests open (at least for now)
+  - "Feature accepted"    # keep accepted features
+
+staleLabel: stale
+
+markComment: >
+  As there hasn't been any activity here in a while would someone (the author, a
+  team member, or any interested party) be able to summarise the current state,
+  perhaps making explicit:
+    * Is this still relevant?
+    * If so, what is blocking it?
+    * Is it known what could be done to help move this forward?
+
+  Thank you!
+
+
+  (The cargo team is currently evaluating the use of Stale bot, and using #6035
+  as the tracking issue to gather feedback.)
+
+
+  If you're reading this comment from the distant future, fear not if this
+  was closed automatically. If you believe it's still an issue please leave a
+  comment and a team member can reopen this issue. Opening a new issue is also
+  acceptable!
+
+closeComment: >
+  As I didn't see any updates in 30 days I'm going to close this.
+  Please see the previous comment for more information!
+
+limitPerRun: 1 # 1 per hour, so 24 per day
diff --git a/.gitignore b/.gitignore
new file mode 100644 (file)
index 0000000..85e363a
--- /dev/null
@@ -0,0 +1,14 @@
+/target
+Cargo.lock
+.cargo
+/config.stamp
+/Makefile
+/config.mk
+src/doc/build
+src/etc/*.pyc
+src/registry/target
+rustc
+__pycache__
+.idea/
+*.iml
+*.swp
diff --git a/.travis.yml b/.travis.yml
new file mode 100644 (file)
index 0000000..a3fdd80
--- /dev/null
@@ -0,0 +1,72 @@
+language: rust
+rust: stable
+sudo: required
+dist: trusty
+
+git:
+  depth: 1
+
+# Using 'cache: cargo' to cache target/ and all of $HOME/.cargo/
+# doesn't work well: the cache is large and it takes several minutes
+# to move it to and from S3. So instead we only cache the mdbook
+# binary.
+cache:
+  directories:
+    - $HOME/.cargo/bin/
+
+matrix:
+  include:
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      if: branch != master OR type = pull_request
+    - env: TARGET=x86_64-apple-darwin
+           ALT=i686-apple-darwin
+      os: osx
+      if: branch != master OR type = pull_request
+
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: beta
+      if: branch != master OR type = pull_request
+
+    # Minimum Rust supported channel. We enable these to make sure we
+    # continue to work on the advertised minimum Rust version.
+    # However cargo only supports the latest stable so this will get
+    # increased every 6 weeks or so, whenever the first PR that needs a new feature lands.
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: 1.28.0
+      script:
+        - rustup toolchain install nightly
+        - cargo +nightly generate-lockfile -Z minimal-versions
+        - cargo -V
+        - cargo test
+      if: branch != master OR type = pull_request
+
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+      rust: nightly
+      install:
+        - mdbook --help || cargo install mdbook --force
+      script:
+        - cargo test
+        - cargo doc --no-deps
+        - (cd src/doc && mdbook build --dest-dir ../../target/doc)
+      if: branch != master OR type = pull_request
+
+  exclude:
+    - rust: stable
+
+before_script:
+  - rustup target add $ALT
+script:
+  - cargo test
+
+notifications:
+  email:
+    on_success: never
+
+addons:
+  apt:
+    packages:
+      - gcc-multilib
diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md
new file mode 100644 (file)
index 0000000..e80c515
--- /dev/null
@@ -0,0 +1,137 @@
+# Cargo Architecture
+
+This document gives a high level overview of Cargo internals. You may
+find it useful if you want to contribute to Cargo or if you are
+interested in the inner workings of Cargo.
+
+The purpose of Cargo is to formalize a canonical Rust workflow, by automating
+the standard tasks associated with distributing software.  Cargo simplifies
+structuring a new project, adding dependencies, writing and running unit tests,
+and more.
+
+
+## Subcommands
+
+Cargo is a single binary composed of a set of [`clap`][] subcommands. All subcommands live in
+`src/bin/cargo/commands` directory. `src/bin/cargo/main.rs` is the entry point.
+
+Each subcommand, such as `src/bin/cargo/commands/build.rs`, has its own API
+interface, similarly to Git's, parsing command line options, reading the
+configuration files, discovering the Cargo project in the current directory and
+delegating the actual implementation to one
+of the functions in `src/cargo/ops/mod.rs`. This short file is a good
+place to find out about most of the things that Cargo can do.
+Subcommands are designed to pipe to one another, and custom subcommands make
+Cargo easy to extend and attach tools to.
+
+[`clap`]: https://clap.rs/
+
+
+## Important Data Structures
+
+There are some important data structures which are used throughout
+Cargo.
+
+`Config` is available almost everywhere and holds "global"
+information, such as `CARGO_HOME` or configuration from
+`.cargo/config` files. The `shell` method of `Config` is the entry
+point for printing status messages and other info to the console.
+
+`Workspace` is the description of the workspace for the current
+working directory. Each workspace contains at least one
+`Package`. Each package corresponds to a single `Cargo.toml`, and may
+define several `Target`s, such as the library, binaries, integration
+test or examples. Targets are crates (each target defines a crate
+root, like `src/lib.rs` or `examples/foo.rs`) and are what is actually
+compiled by `rustc`.
+
+A typical package defines the single library target and several
+auxiliary ones. Packages are a unit of dependency in Cargo, and when
+package `foo` depends on package `bar`, that means that each target
+from `foo` needs the library target from `bar`.
+
+`PackageId` is the unique identifier of a (possibly remote)
+package. It consists of three components: name, version and source
+id. Source is the place where the source code for package comes
+from. Typical sources are crates.io, a git repository or a folder on
+the local hard drive.
+
+`Resolve` is the representation of a directed acyclic graph of package
+dependencies, which uses `PackageId`s for nodes. This is the data
+structure that is saved to the lock file. If there is no lockfile,
+Cargo constructs a resolve by finding a graph of packages which
+matches declared dependency specification according to semver.
+
+
+## Persistence
+
+Cargo is a non-daemon command line application, which means that all
+the information used by Cargo must be persisted on the hard drive. The
+main sources of information are `Cargo.toml` and `Cargo.lock` files,
+`.cargo/config` configuration files and the globally shared registry
+of packages downloaded from crates.io, usually located at
+`~/.cargo/registry`. See `src/sources/registry` for the specifics of
+the registry storage format.
+
+
+## Concurrency
+
+Cargo is mostly single threaded. The only concurrency inside a single
+instance of Cargo happens during compilation, when several instances
+of `rustc` are invoked in parallel to build independent
+targets. However there can be several different instances of Cargo
+process running concurrently on the system. Cargo guarantees that this
+is always safe by using file locks when accessing potentially shared
+data like the registry or the target directory.
+
+
+## Tests
+
+Cargo has an impressive test suite located in the `tests` folder. Most
+of the tests are integration tests: a project structure with `Cargo.toml` and
+rust source code is created in a temporary directory, `cargo` binary
+is invoked via `std::process::Command` and then stdout and stderr are
+verified against the expected output. To simplify testing, several
+macros of the form `[MACRO]` are used in the expected output. For
+example, `[..]` matches any string.
+
+To see stdout and stderr streams of the subordinate process, add `.stream()`
+call to the built-up `Execs`:
+
+```rust
+// Before
+p.cargo("run").run();
+
+// After
+p.cargo("run").stream().run();
+```
+
+Alternatively to build and run a custom version of cargo simply run `cargo build`
+and execute `target/debug/cargo`. Note that `+nightly`/`+stable` (and variants),
+being [rustup](https://rustup.rs/) features, won't work when executing the locally
+built cargo binary directly, you have to instead build with `cargo +nightly build`
+and run with `rustup run` (e.g `rustup run nightly
+<path-to-cargo>/target/debug/cargo <args>..`) (or set the `RUSTC` env var to point
+to nightly rustc).
+
+Because the test suite has `#![deny(warnings)]` at times you might find it
+convenient to override this with `RUSTFLAGS`, for example
+`RUSTFLAGS="--cap-lints warn" cargo build`.
+
+## Logging
+
+Cargo uses [`env_logger`](https://docs.rs/env_logger/*/env_logger/), so you can set
+`RUST_LOG` environment variable to get the logs. This is useful both for diagnosing
+bugs in stable Cargo and for local development. Cargo also has an internal hierarchical
+profiling infrastructure, which is activated via the `CARGO_PROFILE` environment variable:
+
+```
+# Outputs all logs with levels debug and higher  
+$ RUST_LOG=debug cargo generate-lockfile
+
+# Don't forget that you can filter by module as well 
+$ RUST_LOG=cargo::core::resolver=trace cargo generate-lockfile
+
+# Output first three levels of profiling info
+$ CARGO_PROFILE=3 cargo generate-lockfile
+```
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..6d7252c
--- /dev/null
@@ -0,0 +1,202 @@
+# Contributing to Cargo
+
+Thank you for your interest in contributing to Cargo! Good places to
+start are this document, [ARCHITECTURE.md](ARCHITECTURE.md), which
+describes the high-level structure of Cargo and [E-easy] bugs on the
+issue tracker.
+
+If you have a general question about Cargo or its internals, feel free to ask
+on [IRC].
+
+## Code of Conduct
+
+All contributors are expected to follow our [Code of Conduct].
+
+## Bug reports
+
+We can't fix what we don't know about, so please report problems liberally. This
+includes problems with understanding the documentation, unhelpful error messages
+and unexpected behavior.
+
+**If you think that you have identified an issue with Cargo that might compromise
+its users' security, please do not open a public issue on GitHub. Instead,
+we ask you to refer to Rust's [security policy].**
+
+Opening an issue is as easy as following [this link][new-issues] and filling out
+the fields. Here's a template that you can use to file an issue, though it's not
+necessary to use it exactly:
+
+    <short summary of the problem>
+
+    I tried this: <minimal example that causes the problem>
+
+    I expected to see this happen: <explanation>
+
+    Instead, this happened: <explanation>
+
+    I'm using <output of `cargo --version`>
+
+All three components are important: what you did, what you expected, what
+happened instead. Please use https://gist.github.com/ if your examples run long.
+
+
+## Feature requests
+
+Cargo follows the general Rust model of evolution. All major features go through
+an RFC process. Therefore, before opening a feature request issue create a
+Pre-RFC thread on the [internals][irlo] forum to get preliminary feedback.
+Implementing a feature as a [custom subcommand][subcommands] is encouraged as it
+helps demonstrate the demand for the functionality and is a great way to deliver
+a working solution faster as it can iterate outside of cargo's release cadence.
+
+## Working on issues
+
+If you're looking for somewhere to start, check out the [E-easy][E-Easy] and
+[E-mentor][E-mentor] tags.
+
+Feel free to ask for guidelines on how to tackle a problem on [IRC] or open a
+[new issue][new-issues]. This is especially important if you want to add new
+features to Cargo or make large changes to the already existing code-base.
+Cargo's core developers will do their best to provide help.
+
+If you start working on an already-filed issue, post a comment on this issue to
+let people know that somebody is working on it. Feel free to ask for comments if
+you are unsure about the solution you would like to submit.
+
+While Cargo does make use of some Rust-features available only through the
+`nightly` toolchain, it must compile on stable Rust. Code added to Cargo
+is encouraged to make use of the latest stable features of the language and
+`stdlib`.
+
+We use the "fork and pull" model [described here][development-models], where
+contributors push changes to their personal fork and create pull requests to
+bring those changes into the source repository. This process is partly
+automated: Pull requests are made against Cargo's master-branch, tested and
+reviewed. Once a change is approved to be merged, a friendly bot merges the
+changes into an internal branch, runs the full test-suite on that branch
+and only then merges into master. This ensures that Cargo's master branch
+passes the test-suite at all times.
+
+Your basic steps to get going:
+
+* Fork Cargo and create a branch from master for the issue you are working on.
+* Please adhere to the code style that you see around the location you are
+working on.
+* [Commit as you go][githelp].
+* Include tests that cover all non-trivial code. The existing tests
+in `test/` provide templates on how to test Cargo's behavior in a
+sandbox-environment. The internal crate `cargotest` provides a vast amount
+of helpers to minimize boilerplate.  See [`cargotest/mod.rs`] for an
+introduction to writing tests.
+* Make sure `cargo test` passes. If you do not have the cross-compilers
+installed locally, install them using the instructions returned by
+`cargo test cross_compile::cross_tests` (twice, with `--toolchain nightly`
+added to get the nightly cross target too); alternatively just
+ignore the cross-compile test failures or disable them by
+using `CFG_DISABLE_CROSS_TESTS=1 cargo test`. Note that some tests are enabled
+only on `nightly` toolchain. If you can, test both toolchains.
+* All code changes are expected to comply with the formatting suggested by `rustfmt`.
+You can use `rustup component add --toolchain nightly rustfmt-preview` to install `rustfmt` and use
+`rustfmt +nightly --unstable-features --skip-children` on the changed files to automatically format your code.
+* Push your commits to GitHub and create a pull request against Cargo's
+`master` branch.
+
+## Pull requests
+
+After the pull request is made, a friendly bot will automatically assign a
+reviewer; the review-process will make sure that the proposed changes are
+sound. Please give the assigned reviewer sufficient time, especially during
+weekends. If you don't get a reply, you may poke the core developers on [IRC].
+
+A merge of Cargo's master-branch and your changes is immediately queued
+to be tested after the pull request is made. In case unforeseen
+problems are discovered during this step (e.g. a failure on a platform you
+originally did not develop on), you may ask for guidance. Push additional
+commits to your branch to tackle these problems.
+
+The reviewer might point out changes deemed necessary. Please add them as
+extra commits; this ensures that the reviewer can see what has changed since
+the code was previously reviewed. Large or tricky changes may require several
+passes of review and changes.
+
+Once the reviewer approves your pull request, a friendly bot picks it up
+and [merges][mergequeue] it into Cargo's `master` branch.
+
+## Contributing to the documentation
+
+To contribute to the documentation, all you need to do is change the markdown
+files in the `src/doc` directory. To view the rendered version of changes you
+have made locally, make sure you have `mdbook` installed and run:
+
+```sh
+cd src/doc
+mdbook build
+open book/index.html
+```
+
+To install `mdbook` run `cargo install mdbook`.
+
+
+## Issue Triage
+
+Sometimes an issue will stay open, even though the bug has been fixed. And
+sometimes, the original bug may go stale because something has changed in the
+meantime.
+
+It can be helpful to go through older bug reports and make sure that they are
+still valid. Load up an older issue, double check that it's still true, and
+leave a comment letting us know if it is or is not. The [least recently
+updated sort][lru] is good for finding issues like this.
+
+Contributors with sufficient permissions on the Rust-repository can help by
+adding labels to triage issues:
+
+* Yellow, **A**-prefixed labels state which **area** of the project an issue
+  relates to.
+
+* Magenta, **B**-prefixed labels identify bugs which are **blockers**.
+
+* Light purple, **C**-prefixed labels represent the **category** of an issue.
+  In particular, **C-feature-request** marks *proposals* for new features. If
+  an issue is **C-feature-request**, but is not **Feature accepted** or **I-nominated**,
+  then it was not thoroughly discussed, and might need some additional design
+  or perhaps should be implemented as an external subcommand first. Ping
+  @rust-lang/cargo if you want to send a PR for such an issue.
+
+* Dark purple, **Command**-prefixed labels mean the issue has to do with a
+  specific cargo command.
+
+* Green, **E**-prefixed labels explain the level of **experience** or
+  **effort** necessary to fix the issue. [**E-mentor**][E-mentor] issues also
+  have some instructions on how to get started.
+
+* Red, **I**-prefixed labels indicate the **importance** of the issue. The
+  **[I-nominated][]** label indicates that an issue has been nominated for
+  prioritizing at the next triage meeting.
+
+* Purple gray, **O**-prefixed labels are the **operating system** or platform
+  that this issue is specific to.
+
+* Orange, **P**-prefixed labels indicate a bug's **priority**. These labels
+  are only assigned during triage meetings and replace the **[I-nominated][]**
+  label.
+
+* The light orange **relnotes** label marks issues that should be documented in
+  the release notes of the next release.
+
+
+[githelp]: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html
+[development-models]: https://help.github.com/articles/about-collaborative-development-models/
+[gist]: https://gist.github.com/
+[new-issues]: https://github.com/rust-lang/cargo/issues/new
+[mergequeue]: https://buildbot2.rust-lang.org/homu/queue/cargo
+[security policy]: https://www.rust-lang.org/security.html
+[lru]: https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-asc
+[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy
+[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor
+[I-nominated]: https://github.com/rust-lang/cargo/labels/I-nominated
+[Code of Conduct]: https://www.rust-lang.org/conduct.html
+[IRC]: https://kiwiirc.com/client/irc.mozilla.org/cargo
+[`cargotest/mod.rs`]: https://github.com/rust-lang/cargo/blob/master/tests/testsuite/cargotest/mod.rs
+[irlo]: https://internals.rust-lang.org/
+[subcommands]: https://doc.rust-lang.org/cargo/reference/external-tools.html#custom-subcommands
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644 (file)
index 0000000..5dbff3e
--- /dev/null
@@ -0,0 +1,106 @@
+[package]
+name = "cargo"
+version = "0.32.0"
+authors = ["Yehuda Katz <wycats@gmail.com>",
+           "Carl Lerche <me@carllerche.com>",
+           "Alex Crichton <alex@alexcrichton.com>"]
+license = "MIT OR Apache-2.0"
+homepage = "https://crates.io"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://docs.rs/cargo"
+description = """
+Cargo, a package manager for Rust.
+"""
+
+[lib]
+name = "cargo"
+path = "src/cargo/lib.rs"
+
+[dependencies]
+atty = "0.2"
+bytesize = "1.0"
+crates-io = { path = "src/crates-io", version = "0.20" }
+crossbeam-utils = "0.5"
+crypto-hash = "0.3.1"
+curl = { version = "0.4.17", features = ['http2'] }
+curl-sys = "0.4.12"
+env_logger = "0.5.11"
+failure = "0.1.2"
+filetime = "0.2"
+flate2 = "1.0.3"
+fs2 = "0.4"
+git2 = "0.7.5"
+git2-curl = "0.8.1"
+glob = "0.2.11"
+hex = "0.3"
+home = "0.3"
+ignore = "0.4"
+lazy_static = "1.0.0"
+jobserver = "0.1.11"
+lazycell = "1.2.0"
+libc = "0.2"
+log = "0.4"
+libgit2-sys = "0.7.9"
+num_cpus = "1.0"
+opener = "0.3.0"
+rustfix = "0.4.2"
+same-file = "1"
+semver = { version = "0.9.0", features = ["serde"] }
+serde = "1.0"
+serde_derive = "1.0"
+serde_ignored = "0.0.4"
+serde_json = { version = "1.0.30", features = ["raw_value"] }
+shell-escape = "0.1.4"
+tar = { version = "0.4.15", default-features = false }
+tempfile = "3.0"
+termcolor = "1.0"
+toml = "0.4.2"
+url = "1.1"
+clap = "2.31.2"
+unicode-width = "0.1.5"
+openssl = { version = '0.10.11', optional = true }
+
+# A noop dependency that changes in the Rust repository, it's a bit of a hack.
+# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust`
+# for more information.
+rustc-workspace-hack = "1.0.0"
+
+[target.'cfg(target_os = "macos")'.dependencies]
+core-foundation = { version = "0.6.0", features = ["mac_os_10_7_support"] }
+
+[target.'cfg(windows)'.dependencies]
+miow = "0.3.1"
+fwdansi = "1"
+
+[target.'cfg(windows)'.dependencies.winapi]
+version = "0.3"
+features = [
+  "basetsd",
+  "handleapi",
+  "jobapi",
+  "jobapi2",
+  "memoryapi",
+  "minwindef",
+  "ntdef",
+  "ntstatus",
+  "processenv",
+  "processthreadsapi",
+  "psapi",
+  "synchapi",
+  "winerror",
+  "winbase",
+  "wincon",
+  "winnt",
+]
+
+[dev-dependencies]
+bufstream = "0.1"
+proptest = "0.8.7"
+
+[[bin]]
+name = "cargo"
+test = false
+doc = false
+
+[features]
+vendored-openssl = ['openssl/vendored']
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644 (file)
index 0000000..16fe87b
--- /dev/null
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644 (file)
index 0000000..31aa793
--- /dev/null
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY
new file mode 100644 (file)
index 0000000..c9897b9
--- /dev/null
@@ -0,0 +1,1272 @@
+The Cargo source code itself does not bundle any third party libraries, but it
+depends on a number of libraries which carry their own copyright notices and
+license terms. These libraries are normally all linked static into the binary
+distributions of Cargo:
+
+* OpenSSL - http://www.openssl.org/source/license.html
+
+    Copyright (c) 1998-2011 The OpenSSL Project.  All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    1. Redistributions of source code must retain the above copyright
+       notice, this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+       notice, this list of conditions and the following disclaimer in
+       the documentation and/or other materials provided with the
+       distribution.
+
+    3. All advertising materials mentioning features or use of this
+       software must display the following acknowledgment:
+       "This product includes software developed by the OpenSSL Project
+       for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+
+    4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+       endorse or promote products derived from this software without
+       prior written permission. For written permission, please contact
+       openssl-core@openssl.org.
+
+    5. Products derived from this software may not be called "OpenSSL"
+       nor may "OpenSSL" appear in their names without prior written
+       permission of the OpenSSL Project.
+
+    6. Redistributions of any form whatsoever must retain the following
+       acknowledgment:
+       "This product includes software developed by the OpenSSL Project
+       for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+
+    THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+    EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
+    ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+    STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+    OF THE POSSIBILITY OF SUCH DAMAGE.
+    ====================================================================
+
+    This product includes cryptographic software written by Eric Young
+    (eay@cryptsoft.com).  This product includes software written by Tim
+    Hudson (tjh@cryptsoft.com).
+
+    ---
+
+    Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+    All rights reserved.
+
+    This package is an SSL implementation written
+    by Eric Young (eay@cryptsoft.com).
+    The implementation was written so as to conform with Netscapes SSL.
+
+    This library is free for commercial and non-commercial use as long as
+    the following conditions are aheared to.  The following conditions
+    apply to all code found in this distribution, be it the RC4, RSA,
+    lhash, DES, etc., code; not just the SSL code.  The SSL documentation
+    included with this distribution is covered by the same copyright terms
+    except that the holder is Tim Hudson (tjh@cryptsoft.com).
+
+    Copyright remains Eric Young's, and as such any Copyright notices in
+    the code are not to be removed.
+    If this package is used in a product, Eric Young should be given attribution
+    as the author of the parts of the library used.
+    This can be in the form of a textual message at program startup or
+    in documentation (online or textual) provided with the package.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+    1. Redistributions of source code must retain the copyright
+       notice, this list of conditions and the following disclaimer.
+    2. Redistributions in binary form must reproduce the above copyright
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+    3. All advertising materials mentioning features or use of this software
+       must display the following acknowledgement:
+       "This product includes cryptographic software written by
+        Eric Young (eay@cryptsoft.com)"
+       The word 'cryptographic' can be left out if the rouines from the library
+       being used are not cryptographic related :-).
+    4. If you include any Windows specific code (or a derivative thereof) from
+       the apps directory (application code) you must include an acknowledgement:
+       "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+
+    THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+    ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+    ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+    FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+    DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+    OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+    HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+    LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+    OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+    SUCH DAMAGE.
+
+    The licence and distribution terms for any publically available version or
+    derivative of this code cannot be changed.  i.e. this code cannot simply be
+    copied and put under another distribution licence
+    [including the GNU Public Licence.]
+
+* libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING
+
+     libgit2 is Copyright (C) the libgit2 contributors,
+     unless otherwise stated. See the AUTHORS file for details.
+
+     Note that the only valid version of the GPL as far as this project
+     is concerned is _this_ particular version of the license (ie v2, not
+     v2.2 or v3.x or whatever), unless explicitly otherwise stated.
+
+    ----------------------------------------------------------------------
+
+          LINKING EXCEPTION
+
+     In addition to the permissions in the GNU General Public License,
+     the authors give you unlimited permission to link the compiled
+     version of this library into combinations with other programs,
+     and to distribute those combinations without any restriction
+     coming from the use of this file.  (The General Public License
+     restrictions do apply in other respects; for example, they cover
+     modification of the file, and distribution when not linked into
+     a combined executable.)
+
+    ----------------------------------------------------------------------
+
+            GNU GENERAL PUBLIC LICENSE
+               Version 2, June 1991
+
+     Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+                           59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+     Everyone is permitted to copy and distribute verbatim copies
+     of this license document, but changing it is not allowed.
+
+              Preamble
+
+      The licenses for most software are designed to take away your
+    freedom to share and change it.  By contrast, the GNU General Public
+    License is intended to guarantee your freedom to share and change free
+    software--to make sure the software is free for all its users.  This
+    General Public License applies to most of the Free Software
+    Foundation's software and to any other program whose authors commit to
+    using it.  (Some other Free Software Foundation software is covered by
+    the GNU Library General Public License instead.)  You can apply it to
+    your programs, too.
+
+      When we speak of free software, we are referring to freedom, not
+    price.  Our General Public Licenses are designed to make sure that you
+    have the freedom to distribute copies of free software (and charge for
+    this service if you wish), that you receive source code or can get it
+    if you want it, that you can change the software or use pieces of it
+    in new free programs; and that you know you can do these things.
+
+      To protect your rights, we need to make restrictions that forbid
+    anyone to deny you these rights or to ask you to surrender the rights.
+    These restrictions translate to certain responsibilities for you if you
+    distribute copies of the software, or if you modify it.
+
+      For example, if you distribute copies of such a program, whether
+    gratis or for a fee, you must give the recipients all the rights that
+    you have.  You must make sure that they, too, receive or can get the
+    source code.  And you must show them these terms so they know their
+    rights.
+
+      We protect your rights with two steps: (1) copyright the software, and
+    (2) offer you this license which gives you legal permission to copy,
+    distribute and/or modify the software.
+
+      Also, for each author's protection and ours, we want to make certain
+    that everyone understands that there is no warranty for this free
+    software.  If the software is modified by someone else and passed on, we
+    want its recipients to know that what they have is not the original, so
+    that any problems introduced by others will not reflect on the original
+    authors' reputations.
+
+      Finally, any free program is threatened constantly by software
+    patents.  We wish to avoid the danger that redistributors of a free
+    program will individually obtain patent licenses, in effect making the
+    program proprietary.  To prevent this, we have made it clear that any
+    patent must be licensed for everyone's free use or not licensed at all.
+
+      The precise terms and conditions for copying, distribution and
+    modification follow.
+
+            GNU GENERAL PUBLIC LICENSE
+       TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+      0. This License applies to any program or other work which contains
+    a notice placed by the copyright holder saying it may be distributed
+    under the terms of this General Public License.  The "Program", below,
+    refers to any such program or work, and a "work based on the Program"
+    means either the Program or any derivative work under copyright law:
+    that is to say, a work containing the Program or a portion of it,
+    either verbatim or with modifications and/or translated into another
+    language.  (Hereinafter, translation is included without limitation in
+    the term "modification".)  Each licensee is addressed as "you".
+
+    Activities other than copying, distribution and modification are not
+    covered by this License; they are outside its scope.  The act of
+    running the Program is not restricted, and the output from the Program
+    is covered only if its contents constitute a work based on the
+    Program (independent of having been made by running the Program).
+    Whether that is true depends on what the Program does.
+
+      1. You may copy and distribute verbatim copies of the Program's
+    source code as you receive it, in any medium, provided that you
+    conspicuously and appropriately publish on each copy an appropriate
+    copyright notice and disclaimer of warranty; keep intact all the
+    notices that refer to this License and to the absence of any warranty;
+    and give any other recipients of the Program a copy of this License
+    along with the Program.
+
+    You may charge a fee for the physical act of transferring a copy, and
+    you may at your option offer warranty protection in exchange for a fee.
+
+      2. You may modify your copy or copies of the Program or any portion
+    of it, thus forming a work based on the Program, and copy and
+    distribute such modifications or work under the terms of Section 1
+    above, provided that you also meet all of these conditions:
+
+        a) You must cause the modified files to carry prominent notices
+        stating that you changed the files and the date of any change.
+
+        b) You must cause any work that you distribute or publish, that in
+        whole or in part contains or is derived from the Program or any
+        part thereof, to be licensed as a whole at no charge to all third
+        parties under the terms of this License.
+
+        c) If the modified program normally reads commands interactively
+        when run, you must cause it, when started running for such
+        interactive use in the most ordinary way, to print or display an
+        announcement including an appropriate copyright notice and a
+        notice that there is no warranty (or else, saying that you provide
+        a warranty) and that users may redistribute the program under
+        these conditions, and telling the user how to view a copy of this
+        License.  (Exception: if the Program itself is interactive but
+        does not normally print such an announcement, your work based on
+        the Program is not required to print an announcement.)
+
+    These requirements apply to the modified work as a whole.  If
+    identifiable sections of that work are not derived from the Program,
+    and can be reasonably considered independent and separate works in
+    themselves, then this License, and its terms, do not apply to those
+    sections when you distribute them as separate works.  But when you
+    distribute the same sections as part of a whole which is a work based
+    on the Program, the distribution of the whole must be on the terms of
+    this License, whose permissions for other licensees extend to the
+    entire whole, and thus to each and every part regardless of who wrote it.
+
+    Thus, it is not the intent of this section to claim rights or contest
+    your rights to work written entirely by you; rather, the intent is to
+    exercise the right to control the distribution of derivative or
+    collective works based on the Program.
+
+    In addition, mere aggregation of another work not based on the Program
+    with the Program (or with a work based on the Program) on a volume of
+    a storage or distribution medium does not bring the other work under
+    the scope of this License.
+
+      3. You may copy and distribute the Program (or a work based on it,
+    under Section 2) in object code or executable form under the terms of
+    Sections 1 and 2 above provided that you also do one of the following:
+
+        a) Accompany it with the complete corresponding machine-readable
+        source code, which must be distributed under the terms of Sections
+        1 and 2 above on a medium customarily used for software interchange; or,
+
+        b) Accompany it with a written offer, valid for at least three
+        years, to give any third party, for a charge no more than your
+        cost of physically performing source distribution, a complete
+        machine-readable copy of the corresponding source code, to be
+        distributed under the terms of Sections 1 and 2 above on a medium
+        customarily used for software interchange; or,
+
+        c) Accompany it with the information you received as to the offer
+        to distribute corresponding source code.  (This alternative is
+        allowed only for noncommercial distribution and only if you
+        received the program in object code or executable form with such
+        an offer, in accord with Subsection b above.)
+
+    The source code for a work means the preferred form of the work for
+    making modifications to it.  For an executable work, complete source
+    code means all the source code for all modules it contains, plus any
+    associated interface definition files, plus the scripts used to
+    control compilation and installation of the executable.  However, as a
+    special exception, the source code distributed need not include
+    anything that is normally distributed (in either source or binary
+    form) with the major components (compiler, kernel, and so on) of the
+    operating system on which the executable runs, unless that component
+    itself accompanies the executable.
+
+    If distribution of executable or object code is made by offering
+    access to copy from a designated place, then offering equivalent
+    access to copy the source code from the same place counts as
+    distribution of the source code, even though third parties are not
+    compelled to copy the source along with the object code.
+
+      4. You may not copy, modify, sublicense, or distribute the Program
+    except as expressly provided under this License.  Any attempt
+    otherwise to copy, modify, sublicense or distribute the Program is
+    void, and will automatically terminate your rights under this License.
+    However, parties who have received copies, or rights, from you under
+    this License will not have their licenses terminated so long as such
+    parties remain in full compliance.
+
+      5. You are not required to accept this License, since you have not
+    signed it.  However, nothing else grants you permission to modify or
+    distribute the Program or its derivative works.  These actions are
+    prohibited by law if you do not accept this License.  Therefore, by
+    modifying or distributing the Program (or any work based on the
+    Program), you indicate your acceptance of this License to do so, and
+    all its terms and conditions for copying, distributing or modifying
+    the Program or works based on it.
+
+      6. Each time you redistribute the Program (or any work based on the
+    Program), the recipient automatically receives a license from the
+    original licensor to copy, distribute or modify the Program subject to
+    these terms and conditions.  You may not impose any further
+    restrictions on the recipients' exercise of the rights granted herein.
+    You are not responsible for enforcing compliance by third parties to
+    this License.
+
+      7. If, as a consequence of a court judgment or allegation of patent
+    infringement or for any other reason (not limited to patent issues),
+    conditions are imposed on you (whether by court order, agreement or
+    otherwise) that contradict the conditions of this License, they do not
+    excuse you from the conditions of this License.  If you cannot
+    distribute so as to satisfy simultaneously your obligations under this
+    License and any other pertinent obligations, then as a consequence you
+    may not distribute the Program at all.  For example, if a patent
+    license would not permit royalty-free redistribution of the Program by
+    all those who receive copies directly or indirectly through you, then
+    the only way you could satisfy both it and this License would be to
+    refrain entirely from distribution of the Program.
+
+    If any portion of this section is held invalid or unenforceable under
+    any particular circumstance, the balance of the section is intended to
+    apply and the section as a whole is intended to apply in other
+    circumstances.
+
+    It is not the purpose of this section to induce you to infringe any
+    patents or other property right claims or to contest validity of any
+    such claims; this section has the sole purpose of protecting the
+    integrity of the free software distribution system, which is
+    implemented by public license practices.  Many people have made
+    generous contributions to the wide range of software distributed
+    through that system in reliance on consistent application of that
+    system; it is up to the author/donor to decide if he or she is willing
+    to distribute software through any other system and a licensee cannot
+    impose that choice.
+
+    This section is intended to make thoroughly clear what is believed to
+    be a consequence of the rest of this License.
+
+      8. If the distribution and/or use of the Program is restricted in
+    certain countries either by patents or by copyrighted interfaces, the
+    original copyright holder who places the Program under this License
+    may add an explicit geographical distribution limitation excluding
+    those countries, so that distribution is permitted only in or among
+    countries not thus excluded.  In such case, this License incorporates
+    the limitation as if written in the body of this License.
+
+      9. The Free Software Foundation may publish revised and/or new versions
+    of the General Public License from time to time.  Such new versions will
+    be similar in spirit to the present version, but may differ in detail to
+    address new problems or concerns.
+
+    Each version is given a distinguishing version number.  If the Program
+    specifies a version number of this License which applies to it and "any
+    later version", you have the option of following the terms and conditions
+    either of that version or of any later version published by the Free
+    Software Foundation.  If the Program does not specify a version number of
+    this License, you may choose any version ever published by the Free Software
+    Foundation.
+
+      10. If you wish to incorporate parts of the Program into other free
+    programs whose distribution conditions are different, write to the author
+    to ask for permission.  For software which is copyrighted by the Free
+    Software Foundation, write to the Free Software Foundation; we sometimes
+    make exceptions for this.  Our decision will be guided by the two goals
+    of preserving the free status of all derivatives of our free software and
+    of promoting the sharing and reuse of software generally.
+
+              NO WARRANTY
+
+      11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+    FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+    OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+    PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+    OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+    TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+    PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+    REPAIR OR CORRECTION.
+
+      12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+    WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+    REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+    INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+    OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+    TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+    YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+    PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+    POSSIBILITY OF SUCH DAMAGES.
+
+             END OF TERMS AND CONDITIONS
+
+          How to Apply These Terms to Your New Programs
+
+      If you develop a new program, and you want it to be of the greatest
+    possible use to the public, the best way to achieve this is to make it
+    free software which everyone can redistribute and change under these terms.
+
+      To do so, attach the following notices to the program.  It is safest
+    to attach them to the start of each source file to most effectively
+    convey the exclusion of warranty; and each file should have at least
+    the "copyright" line and a pointer to where the full notice is found.
+
+        <one line to give the program's name and a brief idea of what it does.>
+        Copyright (C) <year>  <name of author>
+
+        This program is free software; you can redistribute it and/or modify
+        it under the terms of the GNU General Public License as published by
+        the Free Software Foundation; either version 2 of the License, or
+        (at your option) any later version.
+
+        This program is distributed in the hope that it will be useful,
+        but WITHOUT ANY WARRANTY; without even the implied warranty of
+        MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+        GNU General Public License for more details.
+
+        You should have received a copy of the GNU General Public License
+        along with this program; if not, write to the Free Software
+        Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+    Also add information on how to contact you by electronic and paper mail.
+
+    If the program is interactive, make it output a short notice like this
+    when it starts in an interactive mode:
+
+        Gnomovision version 69, Copyright (C) year name of author
+        Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+        This is free software, and you are welcome to redistribute it
+        under certain conditions; type `show c' for details.
+
+    The hypothetical commands `show w' and `show c' should show the appropriate
+    parts of the General Public License.  Of course, the commands you use may
+    be called something other than `show w' and `show c'; they could even be
+    mouse-clicks or menu items--whatever suits your program.
+
+    You should also get your employer (if you work as a programmer) or your
+    school, if any, to sign a "copyright disclaimer" for the program, if
+    necessary.  Here is a sample; alter the names:
+
+      Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+      `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+      <signature of Ty Coon>, 1 April 1989
+      Ty Coon, President of Vice
+
+    This General Public License does not permit incorporating your program into
+    proprietary programs.  If your program is a subroutine library, you may
+    consider it more useful to permit linking proprietary applications with the
+    library.  If this is what you want to do, use the GNU Library General
+    Public License instead of this License.
+
+    ----------------------------------------------------------------------
+
+    The bundled ZLib code is licensed under the ZLib license:
+
+    Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler
+
+      This software is provided 'as-is', without any express or implied
+      warranty.  In no event will the authors be held liable for any damages
+      arising from the use of this software.
+
+      Permission is granted to anyone to use this software for any purpose,
+      including commercial applications, and to alter it and redistribute it
+      freely, subject to the following restrictions:
+
+      1. The origin of this software must not be misrepresented; you must not
+         claim that you wrote the original software. If you use this software
+         in a product, an acknowledgment in the product documentation would be
+         appreciated but is not required.
+      2. Altered source versions must be plainly marked as such, and must not be
+         misrepresented as being the original software.
+      3. This notice may not be removed or altered from any source distribution.
+
+      Jean-loup Gailly        Mark Adler
+      jloup@gzip.org          madler@alumni.caltech.edu
+
+    ----------------------------------------------------------------------
+
+    The Clar framework is licensed under the MIT license:
+
+    Copyright (C) 2011 by Vicent Marti
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+
+    ----------------------------------------------------------------------
+
+    The regex library (deps/regex/) is licensed under the GNU LGPL
+
+                      GNU LESSER GENERAL PUBLIC LICENSE
+                           Version 2.1, February 1999
+
+     Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+     51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+     Everyone is permitted to copy and distribute verbatim copies
+     of this license document, but changing it is not allowed.
+
+    [This is the first released version of the Lesser GPL.  It also counts
+     as the successor of the GNU Library Public License, version 2, hence
+     the version number 2.1.]
+
+                                Preamble
+
+      The licenses for most software are designed to take away your
+    freedom to share and change it.  By contrast, the GNU General Public
+    Licenses are intended to guarantee your freedom to share and change
+    free software--to make sure the software is free for all its users.
+
+      This license, the Lesser General Public License, applies to some
+    specially designated software packages--typically libraries--of the
+    Free Software Foundation and other authors who decide to use it.  You
+    can use it too, but we suggest you first think carefully about whether
+    this license or the ordinary General Public License is the better
+    strategy to use in any particular case, based on the explanations below.
+
+      When we speak of free software, we are referring to freedom of use,
+    not price.  Our General Public Licenses are designed to make sure that
+    you have the freedom to distribute copies of free software (and charge
+    for this service if you wish); that you receive source code or can get
+    it if you want it; that you can change the software and use pieces of
+    it in new free programs; and that you are informed that you can do
+    these things.
+
+      To protect your rights, we need to make restrictions that forbid
+    distributors to deny you these rights or to ask you to surrender these
+    rights.  These restrictions translate to certain responsibilities for
+    you if you distribute copies of the library or if you modify it.
+
+      For example, if you distribute copies of the library, whether gratis
+    or for a fee, you must give the recipients all the rights that we gave
+    you.  You must make sure that they, too, receive or can get the source
+    code.  If you link other code with the library, you must provide
+    complete object files to the recipients, so that they can relink them
+    with the library after making changes to the library and recompiling
+    it.  And you must show them these terms so they know their rights.
+
+      We protect your rights with a two-step method: (1) we copyright the
+    library, and (2) we offer you this license, which gives you legal
+    permission to copy, distribute and/or modify the library.
+
+      To protect each distributor, we want to make it very clear that
+    there is no warranty for the free library.  Also, if the library is
+    modified by someone else and passed on, the recipients should know
+    that what they have is not the original version, so that the original
+    author's reputation will not be affected by problems that might be
+    introduced by others.
+
+      Finally, software patents pose a constant threat to the existence of
+    any free program.  We wish to make sure that a company cannot
+    effectively restrict the users of a free program by obtaining a
+    restrictive license from a patent holder.  Therefore, we insist that
+    any patent license obtained for a version of the library must be
+    consistent with the full freedom of use specified in this license.
+
+      Most GNU software, including some libraries, is covered by the
+    ordinary GNU General Public License.  This license, the GNU Lesser
+    General Public License, applies to certain designated libraries, and
+    is quite different from the ordinary General Public License.  We use
+    this license for certain libraries in order to permit linking those
+    libraries into non-free programs.
+
+      When a program is linked with a library, whether statically or using
+    a shared library, the combination of the two is legally speaking a
+    combined work, a derivative of the original library.  The ordinary
+    General Public License therefore permits such linking only if the
+    entire combination fits its criteria of freedom.  The Lesser General
+    Public License permits more lax criteria for linking other code with
+    the library.
+
+      We call this license the "Lesser" General Public License because it
+    does Less to protect the user's freedom than the ordinary General
+    Public License.  It also provides other free software developers Less
+    of an advantage over competing non-free programs.  These disadvantages
+    are the reason we use the ordinary General Public License for many
+    libraries.  However, the Lesser license provides advantages in certain
+    special circumstances.
+
+      For example, on rare occasions, there may be a special need to
+    encourage the widest possible use of a certain library, so that it becomes
+    a de-facto standard.  To achieve this, non-free programs must be
+    allowed to use the library.  A more frequent case is that a free
+    library does the same job as widely used non-free libraries.  In this
+    case, there is little to gain by limiting the free library to free
+    software only, so we use the Lesser General Public License.
+
+      In other cases, permission to use a particular library in non-free
+    programs enables a greater number of people to use a large body of
+    free software.  For example, permission to use the GNU C Library in
+    non-free programs enables many more people to use the whole GNU
+    operating system, as well as its variant, the GNU/Linux operating
+    system.
+
+      Although the Lesser General Public License is Less protective of the
+    users' freedom, it does ensure that the user of a program that is
+    linked with the Library has the freedom and the wherewithal to run
+    that program using a modified version of the Library.
+
+      The precise terms and conditions for copying, distribution and
+    modification follow.  Pay close attention to the difference between a
+    "work based on the library" and a "work that uses the library".  The
+    former contains code derived from the library, whereas the latter must
+    be combined with the library in order to run.
+
+                      GNU LESSER GENERAL PUBLIC LICENSE
+       TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+      0. This License Agreement applies to any software library or other
+    program which contains a notice placed by the copyright holder or
+    other authorized party saying it may be distributed under the terms of
+    this Lesser General Public License (also called "this License").
+    Each licensee is addressed as "you".
+
+      A "library" means a collection of software functions and/or data
+    prepared so as to be conveniently linked with application programs
+    (which use some of those functions and data) to form executables.
+
+      The "Library", below, refers to any such software library or work
+    which has been distributed under these terms.  A "work based on the
+    Library" means either the Library or any derivative work under
+    copyright law: that is to say, a work containing the Library or a
+    portion of it, either verbatim or with modifications and/or translated
+    straightforwardly into another language.  (Hereinafter, translation is
+    included without limitation in the term "modification".)
+
+      "Source code" for a work means the preferred form of the work for
+    making modifications to it.  For a library, complete source code means
+    all the source code for all modules it contains, plus any associated
+    interface definition files, plus the scripts used to control compilation
+    and installation of the library.
+
+      Activities other than copying, distribution and modification are not
+    covered by this License; they are outside its scope.  The act of
+    running a program using the Library is not restricted, and output from
+    such a program is covered only if its contents constitute a work based
+    on the Library (independent of the use of the Library in a tool for
+    writing it).  Whether that is true depends on what the Library does
+    and what the program that uses the Library does.
+
+      1. You may copy and distribute verbatim copies of the Library's
+    complete source code as you receive it, in any medium, provided that
+    you conspicuously and appropriately publish on each copy an
+    appropriate copyright notice and disclaimer of warranty; keep intact
+    all the notices that refer to this License and to the absence of any
+    warranty; and distribute a copy of this License along with the
+    Library.
+
+      You may charge a fee for the physical act of transferring a copy,
+    and you may at your option offer warranty protection in exchange for a
+    fee.
+
+      2. You may modify your copy or copies of the Library or any portion
+    of it, thus forming a work based on the Library, and copy and
+    distribute such modifications or work under the terms of Section 1
+    above, provided that you also meet all of these conditions:
+
+        a) The modified work must itself be a software library.
+
+        b) You must cause the files modified to carry prominent notices
+        stating that you changed the files and the date of any change.
+
+        c) You must cause the whole of the work to be licensed at no
+        charge to all third parties under the terms of this License.
+
+        d) If a facility in the modified Library refers to a function or a
+        table of data to be supplied by an application program that uses
+        the facility, other than as an argument passed when the facility
+        is invoked, then you must make a good faith effort to ensure that,
+        in the event an application does not supply such function or
+        table, the facility still operates, and performs whatever part of
+        its purpose remains meaningful.
+
+        (For example, a function in a library to compute square roots has
+        a purpose that is entirely well-defined independent of the
+        application.  Therefore, Subsection 2d requires that any
+        application-supplied function or table used by this function must
+        be optional: if the application does not supply it, the square
+        root function must still compute square roots.)
+
+    These requirements apply to the modified work as a whole.  If
+    identifiable sections of that work are not derived from the Library,
+    and can be reasonably considered independent and separate works in
+    themselves, then this License, and its terms, do not apply to those
+    sections when you distribute them as separate works.  But when you
+    distribute the same sections as part of a whole which is a work based
+    on the Library, the distribution of the whole must be on the terms of
+    this License, whose permissions for other licensees extend to the
+    entire whole, and thus to each and every part regardless of who wrote
+    it.
+
+    Thus, it is not the intent of this section to claim rights or contest
+    your rights to work written entirely by you; rather, the intent is to
+    exercise the right to control the distribution of derivative or
+    collective works based on the Library.
+
+    In addition, mere aggregation of another work not based on the Library
+    with the Library (or with a work based on the Library) on a volume of
+    a storage or distribution medium does not bring the other work under
+    the scope of this License.
+
+      3. You may opt to apply the terms of the ordinary GNU General Public
+    License instead of this License to a given copy of the Library.  To do
+    this, you must alter all the notices that refer to this License, so
+    that they refer to the ordinary GNU General Public License, version 2,
+    instead of to this License.  (If a newer version than version 2 of the
+    ordinary GNU General Public License has appeared, then you can specify
+    that version instead if you wish.)  Do not make any other change in
+    these notices.
+
+      Once this change is made in a given copy, it is irreversible for
+    that copy, so the ordinary GNU General Public License applies to all
+    subsequent copies and derivative works made from that copy.
+
+      This option is useful when you wish to copy part of the code of
+    the Library into a program that is not a library.
+
+      4. You may copy and distribute the Library (or a portion or
+    derivative of it, under Section 2) in object code or executable form
+    under the terms of Sections 1 and 2 above provided that you accompany
+    it with the complete corresponding machine-readable source code, which
+    must be distributed under the terms of Sections 1 and 2 above on a
+    medium customarily used for software interchange.
+
+      If distribution of object code is made by offering access to copy
+    from a designated place, then offering equivalent access to copy the
+    source code from the same place satisfies the requirement to
+    distribute the source code, even though third parties are not
+    compelled to copy the source along with the object code.
+
+      5. A program that contains no derivative of any portion of the
+    Library, but is designed to work with the Library by being compiled or
+    linked with it, is called a "work that uses the Library".  Such a
+    work, in isolation, is not a derivative work of the Library, and
+    therefore falls outside the scope of this License.
+
+      However, linking a "work that uses the Library" with the Library
+    creates an executable that is a derivative of the Library (because it
+    contains portions of the Library), rather than a "work that uses the
+    library".  The executable is therefore covered by this License.
+    Section 6 states terms for distribution of such executables.
+
+      When a "work that uses the Library" uses material from a header file
+    that is part of the Library, the object code for the work may be a
+    derivative work of the Library even though the source code is not.
+    Whether this is true is especially significant if the work can be
+    linked without the Library, or if the work is itself a library.  The
+    threshold for this to be true is not precisely defined by law.
+
+      If such an object file uses only numerical parameters, data
+    structure layouts and accessors, and small macros and small inline
+    functions (ten lines or less in length), then the use of the object
+    file is unrestricted, regardless of whether it is legally a derivative
+    work.  (Executables containing this object code plus portions of the
+    Library will still fall under Section 6.)
+
+      Otherwise, if the work is a derivative of the Library, you may
+    distribute the object code for the work under the terms of Section 6.
+    Any executables containing that work also fall under Section 6,
+    whether or not they are linked directly with the Library itself.
+
+      6. As an exception to the Sections above, you may also combine or
+    link a "work that uses the Library" with the Library to produce a
+    work containing portions of the Library, and distribute that work
+    under terms of your choice, provided that the terms permit
+    modification of the work for the customer's own use and reverse
+    engineering for debugging such modifications.
+
+      You must give prominent notice with each copy of the work that the
+    Library is used in it and that the Library and its use are covered by
+    this License.  You must supply a copy of this License.  If the work
+    during execution displays copyright notices, you must include the
+    copyright notice for the Library among them, as well as a reference
+    directing the user to the copy of this License.  Also, you must do one
+    of these things:
+
+        a) Accompany the work with the complete corresponding
+        machine-readable source code for the Library including whatever
+        changes were used in the work (which must be distributed under
+        Sections 1 and 2 above); and, if the work is an executable linked
+        with the Library, with the complete machine-readable "work that
+        uses the Library", as object code and/or source code, so that the
+        user can modify the Library and then relink to produce a modified
+        executable containing the modified Library.  (It is understood
+        that the user who changes the contents of definitions files in the
+        Library will not necessarily be able to recompile the application
+        to use the modified definitions.)
+
+        b) Use a suitable shared library mechanism for linking with the
+        Library.  A suitable mechanism is one that (1) uses at run time a
+        copy of the library already present on the user's computer system,
+        rather than copying library functions into the executable, and (2)
+        will operate properly with a modified version of the library, if
+        the user installs one, as long as the modified version is
+        interface-compatible with the version that the work was made with.
+
+        c) Accompany the work with a written offer, valid for at
+        least three years, to give the same user the materials
+        specified in Subsection 6a, above, for a charge no more
+        than the cost of performing this distribution.
+
+        d) If distribution of the work is made by offering access to copy
+        from a designated place, offer equivalent access to copy the above
+        specified materials from the same place.
+
+        e) Verify that the user has already received a copy of these
+        materials or that you have already sent this user a copy.
+
+      For an executable, the required form of the "work that uses the
+    Library" must include any data and utility programs needed for
+    reproducing the executable from it.  However, as a special exception,
+    the materials to be distributed need not include anything that is
+    normally distributed (in either source or binary form) with the major
+    components (compiler, kernel, and so on) of the operating system on
+    which the executable runs, unless that component itself accompanies
+    the executable.
+
+      It may happen that this requirement contradicts the license
+    restrictions of other proprietary libraries that do not normally
+    accompany the operating system.  Such a contradiction means you cannot
+    use both them and the Library together in an executable that you
+    distribute.
+
+      7. You may place library facilities that are a work based on the
+    Library side-by-side in a single library together with other library
+    facilities not covered by this License, and distribute such a combined
+    library, provided that the separate distribution of the work based on
+    the Library and of the other library facilities is otherwise
+    permitted, and provided that you do these two things:
+
+        a) Accompany the combined library with a copy of the same work
+        based on the Library, uncombined with any other library
+        facilities.  This must be distributed under the terms of the
+        Sections above.
+
+        b) Give prominent notice with the combined library of the fact
+        that part of it is a work based on the Library, and explaining
+        where to find the accompanying uncombined form of the same work.
+
+      8. You may not copy, modify, sublicense, link with, or distribute
+    the Library except as expressly provided under this License.  Any
+    attempt otherwise to copy, modify, sublicense, link with, or
+    distribute the Library is void, and will automatically terminate your
+    rights under this License.  However, parties who have received copies,
+    or rights, from you under this License will not have their licenses
+    terminated so long as such parties remain in full compliance.
+
+      9. You are not required to accept this License, since you have not
+    signed it.  However, nothing else grants you permission to modify or
+    distribute the Library or its derivative works.  These actions are
+    prohibited by law if you do not accept this License.  Therefore, by
+    modifying or distributing the Library (or any work based on the
+    Library), you indicate your acceptance of this License to do so, and
+    all its terms and conditions for copying, distributing or modifying
+    the Library or works based on it.
+
+      10. Each time you redistribute the Library (or any work based on the
+    Library), the recipient automatically receives a license from the
+    original licensor to copy, distribute, link with or modify the Library
+    subject to these terms and conditions.  You may not impose any further
+    restrictions on the recipients' exercise of the rights granted herein.
+    You are not responsible for enforcing compliance by third parties with
+    this License.
+
+      11. If, as a consequence of a court judgment or allegation of patent
+    infringement or for any other reason (not limited to patent issues),
+    conditions are imposed on you (whether by court order, agreement or
+    otherwise) that contradict the conditions of this License, they do not
+    excuse you from the conditions of this License.  If you cannot
+    distribute so as to satisfy simultaneously your obligations under this
+    License and any other pertinent obligations, then as a consequence you
+    may not distribute the Library at all.  For example, if a patent
+    license would not permit royalty-free redistribution of the Library by
+    all those who receive copies directly or indirectly through you, then
+    the only way you could satisfy both it and this License would be to
+    refrain entirely from distribution of the Library.
+
+    If any portion of this section is held invalid or unenforceable under any
+    particular circumstance, the balance of the section is intended to apply,
+    and the section as a whole is intended to apply in other circumstances.
+
+    It is not the purpose of this section to induce you to infringe any
+    patents or other property right claims or to contest validity of any
+    such claims; this section has the sole purpose of protecting the
+    integrity of the free software distribution system which is
+    implemented by public license practices.  Many people have made
+    generous contributions to the wide range of software distributed
+    through that system in reliance on consistent application of that
+    system; it is up to the author/donor to decide if he or she is willing
+    to distribute software through any other system and a licensee cannot
+    impose that choice.
+
+    This section is intended to make thoroughly clear what is believed to
+    be a consequence of the rest of this License.
+
+      12. If the distribution and/or use of the Library is restricted in
+    certain countries either by patents or by copyrighted interfaces, the
+    original copyright holder who places the Library under this License may add
+    an explicit geographical distribution limitation excluding those countries,
+    so that distribution is permitted only in or among countries not thus
+    excluded.  In such case, this License incorporates the limitation as if
+    written in the body of this License.
+
+      13. The Free Software Foundation may publish revised and/or new
+    versions of the Lesser General Public License from time to time.
+    Such new versions will be similar in spirit to the present version,
+    but may differ in detail to address new problems or concerns.
+
+    Each version is given a distinguishing version number.  If the Library
+    specifies a version number of this License which applies to it and
+    "any later version", you have the option of following the terms and
+    conditions either of that version or of any later version published by
+    the Free Software Foundation.  If the Library does not specify a
+    license version number, you may choose any version ever published by
+    the Free Software Foundation.
+
+      14. If you wish to incorporate parts of the Library into other free
+    programs whose distribution conditions are incompatible with these,
+    write to the author to ask for permission.  For software which is
+    copyrighted by the Free Software Foundation, write to the Free
+    Software Foundation; we sometimes make exceptions for this.  Our
+    decision will be guided by the two goals of preserving the free status
+    of all derivatives of our free software and of promoting the sharing
+    and reuse of software generally.
+
+                                NO WARRANTY
+
+      15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+    WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+    EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+    OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+    KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+    PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+    LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+    THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+      16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+    WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+    AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+    FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+    CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+    LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+    RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+    FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+    SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+    DAMAGES.
+
+                         END OF TERMS AND CONDITIONS
+
+               How to Apply These Terms to Your New Libraries
+
+      If you develop a new library, and you want it to be of the greatest
+    possible use to the public, we recommend making it free software that
+    everyone can redistribute and change.  You can do so by permitting
+    redistribution under these terms (or, alternatively, under the terms of the
+    ordinary General Public License).
+
+      To apply these terms, attach the following notices to the library.  It is
+    safest to attach them to the start of each source file to most effectively
+    convey the exclusion of warranty; and each file should have at least the
+    "copyright" line and a pointer to where the full notice is found.
+
+        <one line to give the library's name and a brief idea of what it does.>
+        Copyright (C) <year>  <name of author>
+
+        This library is free software; you can redistribute it and/or
+        modify it under the terms of the GNU Lesser General Public
+        License as published by the Free Software Foundation; either
+        version 2.1 of the License, or (at your option) any later version.
+
+        This library is distributed in the hope that it will be useful,
+        but WITHOUT ANY WARRANTY; without even the implied warranty of
+        MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+        Lesser General Public License for more details.
+
+        You should have received a copy of the GNU Lesser General Public
+        License along with this library; if not, write to the Free Software
+        Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+    Also add information on how to contact you by electronic and paper mail.
+
+    You should also get your employer (if you work as a programmer) or your
+    school, if any, to sign a "copyright disclaimer" for the library, if
+    necessary.  Here is a sample; alter the names:
+
+      Yoyodyne, Inc., hereby disclaims all copyright interest in the
+      library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+      <signature of Ty Coon>, 1 April 1990
+      Ty Coon, President of Vice
+
+    That's all there is to it!
+
+    ----------------------------------------------------------------------
+
+* libssh2 - http://www.libssh2.org/license.html
+
+    Copyright (c) 2004-2007 Sara Golemon <sarag@libssh2.org>
+    Copyright (c) 2005,2006 Mikhail Gusarov <dottedmag@dottedmag.net>
+    Copyright (c) 2006-2007 The Written Word, Inc.
+    Copyright (c) 2007 Eli Fant <elifantu@mail.ru>
+    Copyright (c) 2009 Daniel Stenberg
+    Copyright (C) 2008, 2009 Simon Josefsson
+    All rights reserved.
+
+    Redistribution and use in source and binary forms,
+    with or without modification, are permitted provided
+    that the following conditions are met:
+
+      Redistributions of source code must retain the above
+      copyright notice, this list of conditions and the
+      following disclaimer.
+
+      Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials
+      provided with the distribution.
+
+      Neither the name of the copyright holder nor the names
+      of any other contributors may be used to endorse or
+      promote products derived from this software without
+      specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+    OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+    ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+    CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+    BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+    WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+    USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+    OF SUCH DAMAGE.
+
+* libcurl - http://curl.haxx.se/docs/copyright.html
+
+    COPYRIGHT AND PERMISSION NOTICE
+
+    Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se.
+
+    All rights reserved.
+
+    Permission to use, copy, modify, and distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+    DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+    OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+    USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder shall not
+    be used in advertising or otherwise to promote the sale, use or other
+    dealings in this Software without prior written authorization of the
+    copyright holder.
+
+* flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT
+* link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT
+* openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT
+* toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT
+* libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT
+* git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT
+* tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT
+
+    Copyright (c) 2014 Alex Crichton
+
+    Permission is hereby granted, free of charge, to any
+    person obtaining a copy of this software and associated
+    documentation files (the "Software"), to deal in the
+    Software without restriction, including without
+    limitation the rights to use, copy, modify, merge,
+    publish, distribute, sublicense, and/or sell copies of
+    the Software, and to permit persons to whom the Software
+    is furnished to do so, subject to the following
+    conditions:
+
+    The above copyright notice and this permission notice
+    shall be included in all copies or substantial portions
+    of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+    TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+    PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+    SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+    CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+    IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+    DEALINGS IN THE SOFTWARE.
+
+* glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT
+* semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT
+
+    Copyright (c) 2014 The Rust Project Developers
+
+    Permission is hereby granted, free of charge, to any
+    person obtaining a copy of this software and associated
+    documentation files (the "Software"), to deal in the
+    Software without restriction, including without
+    limitation the rights to use, copy, modify, merge,
+    publish, distribute, sublicense, and/or sell copies of
+    the Software, and to permit persons to whom the Software
+    is furnished to do so, subject to the following
+    conditions:
+
+    The above copyright notice and this permission notice
+    shall be included in all copies or substantial portions
+    of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+    TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+    PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+    SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+    CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+    IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+    DEALINGS IN THE SOFTWARE.
+
+* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT
+
+    Copyright (c) 2006-2009 Graydon Hoare
+    Copyright (c) 2009-2013 Mozilla Foundation
+
+    Permission is hereby granted, free of charge, to any
+    person obtaining a copy of this software and associated
+    documentation files (the "Software"), to deal in the
+    Software without restriction, including without
+    limitation the rights to use, copy, modify, merge,
+    publish, distribute, sublicense, and/or sell copies of
+    the Software, and to permit persons to whom the Software
+    is furnished to do so, subject to the following
+    conditions:
+
+    The above copyright notice and this permission notice
+    shall be included in all copies or substantial portions
+    of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+    ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+    TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+    PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+    SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+    CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+    IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+    DEALINGS IN THE SOFTWARE.
+
+* rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt
+
+    The MIT License (MIT)
+
+    Copyright (c) 2013, Kang Seonghoon.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+
+* curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE
+
+    Copyright (c) 2014 Carl Lerche
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+
+* docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE
+
+    This is free and unencumbered software released into the public domain.
+
+    Anyone is free to copy, modify, publish, use, compile, sell, or
+    distribute this software, either in source code form or as a compiled
+    binary, for any purpose, commercial or non-commercial, and by any
+    means.
+
+    In jurisdictions that recognize copyright laws, the author or authors
+    of this software dedicate any and all copyright interest in the
+    software to the public domain. We make this dedication for the benefit
+    of the public at large and to the detriment of our heirs and
+    successors. We intend this dedication to be an overt act of
+    relinquishment in perpetuity of all present and future rights to this
+    software under copyright law.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+    IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+    OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+    ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+    OTHER DEALINGS IN THE SOFTWARE.
+
+    For more information, please refer to <http://unlicense.org/>
+
diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..ed8ca97
--- /dev/null
+++ b/README.md
@@ -0,0 +1,90 @@
+# Cargo
+
+Cargo downloads your Rust project’s dependencies and compiles your project.
+
+Learn more at https://doc.rust-lang.org/cargo/
+
+## Code Status
+
+[![Build Status](https://travis-ci.org/rust-lang/cargo.svg?branch=master)](https://travis-ci.org/rust-lang/cargo)
+[![Build Status](https://ci.appveyor.com/api/projects/status/github/rust-lang/cargo?branch=master&svg=true)](https://ci.appveyor.com/project/rust-lang-libs/cargo)
+
+Code documentation: https://docs.rs/cargo/
+
+## Installing Cargo
+
+Cargo is distributed by default with Rust, so if you've got `rustc` installed
+locally you probably also have `cargo` installed locally.
+
+## Compiling from Source
+
+Cargo requires the following tools and packages to build:
+
+* `python`
+* `curl` (on Unix)
+* `cmake`
+* OpenSSL headers (only for Unix, this is the `libssl-dev` package on Ubuntu)
+* `cargo` and `rustc`
+
+First, you'll want to check out this repository
+
+```
+git clone https://github.com/rust-lang/cargo
+cd cargo
+```
+
+With `cargo` already installed, you can simply run:
+
+```
+cargo build --release
+```
+
+## Adding new subcommands to Cargo
+
+Cargo is designed to be extensible with new subcommands without having to modify
+Cargo itself. See [the Wiki page][third-party-subcommands] for more details and
+a list of known community-developed subcommands.
+
+[third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands
+
+
+## Releases
+
+High level release notes are available as part of [Rust's release notes][rel].
+Cargo releases coincide with Rust releases.
+
+[rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md
+
+## Reporting issues
+
+Found a bug? We'd love to know about it!
+
+Please report all issues on the GitHub [issue tracker][issues].
+
+[issues]: https://github.com/rust-lang/cargo/issues
+
+## Contributing
+
+See [CONTRIBUTING.md](CONTRIBUTING.md). You may also find the architecture
+documentation useful ([ARCHITECTURE.md](ARCHITECTURE.md)).
+
+## License
+
+Cargo is primarily distributed under the terms of both the MIT license
+and the Apache License (Version 2.0).
+
+See LICENSE-APACHE and LICENSE-MIT for details.
+
+### Third party software
+
+This product includes software developed by the OpenSSL Project
+for use in the OpenSSL Toolkit (http://www.openssl.org/).
+
+In binary form, this product includes software that is licensed under the
+terms of the GNU General Public License, version 2, with a linking exception,
+which can be obtained from the [upstream repository][1].
+
+See LICENSE-THIRD-PARTY for details.
+
+[1]: https://github.com/libgit2/libgit2
+
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644 (file)
index 0000000..282633e
--- /dev/null
@@ -0,0 +1,29 @@
+environment:
+  matrix:
+  - TARGET: x86_64-pc-windows-msvc
+    OTHER_TARGET: i686-pc-windows-msvc
+  - TARGET: x86_64-pc-windows-msvc
+    MINIMAL_VERSIONS: true
+    CFG_DISABLE_CROSS_TESTS: 1
+
+install:
+  - if NOT defined APPVEYOR_PULL_REQUEST_NUMBER if "%APPVEYOR_REPO_BRANCH%" == "master" appveyor exit
+  - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe
+  - rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly
+  - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
+  - if defined MINIMAL_VERSIONS rustup toolchain install 1.28.0
+  - if defined OTHER_TARGET rustup target add %OTHER_TARGET%
+  - rustc -V
+  - cargo -V
+  - git submodule update --init
+
+clone_depth: 1
+
+build: false
+
+test_script:
+  # we don't have CI time to run the full `cargo test` with `minimal-versions` like
+  # - if defined MINIMAL_VERSIONS cargo +nightly generate-lockfile -Z minimal-versions && cargo +stable test
+  # so we just run `cargo check --tests` like
+  - if defined MINIMAL_VERSIONS cargo +nightly generate-lockfile -Z minimal-versions && cargo +1.28.0 check --tests
+  - if NOT defined MINIMAL_VERSIONS cargo test
diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs
new file mode 100644 (file)
index 0000000..a53a6fe
--- /dev/null
@@ -0,0 +1,238 @@
+extern crate clap;
+
+use clap::{AppSettings, Arg, ArgMatches};
+
+use cargo::{self, CliResult, Config};
+
+use super::list_commands;
+use super::commands;
+use command_prelude::*;
+
+pub fn main(config: &mut Config) -> CliResult {
+    let args = match cli().get_matches_safe() {
+        Ok(args) => args,
+        Err(e) => {
+            if e.kind == clap::ErrorKind::UnrecognizedSubcommand {
+                // An unrecognized subcommand might be an external subcommand.
+                let cmd = &e.info.as_ref().unwrap()[0].to_owned();
+                return super::execute_external_subcommand(config, cmd, &[cmd, "--help"])
+                    .map_err(|_| e.into());
+            } else {
+                return Err(e)?;
+            }
+        }
+    };
+
+    if args.value_of("unstable-features") == Some("help") {
+        println!(
+            "
+Available unstable (nightly-only) flags:
+
+    -Z avoid-dev-deps   -- Avoid installing dev-dependencies if possible
+    -Z minimal-versions -- Install minimal dependency versions instead of maximum
+    -Z no-index-update  -- Do not update the registry, avoids a network request for benchmarking
+    -Z offline          -- Offline mode that does not perform network requests
+    -Z unstable-options -- Allow the usage of unstable options such as --registry
+    -Z config-profile   -- Read profiles from .cargo/config files
+
+Run with 'cargo -Z [FLAG] [SUBCOMMAND]'"
+        );
+        return Ok(());
+    }
+
+    let is_verbose = args.occurrences_of("verbose") > 0;
+    if args.is_present("version") {
+        let version = get_version_string(is_verbose);
+        print!("{}", version);
+        return Ok(());
+    }
+
+    if let Some(ref code) = args.value_of("explain") {
+        let mut procss = config.rustc(None)?.process();
+        procss.arg("--explain").arg(code).exec()?;
+        return Ok(());
+    }
+
+    if args.is_present("list") {
+        println!("Installed Commands:");
+        for command in list_commands(config) {
+            match command {
+                CommandInfo::BuiltIn { name, about } => {
+                    let summary = about.unwrap_or_default();
+                    let summary = summary.lines().next().unwrap_or(&summary); // display only the first line
+                    println!("    {:<20} {}", name, summary)
+                }
+                CommandInfo::External { name, path } => {
+                    if is_verbose {
+                        println!("    {:<20} {}", name, path.display())
+                    } else {
+                        println!("    {}", name)
+                    }
+                }
+            }
+        }
+        return Ok(());
+    }
+
+    let args = expand_aliases(config, args)?;
+
+    execute_subcommand(config, &args)
+}
+
+pub fn get_version_string(is_verbose: bool) -> String {
+    let version = cargo::version();
+    let mut version_string = String::from(version.to_string());
+    version_string.push_str("\n");
+    if is_verbose {
+        version_string.push_str(&format!(
+            "release: {}.{}.{}\n",
+            version.major, version.minor, version.patch
+        ));
+        if let Some(ref cfg) = version.cfg_info {
+            if let Some(ref ci) = cfg.commit_info {
+                version_string.push_str(&format!("commit-hash: {}\n", ci.commit_hash));
+                version_string.push_str(&format!("commit-date: {}\n", ci.commit_date));
+            }
+        }
+    }
+    version_string
+}
+
+fn expand_aliases(
+    config: &mut Config,
+    args: ArgMatches<'static>,
+) -> Result<ArgMatches<'static>, CliError> {
+    if let (cmd, Some(args)) = args.subcommand() {
+        match (
+            commands::builtin_exec(cmd),
+            super::aliased_command(config, cmd)?,
+        ) {
+            (None, Some(mut alias)) => {
+                alias.extend(
+                    args.values_of("")
+                        .unwrap_or_default()
+                        .map(|s| s.to_string()),
+                );
+                let args = cli()
+                    .setting(AppSettings::NoBinaryName)
+                    .get_matches_from_safe(alias)?;
+                return expand_aliases(config, args);
+            }
+            (Some(_), Some(_)) => {
+                config.shell().warn(format!(
+                    "alias `{}` is ignored, because it is shadowed by a built in command",
+                    cmd
+                ))?;
+            }
+            (_, None) => {}
+        }
+    };
+    Ok(args)
+}
+
+fn execute_subcommand(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let (cmd, subcommand_args) = match args.subcommand() {
+        (cmd, Some(args)) => (cmd, args),
+        _ => {
+            cli().print_help()?;
+            return Ok(());
+        }
+    };
+
+    let arg_target_dir = &subcommand_args.value_of_path("target-dir", config);
+
+    config.configure(
+        args.occurrences_of("verbose") as u32,
+        if args.is_present("quiet") {
+            Some(true)
+        } else {
+            None
+        },
+        &args.value_of("color").map(|s| s.to_string()),
+        args.is_present("frozen"),
+        args.is_present("locked"),
+        arg_target_dir,
+        &args.values_of_lossy("unstable-features")
+            .unwrap_or_default(),
+    )?;
+
+    if let Some(exec) = commands::builtin_exec(cmd) {
+        return exec(config, subcommand_args);
+    }
+
+    let mut ext_args: Vec<&str> = vec![cmd];
+    ext_args.extend(subcommand_args.values_of("").unwrap_or_default());
+    super::execute_external_subcommand(config, cmd, &ext_args)
+}
+
+fn cli() -> App {
+    App::new("cargo")
+        .settings(&[
+            AppSettings::UnifiedHelpMessage,
+            AppSettings::DeriveDisplayOrder,
+            AppSettings::VersionlessSubcommands,
+            AppSettings::AllowExternalSubcommands,
+        ])
+        .about("")
+        .template(
+            "\
+Rust's package manager
+
+USAGE:
+    {usage}
+
+OPTIONS:
+{unified}
+
+Some common cargo commands are (see all commands with --list):
+    build       Compile the current package
+    check       Analyze the current package and report errors, but don't build object files
+    clean       Remove the target directory
+    doc         Build this package's and its dependencies' documentation
+    new         Create a new cargo package
+    init        Create a new cargo package in an existing directory
+    run         Build and execute src/main.rs
+    test        Run the tests
+    bench       Run the benchmarks
+    update      Update dependencies listed in Cargo.lock
+    search      Search registry for crates
+    publish     Package and upload this package to the registry
+    install     Install a Rust binary
+    uninstall   Uninstall a Rust binary
+
+See 'cargo help <command>' for more information on a specific command.\n",
+        )
+        .arg(opt("version", "Print version info and exit").short("V"))
+        .arg(opt("list", "List installed commands"))
+        .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
+        .arg(
+            opt(
+                "verbose",
+                "Use verbose output (-vv very verbose/build.rs output)",
+            ).short("v")
+                .multiple(true)
+                .global(true),
+        )
+        .arg(
+            opt("quiet", "No output printed to stdout")
+                .short("q")
+                .global(true),
+        )
+        .arg(
+            opt("color", "Coloring: auto, always, never")
+                .value_name("WHEN")
+                .global(true),
+        )
+        .arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
+        .arg(opt("locked", "Require Cargo.lock is up to date").global(true))
+        .arg(
+            Arg::with_name("unstable-features")
+                .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
+                .short("Z")
+                .value_name("FLAG")
+                .multiple(true)
+                .number_of_values(1)
+                .global(true),
+        )
+        .subcommands(commands::builtin())
+}
diff --git a/src/bin/cargo/command_prelude.rs b/src/bin/cargo/command_prelude.rs
new file mode 100644 (file)
index 0000000..c8f2fc8
--- /dev/null
@@ -0,0 +1,453 @@
+use std::path::PathBuf;
+use std::fs;
+
+use clap::{self, SubCommand};
+use cargo::CargoResult;
+use cargo::core::Workspace;
+use cargo::core::compiler::{BuildConfig, MessageFormat};
+use cargo::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl};
+use cargo::sources::CRATES_IO_REGISTRY;
+use cargo::util::paths;
+use cargo::util::important_paths::find_root_manifest_for_wd;
+
+pub use clap::{AppSettings, Arg, ArgMatches};
+pub use cargo::{CliError, CliResult, Config};
+pub use cargo::core::compiler::CompileMode;
+
+pub type App = clap::App<'static, 'static>;
+
+pub trait AppExt: Sized {
+    fn _arg(self, arg: Arg<'static, 'static>) -> Self;
+
+    fn arg_package_spec(
+        self,
+        package: &'static str,
+        all: &'static str,
+        exclude: &'static str,
+    ) -> Self {
+        self.arg_package_spec_simple(package)
+            ._arg(opt("all", all))
+            ._arg(multi_opt("exclude", "SPEC", exclude))
+    }
+
+    fn arg_package_spec_simple(self, package: &'static str) -> Self {
+        self._arg(multi_opt("package", "SPEC", package).short("p"))
+    }
+
+    fn arg_package(self, package: &'static str) -> Self {
+        self._arg(opt("package", package).short("p").value_name("SPEC"))
+    }
+
+    fn arg_jobs(self) -> Self {
+        self._arg(
+            opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
+                .short("j")
+                .value_name("N"),
+        )
+    }
+
+    fn arg_targets_all(
+        self,
+        lib: &'static str,
+        bin: &'static str,
+        bins: &'static str,
+        example: &'static str,
+        examples: &'static str,
+        test: &'static str,
+        tests: &'static str,
+        bench: &'static str,
+        benches: &'static str,
+        all: &'static str,
+    ) -> Self {
+        self.arg_targets_lib_bin(lib, bin, bins)
+            ._arg(multi_opt("example", "NAME", example))
+            ._arg(opt("examples", examples))
+            ._arg(multi_opt("test", "NAME", test))
+            ._arg(opt("tests", tests))
+            ._arg(multi_opt("bench", "NAME", bench))
+            ._arg(opt("benches", benches))
+            ._arg(opt("all-targets", all))
+    }
+
+    fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self {
+        self._arg(opt("lib", lib))
+            ._arg(multi_opt("bin", "NAME", bin))
+            ._arg(opt("bins", bins))
+    }
+
+    fn arg_targets_bins_examples(
+        self,
+        bin: &'static str,
+        bins: &'static str,
+        example: &'static str,
+        examples: &'static str,
+    ) -> Self {
+        self._arg(multi_opt("bin", "NAME", bin))
+            ._arg(opt("bins", bins))
+            ._arg(multi_opt("example", "NAME", example))
+            ._arg(opt("examples", examples))
+    }
+
+    fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
+        self._arg(multi_opt("bin", "NAME", bin))
+            ._arg(multi_opt("example", "NAME", example))
+    }
+
+    fn arg_features(self) -> Self {
+        self._arg(
+            opt("features", "Space-separated list of features to activate").value_name("FEATURES"),
+        )._arg(opt("all-features", "Activate all available features"))
+            ._arg(opt(
+                "no-default-features",
+                "Do not activate the `default` feature",
+            ))
+    }
+
+    fn arg_release(self, release: &'static str) -> Self {
+        self._arg(opt("release", release))
+    }
+
+    fn arg_doc(self, doc: &'static str) -> Self {
+        self._arg(opt("doc", doc))
+    }
+
+    fn arg_target_triple(self, target: &'static str) -> Self {
+        self._arg(opt("target", target).value_name("TRIPLE"))
+    }
+
+    fn arg_target_dir(self) -> Self {
+        self._arg(opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"))
+    }
+
+    fn arg_manifest_path(self) -> Self {
+        self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH"))
+    }
+
+    fn arg_message_format(self) -> Self {
+        self._arg(
+            opt("message-format", "Error format")
+                .value_name("FMT")
+                .case_insensitive(true)
+                .possible_values(&["human", "json", "short"])
+                .default_value("human"),
+        )
+    }
+
+    fn arg_build_plan(self) -> Self {
+        self._arg(opt("build-plan", "Output the build plan in JSON"))
+    }
+
+    fn arg_new_opts(self) -> Self {
+        self._arg(
+            opt(
+                "vcs",
+                "\
+                 Initialize a new repository for the given version \
+                 control system (git, hg, pijul, or fossil) or do not \
+                 initialize any version control at all (none), overriding \
+                 a global configuration.",
+            ).value_name("VCS")
+                .possible_values(&["git", "hg", "pijul", "fossil", "none"]),
+        )
+            ._arg(opt("bin", "Use a binary (application) template [default]"))
+            ._arg(opt("lib", "Use a library template"))
+            ._arg(
+                opt("edition", "Edition to set for the crate generated")
+                    .possible_values(&["2015", "2018"])
+                    .value_name("YEAR")
+            )
+            ._arg(
+                opt(
+                    "name",
+                    "Set the resulting package name, defaults to the directory name",
+                ).value_name("NAME"),
+            )
+    }
+
+    fn arg_index(self) -> Self {
+        self._arg(opt("index", "Registry index to upload the package to").value_name("INDEX"))
+            ._arg(
+                opt("host", "DEPRECATED, renamed to '--index'")
+                    .value_name("HOST")
+                    .hidden(true),
+            )
+    }
+}
+
+impl AppExt for App {
+    fn _arg(self, arg: Arg<'static, 'static>) -> Self {
+        self.arg(arg)
+    }
+}
+
+pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {
+    Arg::with_name(name).long(name).help(help)
+}
+
+pub fn multi_opt(
+    name: &'static str,
+    value_name: &'static str,
+    help: &'static str,
+) -> Arg<'static, 'static> {
+    // Note that all `.multiple(true)` arguments in Cargo should specify
+    // `.number_of_values(1)` as well, so that `--foo val1 val2` is
+    // **not** parsed as `foo` with values ["val1", "val2"].
+    // `number_of_values` should become the default in clap 3.
+    opt(name, help)
+        .value_name(value_name)
+        .multiple(true)
+        .number_of_values(1)
+}
+
+pub fn subcommand(name: &'static str) -> App {
+    SubCommand::with_name(name).settings(&[
+        AppSettings::UnifiedHelpMessage,
+        AppSettings::DeriveDisplayOrder,
+        AppSettings::DontCollapseArgsInUsage,
+    ])
+}
+
+pub trait ArgMatchesExt {
+    fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
+        let arg = match self._value_of(name) {
+            None => None,
+            Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
+                clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg))
+            })?),
+        };
+        Ok(arg)
+    }
+
+    /// Returns value of the `name` command-line argument as an absolute path
+    fn value_of_path(&self, name: &str, config: &Config) -> Option<PathBuf> {
+        self._value_of(name).map(|path| config.cwd().join(path))
+    }
+
+    fn root_manifest(&self, config: &Config) -> CargoResult<PathBuf> {
+        if let Some(path) = self.value_of_path("manifest-path", config) {
+            // In general, we try to avoid normalizing paths in Cargo,
+            // but in this particular case we need it to fix #3586.
+            let path = paths::normalize_path(&path);
+            if !path.ends_with("Cargo.toml") {
+                bail!("the manifest-path must be a path to a Cargo.toml file")
+            }
+            if fs::metadata(&path).is_err() {
+                bail!(
+                    "manifest path `{}` does not exist",
+                    self._value_of("manifest-path").unwrap()
+                )
+            }
+            return Ok(path);
+        }
+        find_root_manifest_for_wd(config.cwd())
+    }
+
+    fn workspace<'a>(&self, config: &'a Config) -> CargoResult<Workspace<'a>> {
+        let root = self.root_manifest(config)?;
+        let mut ws = Workspace::new(&root, config)?;
+        if config.cli_unstable().avoid_dev_deps {
+            ws.set_require_optional_deps(false);
+        }
+        Ok(ws)
+    }
+
+    fn jobs(&self) -> CargoResult<Option<u32>> {
+        self.value_of_u32("jobs")
+    }
+
+    fn target(&self) -> Option<String> {
+        self._value_of("target").map(|s| s.to_string())
+    }
+
+    fn compile_options<'a>(
+        &self,
+        config: &'a Config,
+        mode: CompileMode,
+    ) -> CargoResult<CompileOptions<'a>> {
+        let spec = Packages::from_flags(
+            self._is_present("all"),
+            self._values_of("exclude"),
+            self._values_of("package"),
+        )?;
+
+        let message_format = match self._value_of("message-format") {
+            None => MessageFormat::Human,
+            Some(f) => {
+                if f.eq_ignore_ascii_case("json") {
+                    MessageFormat::Json
+                } else if f.eq_ignore_ascii_case("human") {
+                    MessageFormat::Human
+                } else if f.eq_ignore_ascii_case("short") {
+                    MessageFormat::Short
+                } else {
+                    panic!("Impossible message format: {:?}", f)
+                }
+            }
+        };
+
+        let mut build_config = BuildConfig::new(config, self.jobs()?, &self.target(), mode)?;
+        build_config.message_format = message_format;
+        build_config.release = self._is_present("release");
+        build_config.build_plan = self._is_present("build-plan");
+        if build_config.build_plan && !config.cli_unstable().unstable_options {
+            Err(format_err!(
+                "`--build-plan` flag is unstable, pass `-Z unstable-options` to enable it"
+            ))?;
+        };
+
+        let opts = CompileOptions {
+            config,
+            build_config,
+            features: self._values_of("features"),
+            all_features: self._is_present("all-features"),
+            no_default_features: self._is_present("no-default-features"),
+            spec,
+            filter: CompileFilter::new(
+                self._is_present("lib"),
+                self._values_of("bin"),
+                self._is_present("bins"),
+                self._values_of("test"),
+                self._is_present("tests"),
+                self._values_of("example"),
+                self._is_present("examples"),
+                self._values_of("bench"),
+                self._is_present("benches"),
+                self._is_present("all-targets"),
+            ),
+            target_rustdoc_args: None,
+            target_rustc_args: None,
+            local_rustdoc_args: None,
+            export_dir: None,
+        };
+        Ok(opts)
+    }
+
+    fn compile_options_for_single_package<'a>(
+        &self,
+        config: &'a Config,
+        mode: CompileMode,
+    ) -> CargoResult<CompileOptions<'a>> {
+        let mut compile_opts = self.compile_options(config, mode)?;
+        compile_opts.spec = Packages::Packages(self._values_of("package"));
+        Ok(compile_opts)
+    }
+
+    fn new_options(&self, config: &Config) -> CargoResult<NewOptions> {
+        let vcs = self._value_of("vcs").map(|vcs| match vcs {
+            "git" => VersionControl::Git,
+            "hg" => VersionControl::Hg,
+            "pijul" => VersionControl::Pijul,
+            "fossil" => VersionControl::Fossil,
+            "none" => VersionControl::NoVcs,
+            vcs => panic!("Impossible vcs: {:?}", vcs),
+        });
+        NewOptions::new(
+            vcs,
+            self._is_present("bin"),
+            self._is_present("lib"),
+            self.value_of_path("path", config).unwrap(),
+            self._value_of("name").map(|s| s.to_string()),
+            self._value_of("edition").map(|s| s.to_string()),
+            self.registry(config)?,
+        )
+    }
+
+    fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
+        match self._value_of("registry") {
+            Some(registry) => {
+                if !config.cli_unstable().unstable_options {
+                    return Err(format_err!(
+                        "registry option is an unstable feature and \
+                         requires -Zunstable-options to use."
+                    ));
+                }
+
+                if registry == CRATES_IO_REGISTRY {
+                    // If "crates.io" is specified then we just need to return None
+                    // as that will cause cargo to use crates.io. This is required
+                    // for the case where a default alternative registry is used
+                    // but the user wants to switch back to crates.io for a single
+                    // command.
+                    Ok(None)
+                }
+                else {
+                    Ok(Some(registry.to_string()))                    
+                }
+            }
+            None => {
+                config.default_registry()
+            }
+        }
+    }
+
+    fn index(&self, config: &Config) -> CargoResult<Option<String>> {
+        // TODO: Deprecated
+        // remove once it has been decided --host can be removed
+        // We may instead want to repurpose the host flag, as
+        // mentioned in this issue
+        // https://github.com/rust-lang/cargo/issues/4208
+        let msg = "The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+wants the location of the index. Please use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.";
+
+        let index = match self._value_of("host") {
+            Some(host) => {
+                config.shell().warn(&msg)?;
+                Some(host.to_string())
+            }
+            None => self._value_of("index").map(|s| s.to_string()),
+        };
+        Ok(index)
+    }
+
+    fn _value_of(&self, name: &str) -> Option<&str>;
+
+    fn _values_of(&self, name: &str) -> Vec<String>;
+
+    fn _is_present(&self, name: &str) -> bool;
+}
+
+impl<'a> ArgMatchesExt for ArgMatches<'a> {
+    fn _value_of(&self, name: &str) -> Option<&str> {
+        self.value_of(name)
+    }
+
+    fn _values_of(&self, name: &str) -> Vec<String> {
+        self.values_of(name)
+            .unwrap_or_default()
+            .map(|s| s.to_string())
+            .collect()
+    }
+
+    fn _is_present(&self, name: &str) -> bool {
+        self.is_present(name)
+    }
+}
+
+pub fn values(args: &ArgMatches, name: &str) -> Vec<String> {
+    args.values_of(name)
+        .unwrap_or_default()
+        .map(|s| s.to_string())
+        .collect()
+}
+
+#[derive(PartialEq, PartialOrd, Eq, Ord)]
+pub enum CommandInfo {
+    BuiltIn { name: String, about: Option<String>, },
+    External { name: String, path: PathBuf },
+}
+
+impl CommandInfo {
+    pub fn name(&self) -> String {
+        match self {
+            CommandInfo::BuiltIn { name, .. } => name.to_string(),
+            CommandInfo::External { name, .. } => name.to_string(),
+        }
+    }
+}
diff --git a/src/bin/cargo/commands/bench.rs b/src/bin/cargo/commands/bench.rs
new file mode 100644 (file)
index 0000000..b688e36
--- /dev/null
@@ -0,0 +1,104 @@
+use command_prelude::*;
+
+use cargo::ops::{self, TestOptions};
+
/// Builds the clap definition for the `cargo bench` subcommand: positional
/// filter, pass-through args, target selection, and extended help text.
pub fn cli() -> App {
    subcommand("bench")
        // Everything after the trailing `--` is forwarded verbatim to the
        // benchmark binaries instead of being parsed as Cargo options.
        .setting(AppSettings::TrailingVarArg)
        .about("Execute all benchmarks of a local package")
        .arg(
            Arg::with_name("BENCHNAME")
                .help("If specified, only run benches containing this string in their names"),
        )
        .arg(
            Arg::with_name("args")
                .help("Arguments for the bench binary")
                .multiple(true)
                .last(true),
        )
        .arg_targets_all(
            "Benchmark only this package's library",
            "Benchmark only the specified binary",
            "Benchmark all binaries",
            "Benchmark only the specified example",
            "Benchmark all examples",
            "Benchmark only the specified test target",
            "Benchmark all tests",
            "Benchmark only the specified bench target",
            "Benchmark all benches",
            "Benchmark all targets",
        )
        .arg(opt("no-run", "Compile, but don't run benchmarks"))
        .arg_package_spec(
            "Package to run benchmarks for",
            "Benchmark all packages in the workspace",
            "Exclude packages from the benchmark",
        )
        .arg_jobs()
        .arg_features()
        .arg_target_triple("Build for the target triple")
        .arg_target_dir()
        .arg_manifest_path()
        .arg_message_format()
        .arg(opt(
            "no-fail-fast",
            "Run all benchmarks regardless of failure",
        ))
        .after_help(
            "\
The benchmark filtering argument `BENCHNAME` and all the arguments following the
two dashes (`--`) are passed to the benchmark binaries and thus to libtest
(rustc's built in unit-test and micro-benchmarking framework).  If you're
passing arguments to both Cargo and the binary, the ones after `--` go to the
binary, the ones before go to Cargo.  For details about libtest's arguments see
the output of `cargo bench -- --help`.

If the --package argument is given, then SPEC is a package id specification
which indicates which package should be benchmarked. If it is not given, then
the current package is benchmarked. For more information on SPEC and its format,
see the `cargo help pkgid` command.

All packages in the workspace are benchmarked if the `--all` flag is supplied. The
`--all` flag is automatically assumed for a virtual manifest.
Note that `--exclude` has to be specified in conjunction with the `--all` flag.

The --jobs argument affects the building of the benchmark executable but does
not affect how many jobs are used when running the benchmarks.

Compilation can be customized with the `bench` profile in the manifest.
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let mut compile_opts = args.compile_options(config, CompileMode::Bench)?;
+    compile_opts.build_config.release = true;
+
+    let ops = TestOptions {
+        no_run: args.is_present("no-run"),
+        no_fail_fast: args.is_present("no-fail-fast"),
+        compile_opts,
+    };
+
+    let mut bench_args = vec![];
+    bench_args.extend(
+        args.value_of("BENCHNAME")
+            .into_iter()
+            .map(|s| s.to_string()),
+    );
+    bench_args.extend(
+        args.values_of("args")
+            .unwrap_or_default()
+            .map(|s| s.to_string()),
+    );
+
+    let err = ops::run_benches(&ws, &ops, &bench_args)?;
+    match err {
+        None => Ok(()),
+        Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+            Some(i) => CliError::new(format_err!("bench failed"), i),
+            None => CliError::new(err.into(), 101),
+        }),
+    }
+}
diff --git a/src/bin/cargo/commands/build.rs b/src/bin/cargo/commands/build.rs
new file mode 100644 (file)
index 0000000..4004c25
--- /dev/null
@@ -0,0 +1,59 @@
+use command_prelude::*;
+
+use cargo::ops;
+
/// Builds the clap definition for the `cargo build` subcommand
/// (also reachable via the short alias `cargo b`).
pub fn cli() -> App {
    subcommand("build")
        .alias("b")
        .about("Compile a local package and all of its dependencies")
        .arg_package_spec(
            "Package to build (see `cargo help pkgid`)",
            "Build all packages in the workspace",
            "Exclude packages from the build",
        )
        .arg_jobs()
        .arg_targets_all(
            "Build only this package's library",
            "Build only the specified binary",
            "Build all binaries",
            "Build only the specified example",
            "Build all examples",
            "Build only the specified test target",
            "Build all tests",
            "Build only the specified bench target",
            "Build all benches",
            "Build all targets",
        )
        .arg_release("Build artifacts in release mode, with optimizations")
        .arg_features()
        .arg_target_triple("Build for the target triple")
        .arg_target_dir()
        // Unstable: only honored together with `-Z unstable-options` (checked in exec).
        .arg(opt("out-dir", "Copy final artifacts to this directory").value_name("PATH"))
        .arg_manifest_path()
        .arg_message_format()
        .arg_build_plan()
        .after_help(
            "\
All packages in the workspace are built if the `--all` flag is supplied. The
`--all` flag is automatically assumed for a virtual manifest.
Note that `--exclude` has to be specified in conjunction with the `--all` flag.

Compilation can be configured via the use of profiles which are configured in
the manifest. The default profile for this command is `dev`, but passing
the --release flag will use the `release` profile instead.
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
+    compile_opts.export_dir = args.value_of_path("out-dir", config);
+    if compile_opts.export_dir.is_some() && !config.cli_unstable().unstable_options {
+        Err(format_err!(
+            "`--out-dir` flag is unstable, pass `-Z unstable-options` to enable it"
+        ))?;
+    };
+    ops::compile(&ws, &compile_opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/check.rs b/src/bin/cargo/commands/check.rs
new file mode 100644 (file)
index 0000000..c9bbac1
--- /dev/null
@@ -0,0 +1,72 @@
+use command_prelude::*;
+
+use cargo::ops;
+
/// Builds the clap definition for the `cargo check` subcommand.
pub fn cli() -> App {
    subcommand("check")
        .about("Check a local package and all of its dependencies for errors")
        .arg_package_spec(
            "Package(s) to check",
            "Check all packages in the workspace",
            "Exclude packages from the check",
        )
        .arg_jobs()
        .arg_targets_all(
            "Check only this package's library",
            "Check only the specified binary",
            "Check all binaries",
            "Check only the specified example",
            "Check all examples",
            "Check only the specified test target",
            "Check all tests",
            "Check only the specified bench target",
            "Check all benches",
            "Check all targets",
        )
        .arg_release("Check artifacts in release mode, with optimizations")
        // Only the value "test" is accepted here; exec() rejects anything else.
        .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
        .arg_features()
        .arg_target_triple("Check for the target triple")
        .arg_target_dir()
        .arg_manifest_path()
        .arg_message_format()
        .after_help(
            "\
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be built. If it is not given, then the
current package is built. For more information on SPEC and its format, see the
`cargo help pkgid` command.

All packages in the workspace are checked if the `--all` flag is supplied. The
`--all` flag is automatically assumed for a virtual manifest.
Note that `--exclude` has to be specified in conjunction with the `--all` flag.

Compilation can be configured via the use of profiles which are configured in
the manifest. The default profile for this command is `dev`, but passing
the --release flag will use the `release` profile instead.

The `--profile test` flag can be used to check unit tests with the
`#[cfg(test)]` attribute.
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let test = match args.value_of("profile") {
+        Some("test") => true,
+        None => false,
+        Some(profile) => {
+            let err = format_err!(
+                "unknown profile: `{}`, only `test` is \
+                 currently supported",
+                profile
+            );
+            return Err(CliError::new(err, 101));
+        }
+    };
+    let mode = CompileMode::Check { test };
+    let compile_opts = args.compile_options(config, mode)?;
+    ops::compile(&ws, &compile_opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/clean.rs b/src/bin/cargo/commands/clean.rs
new file mode 100644 (file)
index 0000000..a7606a6
--- /dev/null
@@ -0,0 +1,35 @@
+use command_prelude::*;
+
+use cargo::ops::{self, CleanOptions};
+
/// Builds the clap definition for the `cargo clean` subcommand.
pub fn cli() -> App {
    subcommand("clean")
        .about("Remove artifacts that cargo has generated in the past")
        .arg_package_spec_simple("Package to clean artifacts for")
        .arg_manifest_path()
        .arg_target_triple("Target triple to clean output for (default all)")
        .arg_target_dir()
        .arg_release("Whether or not to clean release artifacts")
        .arg_doc("Whether or not to clean just the documentation directory")
        .after_help(
            "\
If the --package argument is given, then SPEC is a package id specification
which indicates which package's artifacts should be cleaned out. If it is not
given, then all packages' artifacts are removed. For more information on SPEC
and its format, see the `cargo help pkgid` command.
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let opts = CleanOptions {
+        config,
+        spec: values(args, "package"),
+        target: args.target(),
+        release: args.is_present("release"),
+        doc: args.is_present("doc"),
+    };
+    ops::clean(&ws, &opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/doc.rs b/src/bin/cargo/commands/doc.rs
new file mode 100644 (file)
index 0000000..3bbcae5
--- /dev/null
@@ -0,0 +1,65 @@
+use command_prelude::*;
+
+use cargo::ops::{self, DocOptions};
+
/// Builds the clap definition for the `cargo doc` subcommand.
pub fn cli() -> App {
    subcommand("doc")
        .about("Build a package's documentation")
        .arg(opt(
            "open",
            "Opens the docs in a browser after the operation",
        ))
        .arg_package_spec(
            "Package to document",
            "Document all packages in the workspace",
            "Exclude packages from the build",
        )
        .arg(opt("no-deps", "Don't build documentation for dependencies"))
        .arg(opt("document-private-items", "Document private items"))
        .arg_jobs()
        .arg_targets_lib_bin(
            "Document only this package's library",
            "Document only the specified binary",
            "Document all binaries",
        )
        .arg_release("Build artifacts in release mode, with optimizations")
        .arg_features()
        .arg_target_triple("Build for the target triple")
        .arg_target_dir()
        .arg_manifest_path()
        .arg_message_format()
        .after_help(
            "\
By default the documentation for the local package and all dependencies is
built. The output is all placed in `target/doc` in rustdoc's usual format.

All packages in the workspace are documented if the `--all` flag is supplied. The
`--all` flag is automatically assumed for a virtual manifest.
Note that `--exclude` has to be specified in conjunction with the `--all` flag.

If the --package argument is given, then SPEC is a package id specification
which indicates which package should be documented. If it is not given, then the
current package is documented. For more information on SPEC and its format, see
the `cargo help pkgid` command.
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let mode = CompileMode::Doc {
+        deps: !args.is_present("no-deps"),
+    };
+    let mut compile_opts = args.compile_options(config, mode)?;
+    compile_opts.local_rustdoc_args = if args.is_present("document-private-items") {
+        Some(vec!["--document-private-items".to_string()])
+    } else {
+        None
+    };
+    let doc_opts = DocOptions {
+        open_result: args.is_present("open"),
+        compile_opts,
+    };
+    ops::doc(&ws, &doc_opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/fetch.rs b/src/bin/cargo/commands/fetch.rs
new file mode 100644 (file)
index 0000000..f69ed25
--- /dev/null
@@ -0,0 +1,34 @@
+use command_prelude::*;
+
+use cargo::ops;
+use cargo::ops::FetchOptions;
+
/// Builds the clap definition for the `cargo fetch` subcommand.
pub fn cli() -> App {
    subcommand("fetch")
        .about("Fetch dependencies of a package from the network")
        .arg_manifest_path()
        .arg_target_triple("Fetch dependencies for the target triple")
        .after_help(
            "\
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
the lockfile changes.

If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let opts = FetchOptions {
+        config,
+        target: args.target(),
+    };
+    ops::fetch(&ws, &opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/fix.rs b/src/bin/cargo/commands/fix.rs
new file mode 100644 (file)
index 0000000..b98968b
--- /dev/null
@@ -0,0 +1,147 @@
+use command_prelude::*;
+
+use cargo::ops::{self, CompileFilter, FilterRule};
+
/// Builds the clap definition for the `cargo fix` subcommand, including the
/// edition-migration flags and the VCS safety overrides.
pub fn cli() -> App {
    subcommand("fix")
        .about("Automatically fix lint warnings reported by rustc")
        .arg_package_spec(
            "Package(s) to fix",
            "Fix all packages in the workspace",
            "Exclude packages from the fixes",
        )
        .arg_jobs()
        .arg_targets_all(
            "Fix only this package's library",
            "Fix only the specified binary",
            "Fix all binaries",
            "Fix only the specified example",
            "Fix all examples",
            "Fix only the specified test target",
            "Fix all tests",
            "Fix only the specified bench target",
            "Fix all benches",
            "Fix all targets (default)",
        )
        .arg_release("Fix artifacts in release mode, with optimizations")
        .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
        .arg_features()
        .arg_target_triple("Fix for the target triple")
        .arg_target_dir()
        .arg_manifest_path()
        .arg_message_format()
        .arg(
            Arg::with_name("broken-code")
                .long("broken-code")
                .help("Fix code even if it already has compiler errors"),
        )
        .arg(
            Arg::with_name("edition")
                .long("edition")
                .help("Fix in preparation for the next edition"),
        )
        .arg(
            // This is a deprecated argument, we'll want to phase it out
            // eventually.
            Arg::with_name("prepare-for")
                .long("prepare-for")
                .help("Fix warnings in preparation of an edition upgrade")
                .takes_value(true)
                .possible_values(&["2018"])
                .conflicts_with("edition")
                .hidden(true),
        )
        .arg(
            Arg::with_name("idioms")
                .long("edition-idioms")
                .help("Fix warnings to migrate to the idioms of an edition")
        )
        .arg(
            // The three `allow-*` flags below bypass the dirty-working-tree
            // safety checks; `cargo fix` rewrites files in place.
            Arg::with_name("allow-no-vcs")
                .long("allow-no-vcs")
                .help("Fix code even if a VCS was not detected"),
        )
        .arg(
            Arg::with_name("allow-dirty")
                .long("allow-dirty")
                .help("Fix code even if the working directory is dirty"),
        )
        .arg(
            Arg::with_name("allow-staged")
                .long("allow-staged")
                .help("Fix code even if the working directory has staged changes"),
        )
        .after_help(
            "\
This Cargo subcommand will automatically take rustc's suggestions from
diagnostics like warnings and apply them to your source code. This is intended
to help automate tasks that rustc itself already knows how to tell you to fix!
The `cargo fix` subcommand is also being developed for the Rust 2018 edition
to provide code the ability to easily opt-in to the new edition without having
to worry about any breakage.

Executing `cargo fix` will under the hood execute `cargo check`. Any warnings
applicable to your crate will be automatically fixed (if possible) and all
remaining warnings will be displayed when the check process is finished. For
example if you'd like to prepare for the 2018 edition, you can do so by
executing:

    cargo fix --edition

which behaves the same as `cargo check --all-targets`. Similarly if you'd like
to fix code for different platforms you can do:

    cargo fix --edition --target x86_64-pc-windows-gnu

or if your crate has optional features:

    cargo fix --edition --no-default-features --features foo

If you encounter any problems with `cargo fix` or otherwise have any questions
or feature requests please don't hesitate to file an issue at
https://github.com/rust-lang/cargo
",
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let test = match args.value_of("profile") {
+        Some("test") => true,
+        None => false,
+        Some(profile) => {
+            let err = format_err!(
+                "unknown profile: `{}`, only `test` is \
+                 currently supported",
+                profile
+            );
+            return Err(CliError::new(err, 101));
+        }
+    };
+    let mode = CompileMode::Check { test };
+
+    // Unlike other commands default `cargo fix` to all targets to fix as much
+    // code as we can.
+    let mut opts = args.compile_options(config, mode)?;
+    if let CompileFilter::Default { .. } = opts.filter {
+        opts.filter = CompileFilter::Only {
+            all_targets: true,
+            lib: true,
+            bins: FilterRule::All,
+            examples: FilterRule::All,
+            benches: FilterRule::All,
+            tests: FilterRule::All,
+        }
+    }
+    ops::fix(&ws, &mut ops::FixOptions {
+        edition: args.is_present("edition"),
+        prepare_for: args.value_of("prepare-for"),
+        idioms: args.is_present("idioms"),
+        compile_opts: opts,
+        allow_dirty: args.is_present("allow-dirty"),
+        allow_no_vcs: args.is_present("allow-no-vcs"),
+        allow_staged: args.is_present("allow-staged"),
+        broken_code: args.is_present("broken-code"),
+    })?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/generate_lockfile.rs b/src/bin/cargo/commands/generate_lockfile.rs
new file mode 100644 (file)
index 0000000..6fa6f44
--- /dev/null
@@ -0,0 +1,15 @@
+use command_prelude::*;
+
+use cargo::ops;
+
/// Builds the clap definition for the `cargo generate-lockfile` subcommand.
pub fn cli() -> App {
    subcommand("generate-lockfile")
        .about("Generate the lockfile for a package")
        .arg_manifest_path()
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    ops::generate_lockfile(&ws)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/git_checkout.rs b/src/bin/cargo/commands/git_checkout.rs
new file mode 100644 (file)
index 0000000..a9401f1
--- /dev/null
@@ -0,0 +1,36 @@
+use command_prelude::*;
+
+use cargo::core::{GitReference, Source, SourceId};
+use cargo::sources::GitSource;
+use cargo::util::ToUrl;
+
/// Builds the clap definition for the (internal) `cargo git-checkout`
/// subcommand; both `--url` and `--reference` are mandatory.
pub fn cli() -> App {
    subcommand("git-checkout")
        .about("Checkout a copy of a Git repository")
        .arg(
            Arg::with_name("url")
                .long("url")
                .value_name("URL")
                .required(true),
        )
        .arg(
            Arg::with_name("reference")
                .long("reference")
                .value_name("REF")
                .required(true),
        )
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let url = args.value_of("url").unwrap().to_url()?;
+    let reference = args.value_of("reference").unwrap();
+
+    let reference = GitReference::Branch(reference.to_string());
+    let source_id = SourceId::for_git(&url, reference)?;
+
+    let mut source = GitSource::new(&source_id, config)?;
+
+    source.update()?;
+
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/init.rs b/src/bin/cargo/commands/init.rs
new file mode 100644 (file)
index 0000000..bc4bf42
--- /dev/null
@@ -0,0 +1,20 @@
+use command_prelude::*;
+
+use cargo::ops;
+
/// Builds the clap definition for the `cargo init` subcommand; the path
/// defaults to the current directory.
pub fn cli() -> App {
    subcommand("init")
        .about("Create a new cargo package in an existing directory")
        .arg(Arg::with_name("path").default_value("."))
        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
        .arg_new_opts()
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let opts = args.new_options(config)?;
+    ops::init(&opts, config)?;
+    config
+        .shell()
+        .status("Created", format!("{} package", opts.kind))?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/install.rs b/src/bin/cargo/commands/install.rs
new file mode 100644 (file)
index 0000000..ba3c669
--- /dev/null
@@ -0,0 +1,131 @@
+use command_prelude::*;
+
+use cargo::core::{GitReference, SourceId};
+use cargo::ops;
+use cargo::util::ToUrl;
+
/// Builds the clap definition for the `cargo install` subcommand, covering
/// crates.io, git, path, and alternate-registry sources.
pub fn cli() -> App {
    subcommand("install")
        .about("Install a Rust binary")
        .arg(Arg::with_name("crate").empty_values(false).multiple(true))
        .arg(
            // `--vers` is kept as an alias for backwards compatibility.
            opt("version", "Specify a version to install from crates.io")
                .alias("vers")
                .value_name("VERSION"),
        )
        .arg(opt("git", "Git URL to install the specified crate from").value_name("URL"))
        .arg(opt("branch", "Branch to use when installing from git").value_name("BRANCH"))
        .arg(opt("tag", "Tag to use when installing from git").value_name("TAG"))
        .arg(opt("rev", "Specific commit to use when installing from git").value_name("SHA"))
        .arg(opt("path", "Filesystem path to local crate to install").value_name("PATH"))
        .arg(opt(
            "list",
            "list all installed packages and their versions",
        ))
        .arg_jobs()
        .arg(opt("force", "Force overwriting existing crates or binaries").short("f"))
        .arg_features()
        .arg(opt("debug", "Build in debug mode instead of release mode"))
        .arg_targets_bins_examples(
            "Install only the specified binary",
            "Install all binaries",
            "Install only the specified example",
            "Install all examples",
        )
        .arg_target_triple("Build for the target triple")
        .arg(opt("root", "Directory to install packages into").value_name("DIR"))
        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
        .after_help(
            "\
This command manages Cargo's local set of installed binary crates. Only packages
which have [[bin]] targets can be installed, and all binaries are installed into
the installation root's `bin` folder. The installation root is determined, in
order of precedence, by `--root`, `$CARGO_INSTALL_ROOT`, the `install.root`
configuration key, and finally the home directory (which is either
`$CARGO_HOME` if set or `$HOME/.cargo` by default).

There are multiple sources from which a crate can be installed. The default
location is crates.io but the `--git` and `--path` flags can change this source.
If the source contains more than one package (such as crates.io or a git
repository with multiple crates) the `<crate>` argument is required to indicate
which crate should be installed.

Crates from crates.io can optionally specify the version they wish to install
via the `--vers` flags, and similarly packages from git repositories can
optionally specify the branch, tag, or revision that should be installed. If a
crate has multiple binaries, the `--bin` argument can selectively install only
one of them, and if you'd rather install examples the `--example` argument can
be used as well.

By default cargo will refuse to overwrite existing binaries. The `--force` flag
enables overwriting existing binaries. Thus you can reinstall a crate with
`cargo install --force <crate>`.

Omitting the <crate> specification entirely will
install the crate in the current directory. That is, `install` is equivalent to
the more explicit `install --path .`.  This behaviour is deprecated, and no
longer supported as of the Rust 2018 edition.

If the source is crates.io or `--git` then by default the crate will be built
in a temporary target directory.  To avoid this, the target directory can be
specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
path.  In particular, this can be useful for caching build artifacts on
continuous integration systems.",
        )
}
+
/// Runs `cargo install` (or `cargo install --list`): resolves which source
/// the crate(s) come from — git, path, current directory, alternate registry,
/// or crates.io — then builds and installs the requested binaries.
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
    let registry = args.registry(config)?;

    // NOTE(review): presumably re-reads configuration relative to Cargo home
    // so the install uses global rather than project-local config — confirm
    // against Config::reload_rooted_at_cargo_home.
    config.reload_rooted_at_cargo_home()?;
    let mut compile_opts = args.compile_options(config, CompileMode::Build)?;

    // Installs default to release builds unless `--debug` is given.
    compile_opts.build_config.release = !args.is_present("debug");

    let krates = args.values_of("crate")
        .unwrap_or_default()
        .collect::<Vec<_>>();

    let mut from_cwd = false;

    // Source precedence: --git, then --path, then the current directory
    // (when no crate was named), then --registry, finally crates.io.
    let source = if let Some(url) = args.value_of("git") {
        let url = url.to_url()?;
        // Only one of --branch/--tag/--rev is honored, in this order;
        // with none given, the `master` branch is used.
        let gitref = if let Some(branch) = args.value_of("branch") {
            GitReference::Branch(branch.to_string())
        } else if let Some(tag) = args.value_of("tag") {
            GitReference::Tag(tag.to_string())
        } else if let Some(rev) = args.value_of("rev") {
            GitReference::Rev(rev.to_string())
        } else {
            GitReference::Branch("master".to_string())
        };
        SourceId::for_git(&url, gitref)?
    } else if let Some(path) = args.value_of_path("path", config) {
        SourceId::for_path(&path)?
    } else if krates.is_empty() {
        // Deprecated implicit `install --path .` behavior (see after_help).
        from_cwd = true;
        SourceId::for_path(config.cwd())?
    } else if let Some(registry) = registry {
        SourceId::alt_registry(config, &registry)?
    } else {
        SourceId::crates_io(config)?
    };

    let version = args.value_of("version");
    let root = args.value_of("root");

    if args.is_present("list") {
        // `--list` only prints what is installed; nothing is built.
        ops::install_list(root, config)?;
    } else {
        ops::install(
            root,
            krates,
            &source,
            from_cwd,
            version,
            &compile_opts,
            args.is_present("force"),
        )?;
    }
    Ok(())
}
diff --git a/src/bin/cargo/commands/locate_project.rs b/src/bin/cargo/commands/locate_project.rs
new file mode 100644 (file)
index 0000000..bf73a0a
--- /dev/null
@@ -0,0 +1,32 @@
+use command_prelude::*;
+
+use cargo::print_json;
+
/// Builds the clap definition for the `cargo locate-project` subcommand.
pub fn cli() -> App {
    subcommand("locate-project")
        .about("Print a JSON representation of a Cargo.toml file's location")
        .arg_manifest_path()
}
+
/// JSON payload printed by `cargo locate-project`: the manifest path,
/// borrowed as a UTF-8 string.
#[derive(Serialize)]
pub struct ProjectLocation<'a> {
    root: &'a str,
}
+
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let root = args.root_manifest(config)?;
+
+    let root = root.to_str()
+        .ok_or_else(|| {
+            format_err!(
+                "your package path contains characters \
+                 not representable in Unicode"
+            )
+        })
+        .map_err(|e| CliError::new(e, 1))?;
+
+    let location = ProjectLocation { root };
+
+    print_json(&location);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/login.rs b/src/bin/cargo/commands/login.rs
new file mode 100644 (file)
index 0000000..39f53f3
--- /dev/null
@@ -0,0 +1,58 @@
+use command_prelude::*;
+
+use std::io::{self, BufRead};
+
+use cargo::core::{Source, SourceId};
+use cargo::sources::RegistrySource;
+use cargo::util::{CargoError, CargoResultExt};
+use cargo::ops;
+
/// Builds the clap definition for the `cargo login` subcommand; the token is
/// an optional positional argument (read from stdin when absent).
pub fn cli() -> App {
    subcommand("login")
        .about(
            "Save an api token from the registry locally. \
             If token is not specified, it will be read from stdin.",
        )
        .arg(Arg::with_name("token"))
        .arg(opt("host", "Host to set the token for").value_name("HOST"))
        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}
+
/// Runs `cargo login`: obtains an API token (from the CLI argument or, when
/// absent, interactively from stdin after printing the registry's login URL)
/// and persists it via `ops::registry_login`.
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
    let registry = args.registry(config)?;

    let token = match args.value_of("token") {
        Some(token) => token.to_string(),
        None => {
            // Interactive path: figure out which host to point the user at.
            let host = match registry {
                Some(ref _registry) => {
                    // stdin prompting is not supported for alternate registries.
                    return Err(format_err!(
                        "token must be provided when \
                         --registry is provided."
                    ).into());
                }
                None => {
                    let src = SourceId::crates_io(config)?;
                    let mut src = RegistrySource::remote(&src, config);
                    // Refresh the registry index to read its config.json.
                    src.update()?;
                    // NOTE(review): both unwraps panic if the registry index
                    // has no config or the config lacks an `api` URL — confirm
                    // this is unreachable for crates.io after a successful update.
                    let config = src.config()?.unwrap();
                    args.value_of("host")
                        .map(|s| s.to_string())
                        .unwrap_or_else(|| config.api.unwrap())
                }
            };
            println!("please visit {}/me and paste the API Token below", host);
            let mut line = String::new();
            let input = io::stdin();
            input
                .lock()
                .read_line(&mut line)
                .chain_err(|| "failed to read stdin")
                .map_err(CargoError::from)?;
            // Strip the trailing newline (and surrounding whitespace).
            line.trim().to_string()
        }
    };

    ops::registry_login(config, token, registry)?;
    Ok(())
}
diff --git a/src/bin/cargo/commands/metadata.rs b/src/bin/cargo/commands/metadata.rs
new file mode 100644 (file)
index 0000000..2c3c428
--- /dev/null
@@ -0,0 +1,53 @@
+use command_prelude::*;
+
+use cargo::ops::{self, OutputMetadataOptions};
+use cargo::print_json;
+
+/// Builds the clap definition for the `cargo metadata` subcommand.
+pub fn cli() -> App {
+    subcommand("metadata")
+        .about(
+            "Output the resolved dependencies of a package, \
+             the concrete used versions including overrides, \
+             in machine-readable format",
+        )
+        .arg_features()
+        .arg(opt(
+            "no-deps",
+            "Output information only about the root package \
+             and don't fetch dependencies",
+        ))
+        .arg_manifest_path()
+        .arg(
+            opt("format-version", "Format version")
+                .value_name("VERSION")
+                .possible_value("1"),
+        )
+}
+
+/// Runs `cargo metadata`: resolves the workspace and prints the
+/// dependency metadata as JSON on stdout.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let version = match args.value_of("format-version") {
+        None => {
+            // Default to version 1, but nudge users to pin the format
+            // explicitly in case future versions change the schema.
+            config.shell().warn(
+                "\
+                 please specify `--format-version` flag explicitly \
+                 to avoid compatibility problems",
+            )?;
+            1
+        }
+        // clap restricts the flag to `possible_value("1")` above, so
+        // this parse cannot fail.
+        Some(version) => version.parse().unwrap(),
+    };
+
+    let options = OutputMetadataOptions {
+        features: values(args, "features"),
+        all_features: args.is_present("all-features"),
+        no_default_features: args.is_present("no-default-features"),
+        no_deps: args.is_present("no-deps"),
+        version,
+    };
+
+    let result = ops::output_metadata(&ws, &options)?;
+    print_json(&result);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/mod.rs b/src/bin/cargo/commands/mod.rs
new file mode 100644 (file)
index 0000000..057dc2f
--- /dev/null
@@ -0,0 +1,104 @@
+use command_prelude::*;
+
+/// Returns the clap definitions for every built-in cargo subcommand,
+/// used to assemble the top-level CLI and its help output.
+pub fn builtin() -> Vec<App> {
+    vec![
+        bench::cli(),
+        build::cli(),
+        check::cli(),
+        clean::cli(),
+        doc::cli(),
+        fetch::cli(),
+        fix::cli(),
+        generate_lockfile::cli(),
+        git_checkout::cli(),
+        init::cli(),
+        install::cli(),
+        locate_project::cli(),
+        login::cli(),
+        metadata::cli(),
+        new::cli(),
+        owner::cli(),
+        package::cli(),
+        pkgid::cli(),
+        publish::cli(),
+        read_manifest::cli(),
+        run::cli(),
+        rustc::cli(),
+        rustdoc::cli(),
+        search::cli(),
+        test::cli(),
+        uninstall::cli(),
+        update::cli(),
+        verify_project::cli(),
+        version::cli(),
+        yank::cli(),
+    ]
+}
+
+/// Maps a built-in subcommand name to its `exec` entry point.
+/// Returns `None` for unknown names (e.g. external `cargo-*` commands).
+pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
+    let f = match cmd {
+        "bench" => bench::exec,
+        "build" => build::exec,
+        "check" => check::exec,
+        "clean" => clean::exec,
+        "doc" => doc::exec,
+        "fetch" => fetch::exec,
+        "fix" => fix::exec,
+        "generate-lockfile" => generate_lockfile::exec,
+        "git-checkout" => git_checkout::exec,
+        "init" => init::exec,
+        "install" => install::exec,
+        "locate-project" => locate_project::exec,
+        "login" => login::exec,
+        "metadata" => metadata::exec,
+        "new" => new::exec,
+        "owner" => owner::exec,
+        "package" => package::exec,
+        "pkgid" => pkgid::exec,
+        "publish" => publish::exec,
+        "read-manifest" => read_manifest::exec,
+        "run" => run::exec,
+        "rustc" => rustc::exec,
+        "rustdoc" => rustdoc::exec,
+        "search" => search::exec,
+        "test" => test::exec,
+        "uninstall" => uninstall::exec,
+        "update" => update::exec,
+        "verify-project" => verify_project::exec,
+        "version" => version::exec,
+        "yank" => yank::exec,
+        _ => return None,
+    };
+    Some(f)
+}
+
+// One module per built-in subcommand; each exposes `cli()` and `exec()`.
+pub mod bench;
+pub mod build;
+pub mod check;
+pub mod clean;
+pub mod doc;
+pub mod fetch;
+pub mod fix;
+pub mod generate_lockfile;
+pub mod git_checkout;
+pub mod init;
+pub mod install;
+pub mod locate_project;
+pub mod login;
+pub mod metadata;
+pub mod new;
+pub mod owner;
+pub mod package;
+pub mod pkgid;
+pub mod publish;
+pub mod read_manifest;
+pub mod run;
+pub mod rustc;
+pub mod rustdoc;
+pub mod search;
+pub mod test;
+pub mod uninstall;
+pub mod update;
+pub mod verify_project;
+pub mod version;
+pub mod yank;
diff --git a/src/bin/cargo/commands/new.rs b/src/bin/cargo/commands/new.rs
new file mode 100644 (file)
index 0000000..417cebc
--- /dev/null
@@ -0,0 +1,27 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+/// Builds the clap definition for the `cargo new` subcommand.
+pub fn cli() -> App {
+    subcommand("new")
+        .about("Create a new cargo package at <path>")
+        .arg(Arg::with_name("path").required(true))
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+        .arg_new_opts()
+}
+
+/// Runs `cargo new`: creates the package and reports its name, which
+/// defaults to the last path component when `--name` is not given.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let opts = args.new_options(config)?;
+
+    ops::new(&opts, config)?;
+    let path = args.value_of("path").unwrap();
+    let package_name = if let Some(name) = args.value_of("name") {
+        name
+    } else {
+        path
+    };
+    config
+        .shell()
+        .status("Created", format!("{} `{}` package", opts.kind, package_name))?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/owner.rs b/src/bin/cargo/commands/owner.rs
new file mode 100644 (file)
index 0000000..fb25124
--- /dev/null
@@ -0,0 +1,46 @@
+use command_prelude::*;
+
+use cargo::ops::{self, OwnersOptions};
+
+/// Builds the clap definition for the `cargo owner` subcommand.
+pub fn cli() -> App {
+    subcommand("owner")
+        .about("Manage the owners of a crate on the registry")
+        .arg(Arg::with_name("crate"))
+        .arg(multi_opt("add", "LOGIN", "Name of a user or team to add as an owner").short("a"))
+        .arg(
+            multi_opt(
+                "remove",
+                "LOGIN",
+                "Name of a user or team to remove as an owner",
+            ).short("r"),
+        )
+        .arg(opt("list", "List owners of a crate").short("l"))
+        .arg(opt("index", "Registry index to modify owners for").value_name("INDEX"))
+        .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+        .after_help("\
+This command will modify the owners for a crate on the specified registry (or
+default). Owners of a crate can upload new versions and yank old versions.
+Explicitly named owners can also modify the set of owners, so take care!
+
+    See https://doc.rust-lang.org/cargo/reference/publishing.html#cargo-owner
+    for detailed documentation and troubleshooting.",
+        )
+}
+
+/// Runs `cargo owner`: translates the CLI flags into `OwnersOptions`
+/// and delegates to `ops::modify_owners`.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+    let opts = OwnersOptions {
+        krate: args.value_of("crate").map(|s| s.to_string()),
+        token: args.value_of("token").map(|s| s.to_string()),
+        index: args.value_of("index").map(|s| s.to_string()),
+        to_add: args.values_of("add")
+            .map(|xs| xs.map(|s| s.to_string()).collect()),
+        to_remove: args.values_of("remove")
+            .map(|xs| xs.map(|s| s.to_string()).collect()),
+        list: args.is_present("list"),
+        registry,
+    };
+    ops::modify_owners(config, &opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/package.rs b/src/bin/cargo/commands/package.rs
new file mode 100644 (file)
index 0000000..f5e9d91
--- /dev/null
@@ -0,0 +1,48 @@
+use command_prelude::*;
+
+use cargo::ops::{self, PackageOpts};
+
+/// Builds the clap definition for the `cargo package` subcommand.
+pub fn cli() -> App {
+    subcommand("package")
+        .about("Assemble the local package into a distributable tarball")
+        .arg(
+            opt(
+                "list",
+                "Print files included in a package without making one",
+            ).short("l"),
+        )
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "no-metadata",
+            "Ignore warnings about a lack of human-usable metadata",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
+        .arg_target_triple("Build for the target triple")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_jobs()
+}
+
+/// Runs `cargo package`: builds the `.crate` tarball for the current
+/// workspace package. Note the negative flags (`--no-verify`,
+/// `--no-metadata`) are inverted into positive option fields.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    ops::package(
+        &ws,
+        &PackageOpts {
+            config,
+            verify: !args.is_present("no-verify"),
+            list: args.is_present("list"),
+            check_metadata: !args.is_present("no-metadata"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            registry: None,
+        },
+    )?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/pkgid.rs b/src/bin/cargo/commands/pkgid.rs
new file mode 100644 (file)
index 0000000..fd47a54
--- /dev/null
@@ -0,0 +1,41 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+/// Builds the clap definition for the `cargo pkgid` subcommand.
+pub fn cli() -> App {
+    subcommand("pkgid")
+        .about("Print a fully qualified package specification")
+        .arg(Arg::with_name("spec"))
+        .arg_package("Argument to get the package id specifier for")
+        .arg_manifest_path()
+        .after_help(
+            "\
+Given a <spec> argument, print out the fully qualified package id specifier.
+This command will generate an error if <spec> is ambiguous as to which package
+it refers to in the dependency graph. If no <spec> is given, then the pkgid for
+the local package is printed.
+
+This command requires that a lockfile is available and dependencies have been
+fetched.
+
+Example Package IDs
+
+           pkgid                  |  name  |  version  |          url
+    |-----------------------------|--------|-----------|---------------------|
+     foo                          | foo    | *         | *
+     foo:1.2.3                    | foo    | 1.2.3     | *
+     crates.io/foo                | foo    | *         | *://crates.io/foo
+     crates.io/foo#1.2.3          | foo    | 1.2.3     | *://crates.io/foo
+     crates.io/bar#foo:1.2.3      | foo    | 1.2.3     | *://crates.io/bar
+     http://crates.io/foo#1.2.3   | foo    | 1.2.3     | http://crates.io/foo
+",
+        )
+}
+
+/// Runs `cargo pkgid`: resolves the spec (positional `<spec>` wins over
+/// `--package`) and prints the fully qualified package id.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let spec = args.value_of("spec").or_else(|| args.value_of("package"));
+    let spec = ops::pkgid(&ws, spec)?;
+    println!("{}", spec);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/publish.rs b/src/bin/cargo/commands/publish.rs
new file mode 100644 (file)
index 0000000..b50d361
--- /dev/null
@@ -0,0 +1,46 @@
+use command_prelude::*;
+
+use cargo::ops::{self, PublishOpts};
+
+/// Builds the clap definition for the `cargo publish` subcommand.
+pub fn cli() -> App {
+    subcommand("publish")
+        .about("Upload a package to the registry")
+        .arg_index()
+        .arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
+        .arg_target_triple("Build for the target triple")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_jobs()
+        .arg(opt("dry-run", "Perform all checks without uploading"))
+        .arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
+}
+
+/// Runs `cargo publish`: packages the current workspace package and
+/// uploads it to the selected registry (or only verifies on `--dry-run`).
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+    let ws = args.workspace(config)?;
+    let index = args.index(config)?;
+
+    ops::publish(
+        &ws,
+        &PublishOpts {
+            config,
+            token: args.value_of("token").map(|s| s.to_string()),
+            index,
+            // `--no-verify` is inverted into a positive `verify` field.
+            verify: !args.is_present("no-verify"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            dry_run: args.is_present("dry-run"),
+            registry,
+        },
+    )?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/read_manifest.rs b/src/bin/cargo/commands/read_manifest.rs
new file mode 100644 (file)
index 0000000..300bfe9
--- /dev/null
@@ -0,0 +1,21 @@
+use command_prelude::*;
+
+use cargo::print_json;
+
+/// Builds the clap definition for the deprecated `cargo read-manifest`
+/// subcommand.
+pub fn cli() -> App {
+    subcommand("read-manifest")
+        .about(
+            "\
+Print a JSON representation of a Cargo.toml manifest.
+
+Deprecated, use `cargo metadata --no-deps` instead.\
+",
+        )
+        .arg_manifest_path()
+}
+
+/// Runs `cargo read-manifest`: serializes the current package of the
+/// workspace to JSON on stdout.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    print_json(&ws.current()?);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/run.rs b/src/bin/cargo/commands/run.rs
new file mode 100644 (file)
index 0000000..8ddf262
--- /dev/null
@@ -0,0 +1,94 @@
+use command_prelude::*;
+
+use cargo::core::Verbosity;
+use cargo::ops::{self, CompileFilter};
+
+/// Builds the clap definition for the `cargo run` subcommand
+/// (aliased as `cargo r`).
+pub fn cli() -> App {
+    subcommand("run")
+        .alias("r")
+        .setting(AppSettings::TrailingVarArg)
+        .about("Run the main binary of the local package (src/main.rs)")
+        .arg(Arg::with_name("args").multiple(true))
+        .arg_targets_bin_example(
+            "Name of the bin target to run",
+            "Name of the example target to run",
+        )
+        .arg_package("Package with the target to run")
+        .arg_jobs()
+        .arg_release("Build artifacts in release mode, with optimizations")
+        .arg_features()
+        .arg_target_triple("Build for the target triple")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_message_format()
+        .after_help(
+            "\
+If neither `--bin` nor `--example` are given, then if the package only has one
+bin target it will be run. Otherwise `--bin` specifies the bin target to run,
+and `--example` specifies the example target to run. At most one of `--bin` or
+`--example` can be provided.
+
+All the arguments following the two dashes (`--`) are passed to the binary to
+run. If you're passing arguments to both Cargo and the binary, the ones after
+`--` go to the binary, the ones before go to Cargo.
+",
+        )
+}
+
+/// Runs `cargo run`: compiles the selected bin/example target and
+/// executes it, translating the child's exit status into a `CliResult`.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
+    if !args.is_present("example") && !args.is_present("bin") {
+        // No explicit target: honor a single `default-run` manifest key
+        // if exactly one selected package declares one.
+        let default_runs: Vec<_> = compile_opts
+            .spec
+            .get_packages(&ws)?
+            .iter()
+            .filter_map(|pkg| pkg.manifest().default_run())
+            .collect();
+        if default_runs.len() == 1 {
+            compile_opts.filter = CompileFilter::new(
+                false,
+                vec![default_runs[0].to_owned()],
+                false,
+                vec![],
+                false,
+                vec![],
+                false,
+                vec![],
+                false,
+                false,
+            );
+        } else {
+            // ops::run will take care of errors if len pkgs != 1.
+            compile_opts.filter = CompileFilter::Default {
+                // Force this to false because the code in ops::run is not
+                // able to pre-check features before compilation starts to
+                // enforce that only 1 binary is built.
+                required_features_filterable: false,
+            };
+        }
+    };
+    match ops::run(&ws, &compile_opts, &values(args, "args"))? {
+        None => Ok(()),
+        Some(err) => {
+            // If we never actually spawned the process then that sounds pretty
+            // bad and we always want to forward that up.
+            let exit = match err.exit {
+                Some(exit) => exit,
+                None => return Err(CliError::new(err.into(), 101)),
+            };
+
+            // If `-q` was passed then we suppress extra error information about
+            // a failed process, we assume the process itself printed out enough
+            // information about why it failed so we don't do so as well
+            let exit_code = exit.code().unwrap_or(101);
+            let is_quiet = config.shell().verbosity() == Verbosity::Quiet;
+            Err(if is_quiet {
+                CliError::code(exit_code)
+            } else {
+                CliError::new(err.into(), exit_code)
+            })
+        }
+    }
+}
diff --git a/src/bin/cargo/commands/rustc.rs b/src/bin/cargo/commands/rustc.rs
new file mode 100644 (file)
index 0000000..dd2f1aa
--- /dev/null
@@ -0,0 +1,74 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+/// Builds the clap definition for the `cargo rustc` subcommand.
+pub fn cli() -> App {
+    subcommand("rustc")
+        .setting(AppSettings::TrailingVarArg)
+        .about("Compile a package and all of its dependencies")
+        .arg(Arg::with_name("args").multiple(true))
+        .arg_package("Package to build")
+        .arg_jobs()
+        .arg_targets_all(
+            "Build only this package's library",
+            "Build only the specified binary",
+            "Build all binaries",
+            "Build only the specified example",
+            "Build all examples",
+            "Build only the specified test target",
+            "Build all tests",
+            "Build only the specified bench target",
+            "Build all benches",
+            "Build all targets",
+        )
+        .arg_release("Build artifacts in release mode, with optimizations")
+        .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
+        .arg_features()
+        .arg_target_triple("Target triple which compiles will be for")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_message_format()
+        .after_help(
+            "\
+The specified target for the current package (or package specified by SPEC if
+provided) will be compiled along with all of its dependencies. The specified
+<args>... will all be passed to the final compiler invocation, not any of the
+dependencies. Note that the compiler will still unconditionally receive
+arguments such as -L, --extern, and --crate-type, and the specified <args>...
+will simply be added to the compiler invocation.
+
+This command requires that only one target is being compiled. If more than one
+target is available for the current package the filters of --lib, --bin, etc,
+must be used to select which target is compiled. To pass flags to all compiler
+processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
+`build.rustflags` configuration option.
+",
+        )
+}
+
+/// Runs `cargo rustc`: compiles a single target, forwarding any trailing
+/// `<args>...` to the final rustc invocation only.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    // `--profile` selects the compile mode; absent means a normal build.
+    let mode = match args.value_of("profile") {
+        Some("dev") | None => CompileMode::Build,
+        Some("test") => CompileMode::Test,
+        Some("bench") => CompileMode::Bench,
+        Some("check") => CompileMode::Check { test: false },
+        Some(mode) => {
+            let err = format_err!(
+                "unknown profile: `{}`, use dev,
+                                   test, or bench",
+                mode
+            );
+            return Err(CliError::new(err, 101));
+        }
+    };
+    let mut compile_opts = args.compile_options_for_single_package(config, mode)?;
+    // Only set extra rustc args when some were actually given, so the
+    // `None` case keeps the default invocation untouched.
+    let target_args = values(args, "args");
+    compile_opts.target_rustc_args = if target_args.is_empty() {
+        None
+    } else {
+        Some(target_args)
+    };
+    ops::compile(&ws, &compile_opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/rustdoc.rs b/src/bin/cargo/commands/rustdoc.rs
new file mode 100644 (file)
index 0000000..8593bd2
--- /dev/null
@@ -0,0 +1,67 @@
+use command_prelude::*;
+
+use cargo::ops::{self, DocOptions};
+
+/// Builds the clap definition for the `cargo rustdoc` subcommand.
+pub fn cli() -> App {
+    subcommand("rustdoc")
+        .setting(AppSettings::TrailingVarArg)
+        .about("Build a package's documentation, using specified custom flags.")
+        .arg(Arg::with_name("args").multiple(true))
+        .arg(opt(
+            "open",
+            "Opens the docs in a browser after the operation",
+        ))
+        .arg_package("Package to document")
+        .arg_jobs()
+        .arg_targets_all(
+            "Build only this package's library",
+            "Build only the specified binary",
+            "Build all binaries",
+            "Build only the specified example",
+            "Build all examples",
+            "Build only the specified test target",
+            "Build all tests",
+            "Build only the specified bench target",
+            "Build all benches",
+            "Build all targets",
+        )
+        .arg_release("Build artifacts in release mode, with optimizations")
+        .arg_features()
+        .arg_target_triple("Build for the target triple")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_message_format()
+        .after_help(
+            "\
+The specified target for the current package (or package specified by SPEC if
+provided) will be documented with the specified <opts>... being passed to the
+final rustdoc invocation. Dependencies will not be documented as part of this
+command.  Note that rustdoc will still unconditionally receive arguments such
+as -L, --extern, and --crate-type, and the specified <opts>...  will simply be
+added to the rustdoc invocation.
+
+If the --package argument is given, then SPEC is a package id specification
+which indicates which package should be documented. If it is not given, then the
+current package is documented. For more information on SPEC and its format, see
+the `cargo help pkgid` command.
+",
+        )
+}
+
+/// Runs `cargo rustdoc`: documents a single package (`deps: false`),
+/// forwarding any trailing `<args>...` to the rustdoc invocation.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+    let mut compile_opts =
+        args.compile_options_for_single_package(config, CompileMode::Doc { deps: false })?;
+    // Only set extra rustdoc args when some were actually given.
+    let target_args = values(args, "args");
+    compile_opts.target_rustdoc_args = if target_args.is_empty() {
+        None
+    } else {
+        Some(target_args)
+    };
+    let doc_opts = DocOptions {
+        open_result: args.is_present("open"),
+        compile_opts,
+    };
+    ops::doc(&ws, &doc_opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/search.rs b/src/bin/cargo/commands/search.rs
new file mode 100644 (file)
index 0000000..0501d8e
--- /dev/null
@@ -0,0 +1,30 @@
+use command_prelude::*;
+
+use std::cmp::min;
+
+use cargo::ops;
+
+/// Builds the clap definition for the `cargo search` subcommand.
+pub fn cli() -> App {
+    subcommand("search")
+        .about("Search packages in crates.io")
+        .arg(Arg::with_name("query").multiple(true))
+        .arg_index()
+        .arg(
+            opt(
+                "limit",
+                "Limit the number of results (default: 10, max: 100)",
+            ).value_name("LIMIT"),
+        )
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+}
+
+/// Runs `cargo search`: joins the query words with `+` and queries the
+/// registry, clamping the result limit to at most 100 (default 10).
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+    let index = args.index(config)?;
+    let limit = args.value_of_u32("limit")?;
+    let limit = min(100, limit.unwrap_or(10));
+    let query: Vec<&str> = args.values_of("query").unwrap_or_default().collect();
+    let query: String = query.join("+");
+    ops::search(&query, config, index, limit, registry)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/test.rs b/src/bin/cargo/commands/test.rs
new file mode 100644 (file)
index 0000000..0e9b577
--- /dev/null
@@ -0,0 +1,140 @@
+use command_prelude::*;
+
+use cargo::ops::{self, CompileFilter};
+
+/// Builds the clap definition for the `cargo test` subcommand
+/// (aliased as `cargo t`).
+pub fn cli() -> App {
+    subcommand("test")
+        .alias("t")
+        .setting(AppSettings::TrailingVarArg)
+        .about("Execute all unit and integration tests of a local package")
+        .arg(
+            Arg::with_name("TESTNAME")
+                .help("If specified, only run tests containing this string in their names"),
+        )
+        .arg(
+            Arg::with_name("args")
+                .help("Arguments for the test binary")
+                .multiple(true)
+                .last(true),
+        )
+        .arg_targets_all(
+            "Test only this package's library",
+            "Test only the specified binary",
+            "Test all binaries",
+            "Test only the specified example",
+            "Test all examples",
+            "Test only the specified test target",
+            "Test all tests",
+            "Test only the specified bench target",
+            "Test all benches",
+            "Test all targets",
+        )
+        .arg(opt("doc", "Test only this library's documentation"))
+        .arg(opt("no-run", "Compile, but don't run tests"))
+        .arg(opt("no-fail-fast", "Run all tests regardless of failure"))
+        .arg_package_spec(
+            "Package to run tests for",
+            "Test all packages in the workspace",
+            "Exclude packages from the test",
+        )
+        .arg_jobs()
+        .arg_release("Build artifacts in release mode, with optimizations")
+        .arg_features()
+        .arg_target_triple("Build for the target triple")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_message_format()
+        .after_help(
+            "\
+The test filtering argument `TESTNAME` and all the arguments following the
+two dashes (`--`) are passed to the test binaries and thus to libtest
+(rustc's built in unit-test and micro-benchmarking framework).  If you're
+passing arguments to both Cargo and the binary, the ones after `--` go to the
+binary, the ones before go to Cargo.  For details about libtest's arguments see
+the output of `cargo test -- --help`.  As an example, this will run all
+tests with `foo` in their name on 3 threads in parallel:
+
+    cargo test foo -- --test-threads 3
+
+If the --package argument is given, then SPEC is a package id specification
+which indicates which package should be tested. If it is not given, then the
+current package is tested. For more information on SPEC and its format, see the
+`cargo help pkgid` command.
+
+All packages in the workspace are tested if the `--all` flag is supplied. The
+`--all` flag is automatically assumed for a virtual manifest.
+Note that `--exclude` has to be specified in conjunction with the `--all` flag.
+
+The --jobs argument affects the building of the test executable but does
+not affect how many jobs are used when running the tests. The default value
+for the --jobs argument is the number of CPUs. If you want to control the
+number of simultaneous running test cases, pass the `--test-threads` option
+to the test binaries:
+
+    cargo test -- --test-threads=1
+
+Compilation can be configured via the `test` profile in the manifest.
+
+By default the rust test harness hides output from test execution to
+keep results readable. Test output can be recovered (e.g. for debugging)
+by passing `--nocapture` to the test binaries:
+
+    cargo test -- --nocapture
+
+To get the list of all options available for the test binaries use this:
+
+    cargo test -- --help
+",
+        )
+}
+
+/// Runs `cargo test`: builds and runs the selected test targets (or
+/// doctests with `--doc`), mapping any test failure to a `CliError`.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let mut compile_opts = args.compile_options(config, CompileMode::Test)?;
+
+    let doc = args.is_present("doc");
+    if doc {
+        // `--doc` is exclusive: it conflicts with target selection flags
+        // and switches the whole run to doctest mode on the library only.
+        if let CompileFilter::Only { .. } = compile_opts.filter {
+            return Err(CliError::new(format_err!("Can't mix --doc with other target selecting options"), 101))
+        }
+        compile_opts.build_config.mode = CompileMode::Doctest;
+        compile_opts.filter = ops::CompileFilter::new(
+            true,
+            Vec::new(),
+            false,
+            Vec::new(),
+            false,
+            Vec::new(),
+            false,
+            Vec::new(),
+            false,
+            false,
+        );
+    }
+
+    let ops = ops::TestOptions {
+        no_run: args.is_present("no-run"),
+        no_fail_fast: args.is_present("no-fail-fast"),
+        compile_opts,
+    };
+
+    // TESTNAME is actually an argument of the test binary, but it's
+    // important so we explicitly mention it and reconfigure
+    let mut test_args = vec![];
+    test_args.extend(args.value_of("TESTNAME").into_iter().map(|s| s.to_string()));
+    test_args.extend(
+        args.values_of("args")
+            .unwrap_or_default()
+            .map(|s| s.to_string()),
+    );
+
+    // Propagate the child's exit code when available; fall back to 101.
+    let err = ops::run_tests(&ws, &ops, &test_args)?;
+    match err {
+        None => Ok(()),
+        Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+            Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i),
+            None => CliError::new(err.into(), 101),
+        }),
+    }
+}
diff --git a/src/bin/cargo/commands/uninstall.rs b/src/bin/cargo/commands/uninstall.rs
new file mode 100644 (file)
index 0000000..fb8fbfb
--- /dev/null
@@ -0,0 +1,30 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+/// Builds the clap definition for the `cargo uninstall` subcommand.
+pub fn cli() -> App {
+    subcommand("uninstall")
+        .about("Remove a Rust binary")
+        .arg(Arg::with_name("spec").multiple(true))
+        .arg_package_spec_simple("Package to uninstall")
+        .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME"))
+        .arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
+        .after_help(
+            "\
+The argument SPEC is a package id specification (see `cargo help pkgid`) to
+specify which crate should be uninstalled. By default all binaries are
+uninstalled for a crate but the `--bin` and `--example` flags can be used to
+only uninstall particular binaries.
+",
+        )
+}
+
+/// Runs `cargo uninstall`: collects the package specs (positional
+/// `spec` values win over `--package`) and removes their binaries.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let root = args.value_of("root");
+    let specs = args
+        .values_of("spec")
+        .unwrap_or_else(|| args.values_of("package").unwrap_or_default())
+        .collect();
+    ops::uninstall(root, specs, &values(args, "bin"), config)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/update.rs b/src/bin/cargo/commands/update.rs
new file mode 100644 (file)
index 0000000..c5a992a
--- /dev/null
@@ -0,0 +1,51 @@
+use command_prelude::*;
+
+use cargo::ops::{self, UpdateOptions};
+
+/// Builds the clap definition for the `cargo update` subcommand.
+pub fn cli() -> App {
+    subcommand("update")
+        .about("Update dependencies as recorded in the local lock file")
+        .arg_package_spec_simple("Package to update")
+        .arg(opt(
+            "aggressive",
+            "Force updating all dependencies of <name> as well",
+        ))
+        .arg(opt("precise", "Update a single dependency to exactly PRECISE").value_name("PRECISE"))
+        .arg_manifest_path()
+        .after_help(
+            "\
+This command requires that a `Cargo.lock` already exists as generated by
+`cargo build` or related commands.
+
+If SPEC is given, then a conservative update of the lockfile will be
+performed. This means that only the dependency specified by SPEC will be
+updated. Its transitive dependencies will be updated only if SPEC cannot be
+updated without updating dependencies.  All other dependencies will remain
+locked at their currently recorded versions.
+
+If PRECISE is specified, then --aggressive must not also be specified. The
+argument PRECISE is a string representing a precise revision that the package
+being updated should be updated to. For example, if the package comes from a git
+repository, then PRECISE would be the exact revision that the repository should
+be updated to.
+
+If SPEC is not given, then all dependencies will be re-resolved and
+updated.
+
+For more information about package id specifications, see `cargo help pkgid`.
+",
+        )
+}
+
+/// Runs `cargo update`: re-resolves and rewrites `Cargo.lock` for the
+/// requested packages (or all of them when none are given).
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let update_opts = UpdateOptions {
+        aggressive: args.is_present("aggressive"),
+        precise: args.value_of("precise"),
+        to_update: values(args, "package"),
+        config,
+    };
+    ops::update_lockfile(&ws, &update_opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/verify_project.rs b/src/bin/cargo/commands/verify_project.rs
new file mode 100644 (file)
index 0000000..eea65c7
--- /dev/null
@@ -0,0 +1,45 @@
+use command_prelude::*;
+
+use std::collections::HashMap;
+use std::process;
+use std::fs::File;
+use std::io::Read;
+
+use toml;
+
+use cargo::print_json;
+
+/// Builds the clap definition for `cargo verify-project`; the only option
+/// beyond the subcommand itself is the standard `--manifest-path`.
+pub fn cli() -> App {
+    subcommand("verify-project")
+        .about("Check correctness of crate manifest")
+        .arg_manifest_path()
+}
+
+/// Entry point for `cargo verify-project`: locates the manifest, reads it,
+/// and checks that it parses as TOML. On success prints the JSON object
+/// `{"success": "true"}`; on any failure prints `{"invalid": <reason>}`
+/// and exits the whole process with status 1.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    // Emit a single-entry JSON map describing the failure and terminate
+    // immediately with a non-zero exit code (this never returns).
+    fn fail(reason: &str, value: &str) -> ! {
+        let mut h = HashMap::new();
+        h.insert(reason.to_string(), value.to_string());
+        print_json(&h);
+        process::exit(1)
+    }
+
+    let mut contents = String::new();
+    let filename = match args.root_manifest(config) {
+        Ok(filename) => filename,
+        Err(e) => fail("invalid", &e.to_string()),
+    };
+
+    // Read the manifest; I/O errors (missing file, permissions, bad UTF-8)
+    // are reported through the same JSON failure channel.
+    let file = File::open(&filename);
+    match file.and_then(|mut f| f.read_to_string(&mut contents)) {
+        Ok(_) => {}
+        Err(e) => fail("invalid", &format!("error reading file: {}", e)),
+    };
+    // Only syntactic TOML validity is checked here, not manifest semantics.
+    if contents.parse::<toml::Value>().is_err() {
+        fail("invalid", "invalid-format");
+    }
+
+    let mut h = HashMap::new();
+    h.insert("success".to_string(), "true".to_string());
+    print_json(&h);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/version.rs b/src/bin/cargo/commands/version.rs
new file mode 100644 (file)
index 0000000..350480c
--- /dev/null
@@ -0,0 +1,14 @@
+use command_prelude::*;
+
+use cli;
+
+/// Builds the clap definition for `cargo version`; it takes no options of
+/// its own.
+pub fn cli() -> App {
+    subcommand("version").about("Show version information")
+}
+
+/// Entry point for `cargo version`: prints the version string produced by
+/// `cli::get_version_string`, with extra detail when `--verbose` was given.
+pub fn exec(_config: &mut Config, args: &ArgMatches) -> CliResult {
+    // NOTE(review): "verbose" is not declared by this subcommand's cli();
+    // presumably it is a global flag defined by the top-level App — confirm.
+    let verbose = args.occurrences_of("verbose") > 0;
+    let version = cli::get_version_string(verbose);
+    print!("{}", version);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/yank.rs b/src/bin/cargo/commands/yank.rs
new file mode 100644 (file)
index 0000000..150474b
--- /dev/null
@@ -0,0 +1,43 @@
+use command_prelude::*;
+
+use cargo::ops;
+
+/// Builds the clap definition for `cargo yank`: a positional crate name
+/// plus `--vers`, `--undo`, `--index`, `--token`, and `--registry`.
+pub fn cli() -> App {
+    subcommand("yank")
+        .about("Remove a pushed crate from the index")
+        .arg(Arg::with_name("crate"))
+        .arg(opt("vers", "The version to yank or un-yank").value_name("VERSION"))
+        .arg(opt(
+            "undo",
+            "Undo a yank, putting a version back into the index",
+        ))
+        .arg(opt("index", "Registry index to yank from").value_name("INDEX"))
+        .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+        .after_help(
+            "\
+The yank command removes a previously pushed crate's version from the server's
+index. This command does not delete any data, and the crate will still be
+available for download via the registry's download link.
+
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+",
+        )
+}
+
+/// Entry point for `cargo yank`: resolves the registry choice from the
+/// arguments and forwards every option, owned as `Option<String>`, to
+/// `ops::yank`, which performs the actual registry API call.
+pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
+    let registry = args.registry(config)?;
+
+    ops::yank(
+        config,
+        args.value_of("crate").map(|s| s.to_string()),
+        args.value_of("vers").map(|s| s.to_string()),
+        args.value_of("token").map(|s| s.to_string()),
+        args.value_of("index").map(|s| s.to_string()),
+        args.is_present("undo"),
+        registry,
+    )?;
+    Ok(())
+}
diff --git a/src/bin/cargo/main.rs b/src/bin/cargo/main.rs
new file mode 100644 (file)
index 0000000..e0333ee
--- /dev/null
@@ -0,0 +1,225 @@
+#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))] // large project
+#![cfg_attr(feature = "cargo-clippy", allow(redundant_closure))]  // there's a false positive
+
+extern crate cargo;
+extern crate clap;
+extern crate env_logger;
+#[macro_use]
+extern crate failure;
+extern crate git2_curl;
+extern crate log;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde_json;
+extern crate toml;
+
+use std::env;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::collections::BTreeSet;
+
+use cargo::core::shell::Shell;
+use cargo::util::{self, lev_distance, CargoResult, CliResult, Config};
+use cargo::util::{CliError, ProcessError};
+
+mod cli;
+mod command_prelude;
+mod commands;
+
+use command_prelude::*;
+
+/// Process entry point: sets up logging and configuration, handles the
+/// special `cargo fix` rustc-wrapper mode, and otherwise dispatches to the
+/// CLI. Any `CliError` is reported via `cargo::exit_with_error`, which
+/// terminates the process with the appropriate exit code.
+fn main() {
+    env_logger::init();
+    cargo::core::maybe_allow_nightly_features();
+
+    // If loading configuration itself fails we have no Config-owned shell
+    // to print with, so fall back to a freshly created one.
+    let mut config = match Config::default() {
+        Ok(cfg) => cfg,
+        Err(e) => {
+            let mut shell = Shell::new();
+            cargo::exit_with_error(e.into(), &mut shell)
+        }
+    };
+
+    // NOTE(review): Ok(true) appears to mean this process was invoked as a
+    // rustc wrapper by `cargo fix` and the work is already done — confirm
+    // against `ops::fix_maybe_exec_rustc`.
+    let result = match cargo::ops::fix_maybe_exec_rustc() {
+        Ok(true) => Ok(()),
+        Ok(false) => {
+            init_git_transports(&config);
+            let _token = cargo::util::job::setup();
+            cli::main(&mut config)
+        }
+        Err(e) => Err(CliError::from(e)),
+    };
+
+    match result {
+        Err(e) => cargo::exit_with_error(e, &mut *config.shell()),
+        Ok(()) => {}
+    }
+}
+
+/// Looks up `alias.<command>` in the cargo configuration and returns the
+/// expansion as a list of argument strings, or `None` when no alias is
+/// defined.
+///
+/// Aliases may be configured either as a single whitespace-separated
+/// string or as a TOML list; the string form is tried first, and if that
+/// lookup errors (e.g. the value is not a string) the list form is tried.
+fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {
+    let alias_name = format!("alias.{}", command);
+    let mut result = Ok(None);
+    match config.get_string(&alias_name) {
+        Ok(value) => {
+            if let Some(record) = value {
+                let alias_commands = record
+                    .val
+                    .split_whitespace()
+                    .map(|s| s.to_string())
+                    .collect();
+                result = Ok(Some(alias_commands));
+            }
+        }
+        Err(_) => {
+            // The string lookup failed; fall back to interpreting the value
+            // as a list. Errors from this second lookup do propagate.
+            let value = config.get_list(&alias_name)?;
+            if let Some(record) = value {
+                let alias_commands: Vec<String> =
+                    record.val.iter().map(|s| s.0.to_string()).collect();
+                result = Ok(Some(alias_commands));
+            }
+        }
+    }
+    result
+}
+
+/// List all runnable commands: external `cargo-<name>` executables found
+/// in the search directories (`$CARGO_HOME/bin` and `$PATH`), plus the
+/// built-in subcommands. Unreadable directories and entries are skipped
+/// silently.
+fn list_commands(config: &Config) -> BTreeSet<CommandInfo> {
+    let prefix = "cargo-";
+    let suffix = env::consts::EXE_SUFFIX;
+    let mut commands = BTreeSet::new();
+    for dir in search_directories(config) {
+        let entries = match fs::read_dir(dir) {
+            Ok(entries) => entries,
+            _ => continue,
+        };
+        for entry in entries.filter_map(|e| e.ok()) {
+            let path = entry.path();
+            let filename = match path.file_name().and_then(|s| s.to_str()) {
+                Some(filename) => filename,
+                _ => continue,
+            };
+            // Only files named `cargo-<name><EXE_SUFFIX>` qualify.
+            if !filename.starts_with(prefix) || !filename.ends_with(suffix) {
+                continue;
+            }
+            if is_executable(entry.path()) {
+                // Strip the `cargo-` prefix and platform suffix to recover
+                // the subcommand name.
+                let end = filename.len() - suffix.len();
+                commands.insert(CommandInfo::External {
+                    name: filename[prefix.len()..end].to_string(),
+                    path: path.clone(),
+                });
+            }
+        }
+    }
+
+    for cmd in commands::builtin() {
+        commands.insert(CommandInfo::BuiltIn {
+            name: cmd.get_name().to_string(),
+            about: cmd.p.meta.about.map(|s| s.to_string()),
+        });
+    }
+
+    commands
+}
+
+/// Finds the known command name closest to `cmd` by Levenshtein distance,
+/// used for "did you mean ...?" suggestions. Returns `None` when nothing
+/// is close enough.
+fn find_closest(config: &Config, cmd: &str) -> Option<String> {
+    let cmds = list_commands(config);
+    // Only consider candidates with a lev_distance of 3 or less so we don't
+    // suggest out-of-the-blue options.
+    cmds.into_iter()
+        .map(|c| c.name())
+        .map(|c| (lev_distance(&c, cmd), c))
+        .filter(|&(d, _)| d < 4)
+        .min_by_key(|a| a.0)
+        .map(|slot| slot.1)
+}
+
+/// Runs an external `cargo-<cmd>` executable found on the search path,
+/// forwarding `args`. If no such executable exists, fails with exit code
+/// 101 and, when possible, a "did you mean" suggestion. If the subcommand
+/// itself exits with an error code, that code is propagated.
+fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult {
+    let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
+    let path = search_directories(config)
+        .iter()
+        .map(|dir| dir.join(&command_exe))
+        .find(|file| is_executable(file));
+    let command = match path {
+        Some(command) => command,
+        None => {
+            let err = match find_closest(config, cmd) {
+                Some(closest) => format_err!(
+                    "no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
+                    cmd,
+                    closest
+                ),
+                None => format_err!("no such subcommand: `{}`", cmd),
+            };
+            return Err(CliError::new(err, 101));
+        }
+    };
+
+    // Expose the path of this cargo binary to the child via CARGO_ENV so
+    // the subcommand can invoke cargo itself.
+    let cargo_exe = config.cargo_exe()?;
+    let err = match util::process(&command)
+        .env(cargo::CARGO_ENV, cargo_exe)
+        .args(args)
+        .exec_replace()
+    {
+        Ok(()) => return Ok(()),
+        Err(e) => e,
+    };
+
+    // If the child ran but failed, surface its own exit code rather than
+    // the generic 101.
+    if let Some(perr) = err.downcast_ref::<ProcessError>() {
+        if let Some(code) = perr.exit.as_ref().and_then(|c| c.code()) {
+            return Err(CliError::code(code));
+        }
+    }
+    Err(CliError::new(err, 101))
+}
+
+/// Unix: a path is "executable" when it is a regular file with any of the
+/// execute permission bits (0o111) set. Metadata errors count as not
+/// executable.
+#[cfg(unix)]
+fn is_executable<P: AsRef<Path>>(path: P) -> bool {
+    use std::os::unix::prelude::*;
+    fs::metadata(path)
+        .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)
+        .unwrap_or(false)
+}
+/// Windows: there is no execute permission bit, so any regular file is
+/// considered executable (the caller already filters on EXE_SUFFIX).
+#[cfg(windows)]
+fn is_executable<P: AsRef<Path>>(path: P) -> bool {
+    fs::metadata(path)
+        .map(|metadata| metadata.is_file())
+        .unwrap_or(false)
+}
+
+/// Directories searched for external subcommands: `$CARGO_HOME/bin` first
+/// (so cargo-installed binaries win), followed by every entry of `$PATH`.
+fn search_directories(config: &Config) -> Vec<PathBuf> {
+    let mut dirs = vec![config.home().clone().into_path_unlocked().join("bin")];
+    if let Some(val) = env::var_os("PATH") {
+        dirs.extend(env::split_paths(&val));
+    }
+    dirs
+}
+
+/// Registers a curl-backed HTTP transport for git operations, but only
+/// when the configuration (proxies, custom CAs, ...) requires it. Any
+/// error while probing or building the handle silently leaves the default
+/// transport in place, which still works for the common case.
+fn init_git_transports(config: &Config) {
+    // Only use a custom transport if any HTTP options are specified,
+    // such as proxies or custom certificate authorities. The custom
+    // transport, however, is not as well battle-tested.
+
+    match cargo::ops::needs_custom_http_transport(config) {
+        Ok(true) => {}
+        _ => return,
+    }
+
+    let handle = match cargo::ops::http_handle(config) {
+        Ok(handle) => handle,
+        Err(..) => return,
+    };
+
+    // The unsafety of the registration function derives from two aspects:
+    //
+    // 1. This call must be synchronized with all other registration calls as
+    //    well as construction of new transports.
+    // 2. The argument is leaked.
+    //
+    // We're clear on point (1) because this is only called at the start of this
+    // binary (we know what the state of the world looks like) and we're mostly
+    // clear on point (2) because we'd only free it after everything is done
+    // anyway
+    unsafe {
+        git2_curl::register(handle);
+    }
+}
diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs
new file mode 100644 (file)
index 0000000..77ed087
--- /dev/null
@@ -0,0 +1,191 @@
+use std::path::Path;
+use std::cell::RefCell;
+
+use util::{CargoResult, CargoResultExt, Config, RustfixDiagnosticServer};
+
+/// Configuration information for a rustc build.
+#[derive(Debug)]
+pub struct BuildConfig {
+    /// The target arch triple, defaults to host arch
+    pub requested_target: Option<String>,
+    /// How many rustc jobs to run in parallel
+    pub jobs: u32,
+    /// Whether we are building for release
+    pub release: bool,
+    /// In what mode we are compiling
+    pub mode: CompileMode,
+    /// Whether to print std output in json format (for machine reading)
+    pub message_format: MessageFormat,
+    /// Force cargo to do a full rebuild and treat each target as changed.
+    pub force_rebuild: bool,
+    /// Output a build plan to stdout instead of actually compiling.
+    pub build_plan: bool,
+    /// Use Cargo itself as the wrapper around rustc, only used for `cargo fix`
+    pub cargo_as_rustc_wrapper: bool,
+    /// Extra env vars to inject into rustc commands
+    pub extra_rustc_env: Vec<(String, String)>,
+    /// Extra args to inject into rustc commands
+    pub extra_rustc_args: Vec<String>,
+    /// Channel for collecting rustfix diagnostics — presumably populated
+    /// only during `cargo fix`; confirm with its users.
+    pub rustfix_diagnostic_server: RefCell<Option<RustfixDiagnosticServer>>,
+}
+
+impl BuildConfig {
+    /// Parse all config files to learn about build configuration. Currently
+    /// configured options are:
+    ///
+    /// * build.jobs
+    /// * build.target
+    /// * target.$target.ar
+    /// * target.$target.linker
+    /// * target.$target.libfoo.metadata
+    ///
+    /// CLI values (`jobs`, `requested_target`) take precedence over the
+    /// corresponding config-file values. All boolean/extra fields are
+    /// initialized to their inert defaults; callers mutate them afterwards.
+    pub fn new(
+        config: &Config,
+        jobs: Option<u32>,
+        requested_target: &Option<String>,
+        mode: CompileMode,
+    ) -> CargoResult<BuildConfig> {
+        // A `--target foo.json` custom target spec is stored as the
+        // canonicalized absolute path so later uses are cwd-independent.
+        let requested_target = match requested_target {
+            &Some(ref target) if target.ends_with(".json") => {
+                let path = Path::new(target)
+                    .canonicalize()
+                    .chain_err(|| format_err!("Target path {:?} is not a valid file", target))?;
+                Some(path.into_os_string()
+                    .into_string()
+                    .map_err(|_| format_err!("Target path is not valid unicode"))?)
+            }
+            other => other.clone(),
+        };
+        if let Some(ref s) = requested_target {
+            if s.trim().is_empty() {
+                bail!("target was empty")
+            }
+        }
+        // CLI target wins over `build.target` from configuration.
+        let cfg_target = config.get_string("build.target")?.map(|s| s.val);
+        let target = requested_target.clone().or(cfg_target);
+
+        if jobs == Some(0) {
+            bail!("jobs must be at least 1")
+        }
+        // An external jobserver already controls parallelism; an explicit
+        // `-j` would conflict with it, so warn and ignore.
+        if jobs.is_some() && config.jobserver_from_env().is_some() {
+            config.shell().warn(
+                "a `-j` argument was passed to Cargo but Cargo is \
+                 also configured with an external jobserver in \
+                 its environment, ignoring the `-j` parameter",
+            )?;
+        }
+        // Precedence for job count: CLI `-j`, then `build.jobs`, then the
+        // number of logical CPUs.
+        let cfg_jobs: Option<u32> = config.get("build.jobs")?;
+        let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
+        Ok(BuildConfig {
+            requested_target: target,
+            jobs,
+            release: false,
+            mode,
+            message_format: MessageFormat::Human,
+            force_rebuild: false,
+            build_plan: false,
+            cargo_as_rustc_wrapper: false,
+            extra_rustc_env: Vec::new(),
+            extra_rustc_args: Vec::new(),
+            rustfix_diagnostic_server: RefCell::new(None),
+        })
+    }
+
+    /// True when compiler messages should be emitted as JSON.
+    pub fn json_messages(&self) -> bool {
+        self.message_format == MessageFormat::Json
+    }
+
+    /// True for the test-like modes (`Test` and `Bench`).
+    pub fn test(&self) -> bool {
+        self.mode == CompileMode::Test || self.mode == CompileMode::Bench
+    }
+}
+
+/// How compiler messages are rendered to the user (see `--message-format`).
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum MessageFormat {
+    /// Standard human-readable rustc output.
+    Human,
+    /// Machine-readable JSON diagnostics.
+    Json,
+    /// Abbreviated human-readable output.
+    Short,
+}
+
+/// The general "mode" of what to do.
+/// This is used for two purposes.  The commands themselves pass this in to
+/// `compile_ws` to tell it the general execution strategy.  This influences
+/// the default targets selected.  The other use is in the `Unit` struct
+/// to indicate what is being done with a specific target.
+///
+/// Every possible mode is also enumerated in `all_modes` below; keep the
+/// two in sync when adding variants.
+#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash)]
+pub enum CompileMode {
+    /// A target being built for a test.
+    Test,
+    /// Building a target with `rustc` (lib or bin).
+    Build,
+    /// Building a target with `rustc` to emit `rmeta` metadata only. If
+    /// `test` is true, then it is also compiled with `--test` to check it like
+    /// a test.
+    Check { test: bool },
+    /// Used to indicate benchmarks should be built.  This is not used in
+    /// `Target` because it is essentially the same as `Test` (indicating
+    /// `--test` should be passed to rustc) and by using `Test` instead it
+    /// allows some de-duping of Units to occur.
+    Bench,
+    /// A target that will be documented with `rustdoc`.
+    /// If `deps` is true, then it will also document all dependencies.
+    Doc { deps: bool },
+    /// A target that will be tested with `rustdoc`.
+    Doctest,
+    /// A marker for Units that represent the execution of a `build.rs`
+    /// script.
+    RunCustomBuild,
+}
+
+impl CompileMode {
+    /// Returns true if the unit is being checked.
+    pub fn is_check(self) -> bool {
+        match self {
+            CompileMode::Check { .. } => true,
+            _ => false,
+        }
+    }
+
+    /// Returns true if this is a doc or doctest. Be careful using this.
+    /// Although both run rustdoc, the dependencies for those two modes are
+    /// very different.
+    pub fn is_doc(self) -> bool {
+        match self {
+            CompileMode::Doc { .. } | CompileMode::Doctest => true,
+            _ => false,
+        }
+    }
+
+    /// Returns true if this is any type of test (test, benchmark, doctest, or
+    /// check-test).
+    pub fn is_any_test(self) -> bool {
+        match self {
+            CompileMode::Test
+            | CompileMode::Bench
+            | CompileMode::Check { test: true }
+            | CompileMode::Doctest => true,
+            _ => false,
+        }
+    }
+
+    /// Returns true if this is the *execution* of a `build.rs` script.
+    pub fn is_run_custom_build(self) -> bool {
+        self == CompileMode::RunCustomBuild
+    }
+
+    /// List of all modes (currently used by `cargo clean -p` for computing
+    /// all possible outputs). Must cover every variant, including both
+    /// boolean payloads of `Check` and `Doc`.
+    pub fn all_modes() -> &'static [CompileMode] {
+        static ALL: [CompileMode; 9] = [
+            CompileMode::Test,
+            CompileMode::Build,
+            CompileMode::Check { test: true },
+            CompileMode::Check { test: false },
+            CompileMode::Bench,
+            CompileMode::Doc { deps: true },
+            CompileMode::Doc { deps: false },
+            CompileMode::Doctest,
+            CompileMode::RunCustomBuild,
+        ];
+        &ALL
+    }
+}
diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs
new file mode 100644 (file)
index 0000000..358751c
--- /dev/null
@@ -0,0 +1,409 @@
+use std::collections::HashMap;
+use std::env;
+use std::path::{Path, PathBuf};
+use std::str;
+
+use core::profiles::Profiles;
+use core::{Dependency, Workspace};
+use core::{PackageId, PackageSet, Resolve};
+use util::errors::CargoResult;
+use util::{profile, Cfg, CfgExpr, Config, Rustc};
+
+use super::{BuildConfig, BuildOutput, Kind, Unit};
+
+mod target_info;
+pub use self::target_info::{FileFlavor, TargetInfo};
+
+/// The build context, containing all information about a build task
+pub struct BuildContext<'a, 'cfg: 'a> {
+    /// The workspace the build is for
+    pub ws: &'a Workspace<'cfg>,
+    /// The cargo configuration
+    pub config: &'cfg Config,
+    /// The dependency graph for our build
+    pub resolve: &'a Resolve,
+    /// Profile settings (opt level, debug info, ...) for the workspace.
+    pub profiles: &'a Profiles,
+    /// High-level knobs for this build (jobs, mode, target, ...).
+    pub build_config: &'a BuildConfig,
+    /// Extra compiler args for either `rustc` or `rustdoc`.
+    pub extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
+    /// The set of packages participating in the build.
+    pub packages: &'a PackageSet<'cfg>,
+
+    /// Information about the compiler
+    pub rustc: Rustc,
+    /// Build information for the host arch
+    pub host_config: TargetConfig,
+    /// Build information for the target
+    pub target_config: TargetConfig,
+    /// Probed compiler/target details for the target platform.
+    pub target_info: TargetInfo,
+    /// Probed compiler/target details for the host platform.
+    pub host_info: TargetInfo,
+    /// Tri-state from `CARGO_INCREMENTAL`: `Some(true)` when set to "1",
+    /// `Some(false)` for any other value, `None` when unset.
+    pub incremental_env: Option<bool>,
+}
+
+impl<'a, 'cfg> BuildContext<'a, 'cfg> {
+    /// Assembles a `BuildContext` by probing the compiler and loading
+    /// per-target configuration for both the host and (if different) the
+    /// requested target platform.
+    pub fn new(
+        ws: &'a Workspace<'cfg>,
+        resolve: &'a Resolve,
+        packages: &'a PackageSet<'cfg>,
+        config: &'cfg Config,
+        build_config: &'a BuildConfig,
+        profiles: &'a Profiles,
+        extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
+    ) -> CargoResult<BuildContext<'a, 'cfg>> {
+        // "1" means enabled; any other set value means disabled; unset
+        // leaves the decision to other layers.
+        let incremental_env = match env::var("CARGO_INCREMENTAL") {
+            Ok(v) => Some(v == "1"),
+            Err(_) => None,
+        };
+
+        let rustc = config.rustc(Some(ws))?;
+        let host_config = TargetConfig::new(config, &rustc.host)?;
+        // Without an explicit --target, the target platform is the host.
+        let target_config = match build_config.requested_target.as_ref() {
+            Some(triple) => TargetConfig::new(config, triple)?,
+            None => host_config.clone(),
+        };
+        let (host_info, target_info) = {
+            let _p = profile::start("BuildContext::probe_target_info");
+            debug!("probe_target_info");
+            let host_info =
+                TargetInfo::new(config, &build_config.requested_target, &rustc, Kind::Host)?;
+            let target_info =
+                TargetInfo::new(config, &build_config.requested_target, &rustc, Kind::Target)?;
+            (host_info, target_info)
+        };
+
+        Ok(BuildContext {
+            ws,
+            resolve,
+            packages,
+            config,
+            rustc,
+            target_config,
+            target_info,
+            host_config,
+            host_info,
+            build_config,
+            profiles,
+            incremental_env,
+            extra_compiler_args,
+        })
+    }
+
+    /// The `--extern` crate name to use when `unit` depends on `dep`,
+    /// as decided by the resolver (accounts for renames).
+    pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult<String> {
+        self.resolve.extern_crate_name(unit.pkg.package_id(), dep.pkg.package_id(), dep.target)
+    }
+
+    /// Whether a dependency should be compiled for the host or target platform,
+    /// specified by `Kind`.
+    pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
+        // If this dependency is only available for certain platforms,
+        // make sure we're only enabling it for that platform.
+        let platform = match dep.platform() {
+            Some(p) => p,
+            None => return true,
+        };
+        let (name, info) = match kind {
+            Kind::Host => (self.host_triple(), &self.host_info),
+            Kind::Target => (self.target_triple(), &self.target_info),
+        };
+        platform.matches(name, info.cfg())
+    }
+
+    /// Get the user-specified linker for a particular host or target
+    pub fn linker(&self, kind: Kind) -> Option<&Path> {
+        self.target_config(kind).linker.as_ref().map(|s| s.as_ref())
+    }
+
+    /// Get the user-specified `ar` program for a particular host or target
+    pub fn ar(&self, kind: Kind) -> Option<&Path> {
+        self.target_config(kind).ar.as_ref().map(|s| s.as_ref())
+    }
+
+    /// Get the list of cfg printed out from the compiler for the specified kind
+    pub fn cfg(&self, kind: Kind) -> &[Cfg] {
+        let info = match kind {
+            Kind::Host => &self.host_info,
+            Kind::Target => &self.target_info,
+        };
+        info.cfg().unwrap_or(&[])
+    }
+
+    /// The host arch triple
+    ///
+    /// e.g. x86_64-unknown-linux-gnu, would be
+    ///  - machine: x86_64
+    ///  - hardware-platform: unknown
+    ///  - operating system: linux-gnu
+    pub fn host_triple(&self) -> &str {
+        &self.rustc.host
+    }
+
+    /// The triple builds are produced for: the requested `--target`, or
+    /// the host triple when none was requested.
+    pub fn target_triple(&self) -> &str {
+        self.build_config
+            .requested_target
+            .as_ref()
+            .map(|s| s.as_str())
+            .unwrap_or_else(|| self.host_triple())
+    }
+
+    /// Get the target configuration for a particular host or target
+    fn target_config(&self, kind: Kind) -> &TargetConfig {
+        match kind {
+            Kind::Host => &self.host_config,
+            Kind::Target => &self.target_config,
+        }
+    }
+
+    /// Number of jobs specified for this build
+    pub fn jobs(&self) -> u32 {
+        self.build_config.jobs
+    }
+
+    /// Extra `RUSTFLAGS` to pass to rustc for `unit`, gathered from the
+    /// environment and configuration (see `env_args`).
+    pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        env_args(
+            self.config,
+            &self.build_config.requested_target,
+            self.host_triple(),
+            self.info(unit.kind).cfg(),
+            unit.kind,
+            "RUSTFLAGS",
+        )
+    }
+
+    /// Extra `RUSTDOCFLAGS` to pass to rustdoc for `unit`; same sourcing
+    /// rules as `rustflags_args`.
+    pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        env_args(
+            self.config,
+            &self.build_config.requested_target,
+            self.host_triple(),
+            self.info(unit.kind).cfg(),
+            unit.kind,
+            "RUSTDOCFLAGS",
+        )
+    }
+
+    /// Warnings are shown for path (local) packages, or for everything
+    /// when extra-verbose output was requested.
+    pub fn show_warnings(&self, pkg: &PackageId) -> bool {
+        pkg.source_id().is_path() || self.config.extra_verbose()
+    }
+
+    /// The probed `TargetInfo` for the given compilation kind.
+    fn info(&self, kind: Kind) -> &TargetInfo {
+        match kind {
+            Kind::Host => &self.host_info,
+            Kind::Target => &self.target_info,
+        }
+    }
+
+    /// Extra compiler args registered for this specific unit, if any.
+    pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
+        self.extra_compiler_args.get(unit)
+    }
+}
+
+/// Information required to build for a target
+#[derive(Clone, Default)]
+pub struct TargetConfig {
+    /// The path of archiver (lib builder) for this target.
+    pub ar: Option<PathBuf>,
+    /// The path of the linker for this target.
+    pub linker: Option<PathBuf>,
+    /// Special build options for any necessary input files (filename -> options)
+    /// NOTE(review): the key appears to be the native library name from the
+    /// `target.$triple.<lib>` config table — confirm against `TargetConfig::new`.
+    pub overrides: HashMap<String, BuildOutput>,
+}
+
+impl TargetConfig {
+    /// Loads `target.$triple.*` configuration: the `ar`/`linker` paths plus
+    /// per-library build-script override tables, which are parsed into
+    /// `BuildOutput` values keyed by library name.
+    pub fn new(config: &Config, triple: &str) -> CargoResult<TargetConfig> {
+        let key = format!("target.{}", triple);
+        let mut ret = TargetConfig {
+            ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
+            linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val),
+            overrides: HashMap::new(),
+        };
+        let table = match config.get_table(&key)? {
+            Some(table) => table.val,
+            None => return Ok(ret),
+        };
+        for (lib_name, value) in table {
+            // These keys configure the target itself, not a library
+            // override, so they are skipped here.
+            match lib_name.as_str() {
+                "ar" | "linker" | "runner" | "rustflags" => continue,
+                _ => {}
+            }
+
+            let mut output = BuildOutput {
+                library_paths: Vec::new(),
+                library_links: Vec::new(),
+                cfgs: Vec::new(),
+                env: Vec::new(),
+                metadata: Vec::new(),
+                rerun_if_changed: Vec::new(),
+                rerun_if_env_changed: Vec::new(),
+                warnings: Vec::new(),
+            };
+            // We require deterministic order of evaluation, so we must sort the pairs by key first.
+            let mut pairs = Vec::new();
+            for (k, value) in value.table(&lib_name)?.0 {
+                pairs.push((k, value));
+            }
+            pairs.sort_by_key(|p| p.0);
+            // Each key mirrors a `cargo:` directive a build script could
+            // have emitted; unknown keys become plain metadata entries.
+            for (k, value) in pairs {
+                let key = format!("{}.{}", key, k);
+                match &k[..] {
+                    "rustc-flags" => {
+                        let (flags, definition) = value.string(k)?;
+                        let whence = format!("in `{}` (in {})", key, definition.display());
+                        let (paths, links) = BuildOutput::parse_rustc_flags(flags, &whence)?;
+                        output.library_paths.extend(paths);
+                        output.library_links.extend(links);
+                    }
+                    "rustc-link-lib" => {
+                        let list = value.list(k)?;
+                        output
+                            .library_links
+                            .extend(list.iter().map(|v| v.0.clone()));
+                    }
+                    "rustc-link-search" => {
+                        let list = value.list(k)?;
+                        output
+                            .library_paths
+                            .extend(list.iter().map(|v| PathBuf::from(&v.0)));
+                    }
+                    "rustc-cfg" => {
+                        let list = value.list(k)?;
+                        output.cfgs.extend(list.iter().map(|v| v.0.clone()));
+                    }
+                    "rustc-env" => for (name, val) in value.table(k)?.0 {
+                        let val = val.string(name)?.0;
+                        output.env.push((name.clone(), val.to_string()));
+                    },
+                    "warning" | "rerun-if-changed" | "rerun-if-env-changed" => {
+                        bail!("`{}` is not supported in build script overrides", k);
+                    }
+                    _ => {
+                        let val = value.string(k)?.0;
+                        output.metadata.push((k.clone(), val.to_string()));
+                    }
+                }
+            }
+            ret.overrides.insert(lib_name, output);
+        }
+
+        Ok(ret)
+    }
+}
+
+/// Acquire extra flags to pass to the compiler from various locations.
+///
+/// The locations are:
+///
+///  - the `RUSTFLAGS` environment variable
+///
+/// then if this was not found
+///
+///  - `target.*.rustflags` from the manifest (Cargo.toml)
+///  - `target.cfg(..).rustflags` from the manifest
+///
+/// then if neither of these were found
+///
+///  - `build.rustflags` from the manifest
+///
+/// Note that if a `target` is specified, no args will be passed to host code (plugins, build
+/// scripts, ...), even if it is the same as the target.
+///
+/// `name` is the environment variable / config-key suffix to look up
+/// ("RUSTFLAGS" or "RUSTDOCFLAGS"); the sources above are tried in order
+/// and the first non-empty one wins.
+fn env_args(
+    config: &Config,
+    requested_target: &Option<String>,
+    host_triple: &str,
+    target_cfg: Option<&[Cfg]>,
+    kind: Kind,
+    name: &str,
+) -> CargoResult<Vec<String>> {
+    // We *want* to apply RUSTFLAGS only to builds for the
+    // requested target architecture, and not to things like build
+    // scripts and plugins, which may be for an entirely different
+    // architecture. Cargo's present architecture makes it quite
+    // hard to only apply flags to things that are not build
+    // scripts and plugins though, so we do something more hacky
+    // instead to avoid applying the same RUSTFLAGS to multiple targets
+    // arches:
+    //
+    // 1) If --target is not specified we just apply RUSTFLAGS to
+    // all builds; they are all going to have the same target.
+    //
+    // 2) If --target *is* specified then we only apply RUSTFLAGS
+    // to compilation units with the Target kind, which indicates
+    // it was chosen by the --target flag.
+    //
+    // This means that, e.g. even if the specified --target is the
+    // same as the host, build scripts in plugins won't get
+    // RUSTFLAGS.
+    let compiling_with_target = requested_target.is_some();
+    let is_target_kind = kind == Kind::Target;
+
+    if compiling_with_target && !is_target_kind {
+        // This is probably a build script or plugin and we're
+        // compiling with --target. In this scenario there are
+        // no rustflags we can apply.
+        return Ok(Vec::new());
+    }
+
+    // First try RUSTFLAGS from the environment
+    if let Ok(a) = env::var(name) {
+        // The variable is space-separated; empty fragments (from repeated
+        // spaces) are dropped.
+        let args = a.split(' ')
+            .map(str::trim)
+            .filter(|s| !s.is_empty())
+            .map(str::to_string);
+        return Ok(args.collect());
+    }
+
+    let mut rustflags = Vec::new();
+
+    // Config keys are lowercase ("rustflags"), unlike the env var.
+    let name = name.chars()
+        .flat_map(|c| c.to_lowercase())
+        .collect::<String>();
+    // Then the target.*.rustflags value...
+    let target = requested_target
+        .as_ref()
+        .map(|s| s.as_str())
+        .unwrap_or(host_triple);
+    let key = format!("target.{}.{}", target, name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        rustflags.extend(args);
+    }
+    // ...including target.'cfg(...)'.rustflags
+    if let Some(target_cfg) = target_cfg {
+        if let Some(table) = config.get_table("target")? {
+            let cfgs = table.val.keys().filter_map(|key| {
+                if CfgExpr::matches_key(key, target_cfg) {
+                    Some(key)
+                } else {
+                    None
+                }
+            });
+
+            // Note that we may have multiple matching `[target]` sections and
+            // because we're passing flags to the compiler this can affect
+            // cargo's caching and whether it rebuilds. Ensure a deterministic
+            // ordering through sorting for now. We may perhaps one day wish to
+            // ensure a deterministic ordering via the order keys were defined
+            // in files perhaps.
+            let mut cfgs = cfgs.collect::<Vec<_>>();
+            cfgs.sort();
+
+            for n in cfgs {
+                let key = format!("target.{}.{}", n, name);
+                if let Some(args) = config.get_list_or_split_string(&key)? {
+                    let args = args.val.into_iter();
+                    rustflags.extend(args);
+                }
+            }
+        }
+    }
+
+    if !rustflags.is_empty() {
+        return Ok(rustflags);
+    }
+
+    // Then the build.rustflags value
+    let key = format!("build.{}", name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        return Ok(args.collect());
+    }
+
+    Ok(Vec::new())
+}
diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs
new file mode 100644 (file)
index 0000000..84950bd
--- /dev/null
@@ -0,0 +1,291 @@
+use std::cell::RefCell;
+use std::collections::hash_map::{Entry, HashMap};
+use std::path::PathBuf;
+use std::str::{self, FromStr};
+
+use super::env_args;
+use util::{CargoResult, CargoResultExt, Cfg, Config, ProcessBuilder, Rustc};
+use core::TargetKind;
+use super::Kind;
+
+/// Information learned by probing `rustc` for one compilation target:
+/// file-name shapes per crate type, `--print=cfg` values (when available)
+/// and the sysroot library directory.
+#[derive(Clone)]
+pub struct TargetInfo {
+    // Base `rustc` invocation (without `--crate-type` args) reused by
+    // `discover_crate_type` to probe additional crate types on demand.
+    crate_type_process: Option<ProcessBuilder>,
+    // Lazily-filled cache: crate type -> Some((prefix, suffix)) for output
+    // file names, or None if the target does not support that crate type.
+    crate_types: RefCell<HashMap<String, Option<(String, String)>>>,
+    // `--print=cfg` output, if the compiler supported printing it.
+    cfg: Option<Vec<Cfg>>,
+    // Library directory inside the sysroot, if it could be determined.
+    pub sysroot_libdir: Option<PathBuf>,
+}
+
+/// Type of each file generated by a Unit.
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum FileFlavor {
+    /// Not a special file type.
+    Normal,
+    /// It is something you can link against (e.g. a library).
+    Linkable,
+    /// It is a piece of external debug information (e.g. *.dSYM and *.pdb).
+    DebugInfo,
+}
+
+/// A file-name template (prefix + stem + suffix) for one kind of output.
+pub struct FileType {
+    /// The role this file plays (normal, linkable, debug info).
+    pub flavor: FileFlavor,
+    // File-name suffix including the leading dot, e.g. ".rlib".
+    suffix: String,
+    // File-name prefix, e.g. "lib" (may be empty).
+    prefix: String,
+    // wasm bin target will generate two files in deps such as
+    // "web-stuff.js" and "web_stuff.wasm". Note the different usages of
+    // "-" and "_". should_replace_hyphens is a flag to indicate that
+    // we need to convert the stem "web-stuff" to "web_stuff", so we
+    // won't miss "web_stuff.wasm".
+    should_replace_hyphens: bool,
+}
+
+impl FileType {
+    /// Builds the on-disk file name for `stem` by wrapping it in this
+    /// type's prefix and suffix.
+    pub fn filename(&self, stem: &str) -> String {
+        // Some outputs (wasm) use underscores where the target name uses
+        // hyphens; normalize the stem when this file type requires it.
+        let stem = if self.should_replace_hyphens {
+            stem.replace("-", "_")
+        } else {
+            stem.to_string()
+        };
+        format!("{}{}{}", self.prefix, stem, self.suffix)
+    }
+}
+
+impl TargetInfo {
+    /// Probes `rustc` to learn target-specific information for the given
+    /// `kind` (host or target): file-name shapes for every known crate
+    /// type, the sysroot library directory, and the `--print=cfg` values
+    /// when the compiler supports printing them.
+    pub fn new(
+        config: &Config,
+        requested_target: &Option<String>,
+        rustc: &Rustc,
+        kind: Kind,
+    ) -> CargoResult<TargetInfo> {
+        // Collect the RUSTFLAGS that apply to this compilation kind; they
+        // are forwarded to the probe invocation below.
+        let rustflags = env_args(
+            config,
+            requested_target,
+            &rustc.host,
+            None,
+            kind,
+            "RUSTFLAGS",
+        )?;
+        let mut process = rustc.process();
+        process
+            .arg("-")
+            .arg("--crate-name")
+            .arg("___")
+            .arg("--print=file-names")
+            .args(&rustflags)
+            .env_remove("RUST_LOG");
+
+        let target_triple = requested_target
+            .as_ref()
+            .map(|s| s.as_str())
+            .unwrap_or(&rustc.host);
+        if kind == Kind::Target {
+            process.arg("--target").arg(target_triple);
+        }
+
+        // Keep a copy of the probe *without* any --crate-type arguments so
+        // additional crate types can be queried later on demand (see
+        // `discover_crate_type`).
+        let crate_type_process = process.clone();
+        const KNOWN_CRATE_TYPES: &[&str] =
+            &["bin", "rlib", "dylib", "cdylib", "staticlib", "proc-macro"];
+        for crate_type in KNOWN_CRATE_TYPES.iter() {
+            process.arg("--crate-type").arg(crate_type);
+        }
+
+        let mut with_cfg = process.clone();
+        with_cfg.arg("--print=sysroot");
+        with_cfg.arg("--print=cfg");
+
+        // First try asking for the sysroot and cfg values too; if that
+        // invocation fails, retry without them and remember that they are
+        // unavailable.
+        let mut has_cfg_and_sysroot = true;
+        let (output, error) = rustc
+            .cached_output(&with_cfg)
+            .or_else(|_| {
+                has_cfg_and_sysroot = false;
+                rustc.cached_output(&process)
+            })
+            .chain_err(|| "failed to run `rustc` to learn about target-specific information")?;
+
+        // --print=file-names emits one line per requested crate type, in
+        // the order the --crate-type flags were passed above.
+        let mut lines = output.lines();
+        let mut map = HashMap::new();
+        for crate_type in KNOWN_CRATE_TYPES {
+            let out = parse_crate_type(crate_type, &error, &mut lines)?;
+            map.insert(crate_type.to_string(), out);
+        }
+
+        let mut sysroot_libdir = None;
+        if has_cfg_and_sysroot {
+            // The next line is the --print=sysroot output.
+            let line = match lines.next() {
+                Some(line) => line,
+                None => bail!(
+                    "output of --print=sysroot missing when learning about \
+                     target-specific information from rustc"
+                ),
+            };
+            let mut rustlib = PathBuf::from(line);
+            if kind == Kind::Host {
+                // Host: the compiler's own dynamic libraries live under
+                // bin/ on Windows and lib/ elsewhere.
+                if cfg!(windows) {
+                    rustlib.push("bin");
+                } else {
+                    rustlib.push("lib");
+                }
+                sysroot_libdir = Some(rustlib);
+            } else {
+                // Target: libraries live at <sysroot>/lib/rustlib/<triple>/lib.
+                rustlib.push("lib");
+                rustlib.push("rustlib");
+                rustlib.push(target_triple);
+                rustlib.push("lib");
+                sysroot_libdir = Some(rustlib);
+            }
+        }
+
+        // Whatever lines remain are the --print=cfg output.
+        let cfg = if has_cfg_and_sysroot {
+            Some(lines.map(Cfg::from_str).collect::<CargoResult<_>>()?)
+        } else {
+            None
+        };
+
+        Ok(TargetInfo {
+            crate_type_process: Some(crate_type_process),
+            crate_types: RefCell::new(map),
+            cfg,
+            sysroot_libdir,
+        })
+    }
+
+    /// The `--print=cfg` values for this target, or `None` when the
+    /// compiler did not support printing them.
+    pub fn cfg(&self) -> Option<&[Cfg]> {
+        self.cfg.as_ref().map(|v| v.as_ref())
+    }
+
+    /// Returns the file types produced for `crate_type` when compiling for
+    /// `target_triple`, or `Ok(None)` when the target does not support
+    /// that crate type. Beyond the primary file, platform-specific
+    /// companions are added (MSVC import library, wasm module, and
+    /// .dSYM/.pdb debug info for binaries).
+    pub fn file_types(
+        &self,
+        crate_type: &str,
+        flavor: FileFlavor,
+        kind: &TargetKind,
+        target_triple: &str,
+    ) -> CargoResult<Option<Vec<FileType>>> {
+        // Look up the (prefix, suffix) pair, probing rustc on a cache miss.
+        let mut crate_types = self.crate_types.borrow_mut();
+        let entry = crate_types.entry(crate_type.to_string());
+        let crate_type_info = match entry {
+            Entry::Occupied(o) => &*o.into_mut(),
+            Entry::Vacant(v) => {
+                let value = self.discover_crate_type(v.key())?;
+                &*v.insert(value)
+            }
+        };
+        let (prefix, suffix) = match *crate_type_info {
+            Some((ref prefix, ref suffix)) => (prefix, suffix),
+            None => return Ok(None),
+        };
+        let mut ret = vec![
+            FileType {
+                suffix: suffix.clone(),
+                prefix: prefix.clone(),
+                flavor,
+                should_replace_hyphens: false,
+            },
+        ];
+
+        // rust-lang/cargo#4500: MSVC dylibs also produce a `.dll.lib`
+        // import library next to the `.dll`.
+        if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib")
+            && suffix == ".dll"
+        {
+            ret.push(FileType {
+                suffix: ".dll.lib".to_string(),
+                prefix: prefix.clone(),
+                flavor: FileFlavor::Normal,
+                should_replace_hyphens: false,
+            })
+        }
+
+        // rust-lang/cargo#4535: wasm bins that emit a `.js` loader also
+        // emit a `.wasm` module, with hyphens replaced by underscores.
+        if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" {
+            ret.push(FileType {
+                suffix: ".wasm".to_string(),
+                prefix: prefix.clone(),
+                flavor: FileFlavor::Normal,
+                should_replace_hyphens: true,
+            })
+        }
+
+        // rust-lang/cargo#4490, rust-lang/cargo#4960
+        //  - only uplift debuginfo for binaries.
+        //    tests are run directly from target/debug/deps/
+        //    and examples are inside target/debug/examples/ which already have symbols next to them
+        //    so no need to do anything.
+        if *kind == TargetKind::Bin {
+            if target_triple.contains("-apple-") {
+                ret.push(FileType {
+                    suffix: ".dSYM".to_string(),
+                    prefix: prefix.clone(),
+                    flavor: FileFlavor::DebugInfo,
+                    should_replace_hyphens: false,
+                })
+            } else if target_triple.ends_with("-msvc") {
+                ret.push(FileType {
+                    suffix: ".pdb".to_string(),
+                    prefix: prefix.clone(),
+                    flavor: FileFlavor::DebugInfo,
+                    should_replace_hyphens: false,
+                })
+            }
+        }
+
+        Ok(Some(ret))
+    }
+
+    /// Cache-miss path for `file_types`: re-runs the saved probe process
+    /// with a single `--crate-type` flag and parses the resulting file
+    /// name into a (prefix, suffix) pair.
+    fn discover_crate_type(&self, crate_type: &str) -> CargoResult<Option<(String, String)>> {
+        let mut process = self.crate_type_process.clone().unwrap();
+
+        process.arg("--crate-type").arg(crate_type);
+
+        let output = process.exec_with_output().chain_err(|| {
+            format!(
+                "failed to run `rustc` to learn about \
+                 crate-type {} information",
+                crate_type
+            )
+        })?;
+
+        let error = str::from_utf8(&output.stderr).unwrap();
+        let output = str::from_utf8(&output.stdout).unwrap();
+        Ok(parse_crate_type(crate_type, error, &mut output.lines())?)
+    }
+}
+
+/// Takes rustc output (using specialized command line args), and calculates the file prefix and
+/// suffix for the given crate type, or returns None if the type is not supported. (e.g. for a
+/// rust library like libcargo.rlib, prefix = "lib", suffix = ".rlib").
+///
+/// The caller needs to ensure that the lines object is at the correct line for the given crate
+/// type: this is not checked.
+// This function cannot handle more than 1 file per type (with wasm32-unknown-emscripten, there
+// are 2 files for bin (.wasm and .js)).
+fn parse_crate_type(
+    crate_type: &str,
+    error: &str,
+    lines: &mut str::Lines,
+) -> CargoResult<Option<(String, String)>> {
+    // A stderr diagnostic mentioning this crate type means the target does
+    // not support it; report that as `None` rather than an error.
+    let not_supported = error.lines().any(|line| {
+        (line.contains("unsupported crate type") || line.contains("unknown crate type"))
+            && line.contains(crate_type)
+    });
+    if not_supported {
+        return Ok(None);
+    }
+    let line = match lines.next() {
+        Some(line) => line,
+        None => bail!(
+            "malformed output when learning about \
+             crate-type {} information",
+            crate_type
+        ),
+    };
+    // The probe compiles with `--crate-name ___`, so the reported file
+    // name is exactly `<prefix>___<suffix>`.
+    let mut parts = line.trim().split("___");
+    let prefix = parts.next().unwrap();
+    let suffix = match parts.next() {
+        Some(part) => part,
+        None => bail!(
+            "output of --print=file-names has changed in \
+             the compiler, cannot parse"
+        ),
+    };
+
+    Ok(Some((prefix.to_string(), suffix.to_string())))
+}
diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs
new file mode 100644 (file)
index 0000000..5813e29
--- /dev/null
@@ -0,0 +1,158 @@
+//! A graph-like structure used to represent the rustc commands to build the package and the
+//! interdependencies between them.
+//!
+//! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be
+//! shared with an external build system. Each Invocation in the BuildPlan comprises a single
+//! subprocess and defines the build environment, the outputs produced by the subprocess, and the
+//! dependencies on other Invocations.
+
+use std::collections::BTreeMap;
+
+use core::TargetKind;
+use super::{Context, Kind, Unit};
+use super::context::OutputFile;
+use util::{internal, CargoResult, ProcessBuilder};
+use std::path::PathBuf;
+use serde_json;
+use semver;
+
+/// A single subprocess in the build plan: which package/target it builds,
+/// the command line to run, and the invocations it depends on.
+#[derive(Debug, Serialize)]
+struct Invocation {
+    package_name: String,
+    package_version: semver::Version,
+    target_kind: TargetKind,
+    kind: Kind,
+    // Indices into `SerializedBuildPlan::invocations`.
+    deps: Vec<usize>,
+    outputs: Vec<PathBuf>,
+    // Maps a hardlink destination to the produced output file it points at.
+    links: BTreeMap<PathBuf, PathBuf>,
+    program: String,
+    args: Vec<String>,
+    env: BTreeMap<String, String>,
+    cwd: Option<PathBuf>,
+}
+
+/// Dependency graph of a dry-run build, intended to be shared with an
+/// external build system (see the module docs).
+#[derive(Debug)]
+pub struct BuildPlan {
+    // Maps a unit's build key to its index in `plan.invocations`.
+    invocation_map: BTreeMap<String, usize>,
+    plan: SerializedBuildPlan,
+}
+
+/// The JSON-serializable payload of a `BuildPlan`.
+#[derive(Debug, Serialize)]
+struct SerializedBuildPlan {
+    invocations: Vec<Invocation>,
+    // Input files of the plan (set via `BuildPlan::set_inputs`).
+    inputs: Vec<PathBuf>,
+}
+
+impl Invocation {
+    /// Creates a skeleton invocation for `unit` with the given dependency
+    /// indices; the command line, outputs and environment are filled in
+    /// later via `update_cmd` and `add_output`.
+    pub fn new(unit: &Unit, deps: Vec<usize>) -> Invocation {
+        let id = unit.pkg.package_id();
+        Invocation {
+            package_name: id.name().to_string(),
+            package_version: id.version().clone(),
+            kind: unit.kind,
+            target_kind: unit.target.kind().clone(),
+            deps,
+            outputs: Vec::new(),
+            links: BTreeMap::new(),
+            program: String::new(),
+            args: Vec::new(),
+            env: BTreeMap::new(),
+            cwd: None,
+        }
+    }
+
+    /// Records an output file, plus the hardlink pointing at it when one
+    /// exists.
+    pub fn add_output(&mut self, path: &PathBuf, link: &Option<PathBuf>) {
+        self.outputs.push(path.clone());
+        if let Some(ref link) = *link {
+            self.links.insert(link.clone(), path.clone());
+        }
+    }
+
+    /// Copies the program, working directory, arguments and environment
+    /// out of `cmd`. Fails if any component is not valid Unicode, since
+    /// the plan is later serialized as JSON.
+    pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> {
+        self.program = cmd.get_program()
+            .to_str()
+            .ok_or_else(|| format_err!("unicode program string required"))?
+            .to_string();
+        self.cwd = Some(cmd.get_cwd().unwrap().to_path_buf());
+        for arg in cmd.get_args().iter() {
+            self.args.push(
+                arg.to_str()
+                    .ok_or_else(|| format_err!("unicode argument string required"))?
+                    .to_string(),
+            );
+        }
+        for (var, value) in cmd.get_envs() {
+            // A `None` value presumably marks a variable removed from the
+            // environment; it is skipped rather than serialized.
+            let value = match value {
+                Some(s) => s,
+                None => continue,
+            };
+            self.env.insert(
+                var.clone(),
+                value
+                    .to_str()
+                    .ok_or_else(|| format_err!("unicode environment value required"))?
+                    .to_string(),
+            );
+        }
+        Ok(())
+    }
+}
+
+impl BuildPlan {
+    /// Creates an empty build plan.
+    pub fn new() -> BuildPlan {
+        BuildPlan {
+            invocation_map: BTreeMap::new(),
+            plan: SerializedBuildPlan::new(),
+        }
+    }
+
+    /// Registers an invocation for `unit`. All of the unit's dependencies
+    /// must already be present in the plan, since their indices are looked
+    /// up by build key here.
+    pub fn add(&mut self, cx: &Context, unit: &Unit) -> CargoResult<()> {
+        let id = self.plan.invocations.len();
+        self.invocation_map.insert(unit.buildkey(), id);
+        let deps = cx.dep_targets(&unit)
+            .iter()
+            .map(|dep| self.invocation_map[&dep.buildkey()])
+            .collect();
+        let invocation = Invocation::new(unit, deps);
+        self.plan.invocations.push(invocation);
+        Ok(())
+    }
+
+    /// Fills in the command line and output files of the invocation that
+    /// was previously registered under `invocation_name`.
+    pub fn update(
+        &mut self,
+        invocation_name: &str,
+        cmd: &ProcessBuilder,
+        outputs: &[OutputFile],
+    ) -> CargoResult<()> {
+        let id = self.invocation_map[invocation_name];
+        let invocation = self.plan
+            .invocations
+            .get_mut(id)
+            .ok_or_else(|| internal(format!("couldn't find invocation for {}", invocation_name)))?;
+
+        invocation.update_cmd(cmd)?;
+        for output in outputs.iter() {
+            invocation.add_output(&output.path, &output.hardlink);
+        }
+
+        Ok(())
+    }
+
+    /// Records the plan's input files.
+    pub fn set_inputs(&mut self, inputs: Vec<PathBuf>) {
+        self.plan.inputs = inputs;
+    }
+
+    /// Consumes the plan and prints it as a single JSON document on stdout.
+    pub fn output_plan(self) {
+        let encoded = serde_json::to_string(&self.plan).unwrap();
+        println!("{}", encoded);
+    }
+}
+
+impl SerializedBuildPlan {
+    /// Creates an empty plan with no invocations or inputs.
+    pub fn new() -> SerializedBuildPlan {
+        SerializedBuildPlan {
+            invocations: Vec::new(),
+            inputs: Vec::new(),
+        }
+    }
+}
diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs
new file mode 100644 (file)
index 0000000..72dcbdc
--- /dev/null
@@ -0,0 +1,293 @@
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+use semver::Version;
+
+use core::{Edition, Package, PackageId, Target, TargetKind};
+use util::{self, join_paths, process, CargoResult, CfgExpr, Config, ProcessBuilder};
+use super::BuildContext;
+
+/// A library that needs to be tested with `rustdoc` (collected in
+/// `Compilation::to_doc_test`).
+pub struct Doctest {
+    /// The package being doctested.
+    pub package: Package,
+    /// The target being tested (currently always the package's lib).
+    pub target: Target,
+    /// Extern dependencies needed by `rustdoc`. The path is the location of
+    /// the compiled lib.
+    pub deps: Vec<(String, PathBuf)>,
+}
+
+/// A structure returning the result of a compilation.
+pub struct Compilation<'cfg> {
+    /// An array of all tests created during this compilation.
+    pub tests: Vec<(Package, TargetKind, String, PathBuf)>,
+
+    /// An array of all binaries created.
+    pub binaries: Vec<PathBuf>,
+
+    /// All directories for the output of native build commands.
+    ///
+    /// This is currently used to drive some entries which are added to the
+    /// LD_LIBRARY_PATH as appropriate.
+    ///
+    /// The order should be deterministic.
+    pub native_dirs: BTreeSet<PathBuf>,
+
+    /// Root output directory (for the local package's artifacts)
+    pub root_output: PathBuf,
+
+    /// Output directory for rust dependencies.
+    /// May be for the host or for a specific target.
+    pub deps_output: PathBuf,
+
+    /// Output directory for the rust host dependencies.
+    pub host_deps_output: PathBuf,
+
+    /// The path to rustc's own libstd
+    pub host_dylib_path: Option<PathBuf>,
+
+    /// The path to libstd for the target
+    pub target_dylib_path: Option<PathBuf>,
+
+    /// Extra environment variables that were passed to compilations and should
+    /// be passed to future invocations of programs.
+    pub extra_env: HashMap<PackageId, Vec<(String, String)>>,
+
+    /// Libraries to test with rustdoc.
+    pub to_doc_test: Vec<Doctest>,
+
+    /// Features per package enabled during this compilation.
+    pub cfgs: HashMap<PackageId, HashSet<String>>,
+
+    /// Flags to pass to rustdoc when invoked from cargo test, per package.
+    pub rustdocflags: HashMap<PackageId, Vec<String>>,
+
+    /// The host triple this compilation was performed on.
+    pub host: String,
+    /// The triple compiled for.
+    pub target: String,
+
+    // Cargo configuration, used e.g. to locate `rustdoc` and the running
+    // `cargo` executable when filling process environments.
+    config: &'cfg Config,
+    // Pre-configured `rustc` invocation cloned by `rustc_process`.
+    rustc_process: ProcessBuilder,
+
+    // Runner (program + leading args) wrapped around target executables,
+    // taken from the `target.*.runner` configuration (see `target_runner`
+    // at the bottom of this file).
+    target_runner: Option<(PathBuf, Vec<String>)>,
+}
+
+impl<'cfg> Compilation<'cfg> {
+    /// Builds an empty `Compilation`, preparing the `rustc` process
+    /// template from the build context (wrapper mode, extra env/args and
+    /// the rustfix diagnostic server are all applied here).
+    pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult<Compilation<'cfg>> {
+        // If we're using cargo as a rustc wrapper then we're in a situation
+        // like `cargo fix`. For now just disregard the `RUSTC_WRAPPER` env var
+        // (which is typically set to `sccache` for now). Eventually we'll
+        // probably want to implement `RUSTC_WRAPPER` for `cargo fix`, but we'll
+        // leave that open as a bug for now.
+        let mut rustc = if bcx.build_config.cargo_as_rustc_wrapper {
+            let mut rustc = bcx.rustc.process_no_wrapper();
+            let prog = rustc.get_program().to_owned();
+            rustc.env("RUSTC", prog);
+            rustc.program(env::current_exe()?);
+            rustc
+        } else {
+            bcx.rustc.process()
+        };
+        for (k, v) in bcx.build_config.extra_rustc_env.iter() {
+            rustc.env(k, v);
+        }
+        for arg in bcx.build_config.extra_rustc_args.iter() {
+            rustc.arg(arg);
+        }
+        let srv = bcx.build_config.rustfix_diagnostic_server.borrow();
+        if let Some(server) = &*srv {
+            server.configure(&mut rustc);
+        }
+        Ok(Compilation {
+            native_dirs: BTreeSet::new(), // TODO: deprecated, remove
+            root_output: PathBuf::from("/"),
+            deps_output: PathBuf::from("/"),
+            host_deps_output: PathBuf::from("/"),
+            host_dylib_path: bcx.host_info.sysroot_libdir.clone(),
+            target_dylib_path: bcx.target_info.sysroot_libdir.clone(),
+            tests: Vec::new(),
+            binaries: Vec::new(),
+            extra_env: HashMap::new(),
+            to_doc_test: Vec::new(),
+            cfgs: HashMap::new(),
+            rustdocflags: HashMap::new(),
+            config: bcx.config,
+            rustc_process: rustc,
+            host: bcx.host_triple().to_string(),
+            target: bcx.target_triple().to_string(),
+            target_runner: target_runner(&bcx)?,
+        })
+    }
+
+    /// Returns a process builder for compiling `target` of `pkg` with the
+    /// prepared `rustc`, adding `--edition` for non-2015 editions. See
+    /// `fill_env` for the environment set on the process.
+    pub fn rustc_process(&self, pkg: &Package, target: &Target) -> CargoResult<ProcessBuilder> {
+        let mut p = self.fill_env(self.rustc_process.clone(), pkg, true)?;
+        if target.edition() != Edition::Edition2015 {
+            p.arg(format!("--edition={}", target.edition()));
+        }
+        Ok(p)
+    }
+
+    /// Like `rustc_process`, but for `rustdoc`; see `fill_env`.
+    pub fn rustdoc_process(&self, pkg: &Package, target: &Target) -> CargoResult<ProcessBuilder> {
+        let mut p = self.fill_env(process(&*self.config.rustdoc()?), pkg, false)?;
+        if target.edition() != Edition::Edition2015 {
+            p.arg(format!("--edition={}", target.edition()));
+        }
+        Ok(p)
+    }
+
+    /// Returns a process for running `cmd` against host artifacts; see
+    /// `fill_env`.
+    pub fn host_process<T: AsRef<OsStr>>(
+        &self,
+        cmd: T,
+        pkg: &Package,
+    ) -> CargoResult<ProcessBuilder> {
+        self.fill_env(process(cmd), pkg, true)
+    }
+
+    // Configured runner (program + leading args) for target executables,
+    // if any.
+    fn target_runner(&self) -> &Option<(PathBuf, Vec<String>)> {
+        &self.target_runner
+    }
+
+    /// Returns a process for running `cmd` against target artifacts,
+    /// wrapping it in the configured target runner when one exists; see
+    /// `fill_env`.
+    pub fn target_process<T: AsRef<OsStr>>(
+        &self,
+        cmd: T,
+        pkg: &Package,
+    ) -> CargoResult<ProcessBuilder> {
+        let builder = if let Some((ref runner, ref args)) = *self.target_runner() {
+            let mut builder = process(runner);
+            builder.args(args);
+            builder.arg(cmd);
+            builder
+        } else {
+            process(cmd)
+        };
+        self.fill_env(builder, pkg, false)
+    }
+
+    /// Prepares a new process with an appropriate environment to run against
+    /// the artifacts produced by the build process.
+    ///
+    /// The package argument is also used to configure environment variables as
+    /// well as the working directory of the child process.
+    fn fill_env(
+        &self,
+        mut cmd: ProcessBuilder,
+        pkg: &Package,
+        is_host: bool,
+    ) -> CargoResult<ProcessBuilder> {
+        // Dynamic-library search path: host tools see the host deps dir and
+        // the compiler's own libs; target programs additionally see native
+        // build-script dirs and the root output dir.
+        let mut search_path = if is_host {
+            let mut search_path = vec![self.host_deps_output.clone()];
+            search_path.extend(self.host_dylib_path.clone());
+            search_path
+        } else {
+            let mut search_path =
+                super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output);
+            search_path.push(self.deps_output.clone());
+            search_path.push(self.root_output.clone());
+            search_path.extend(self.target_dylib_path.clone());
+            search_path
+        };
+
+        search_path.extend(util::dylib_path().into_iter());
+        let search_path = join_paths(&search_path, util::dylib_path_envvar())?;
+
+        cmd.env(util::dylib_path_envvar(), &search_path);
+        if let Some(env) = self.extra_env.get(pkg.package_id()) {
+            for &(ref k, ref v) in env {
+                cmd.env(k, v);
+            }
+        }
+
+        let metadata = pkg.manifest().metadata();
+
+        // Expose the path of the running `cargo` executable to the child.
+        let cargo_exe = self.config.cargo_exe()?;
+        cmd.env(::CARGO_ENV, cargo_exe);
+
+        // When adding new environment variables depending on
+        // crate properties which might require rebuild upon change
+        // consider adding the corresponding properties to the hash
+        // in BuildContext::target_metadata()
+        cmd.env("CARGO_MANIFEST_DIR", pkg.root())
+            .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
+            .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
+            .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
+            .env(
+                "CARGO_PKG_VERSION_PRE",
+                &pre_version_component(pkg.version()),
+            )
+            .env("CARGO_PKG_VERSION", &pkg.version().to_string())
+            .env("CARGO_PKG_NAME", &*pkg.name())
+            .env(
+                "CARGO_PKG_DESCRIPTION",
+                metadata.description.as_ref().unwrap_or(&String::new()),
+            )
+            .env(
+                "CARGO_PKG_HOMEPAGE",
+                metadata.homepage.as_ref().unwrap_or(&String::new()),
+            )
+            .env(
+                "CARGO_PKG_REPOSITORY",
+                metadata.repository.as_ref().unwrap_or(&String::new()),
+            )
+            .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
+            .cwd(pkg.root());
+        Ok(cmd)
+    }
+}
+
+/// Renders the pre-release identifiers of `v` as a dot-separated string,
+/// or "" when the version has no pre-release component (used for the
+/// `CARGO_PKG_VERSION_PRE` environment variable).
+fn pre_version_component(v: &Version) -> String {
+    if v.pre.is_empty() {
+        return String::new();
+    }
+
+    let mut ret = String::new();
+
+    for (i, x) in v.pre.iter().enumerate() {
+        if i != 0 {
+            ret.push('.')
+        };
+        ret.push_str(&x.to_string());
+    }
+
+    ret
+}
+
+/// Looks up the configured runner for the build's target triple: first
+/// `target.<triple>.runner`, then any `target.'cfg(...)'.runner` whose cfg
+/// expression matches the target. Errors when more than one cfg entry
+/// matches.
+fn target_runner(bcx: &BuildContext) -> CargoResult<Option<(PathBuf, Vec<String>)>> {
+    let target = bcx.target_triple();
+
+    // try target.{}.runner
+    let key = format!("target.{}.runner", target);
+    if let Some(v) = bcx.config.get_path_and_args(&key)? {
+        return Ok(Some(v.val));
+    }
+
+    // try target.'cfg(...)'.runner
+    if let Some(target_cfg) = bcx.target_info.cfg() {
+        if let Some(table) = bcx.config.get_table("target")? {
+            let mut matching_runner = None;
+
+            for key in table.val.keys() {
+                if CfgExpr::matches_key(key, target_cfg) {
+                    let key = format!("target.{}.runner", key);
+                    if let Some(runner) = bcx.config.get_path_and_args(&key)? {
+                        // more than one match, error out
+                        if matching_runner.is_some() {
+                            bail!("several matching instances of `target.'cfg(..)'.runner` \
+                                   in `.cargo/config`")
+                        }
+
+                        matching_runner = Some(runner.val);
+                    }
+                }
+            }
+
+            return Ok(matching_runner);
+        }
+    }
+
+    Ok(None)
+}
diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs
new file mode 100644 (file)
index 0000000..cbb479e
--- /dev/null
@@ -0,0 +1,453 @@
+use std::collections::HashMap;
+use std::env;
+use std::fmt;
+use std::hash::{Hash, Hasher, SipHasher};
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use lazycell::LazyCell;
+
+use super::{BuildContext, Context, FileFlavor, Kind, Layout, Unit};
+use core::{TargetKind, Workspace};
+use util::{self, CargoResult};
+
+/// A hash of a `Unit`'s inputs (package id, features, profile, target kind,
+/// rustc version, ...) used to make output filenames unique; see
+/// `compute_metadata` for exactly what gets hashed.
+#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)]
+pub struct Metadata(u64);
+
+impl fmt::Display for Metadata {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{:016x}", self.0)
+    }
+}
+
+/// Precomputed information about where every `Unit`'s compiler output lands
+/// on disk, covering both the host layout and an optional cross-compile
+/// target layout.
+pub struct CompilationFiles<'a, 'cfg: 'a> {
+    /// The target directory layout for the host (and target if it is the same as host)
+    pub(super) host: Layout,
+    /// The target directory layout for the target (if different from the host)
+    pub(super) target: Option<Layout>,
+    /// Additional directory to include a copy of the outputs.
+    export_dir: Option<PathBuf>,
+    /// The root targets requested by the user on the command line (does not
+    /// include dependencies).
+    roots: Vec<Unit<'a>>,
+    /// The workspace being compiled; its root stabilizes package-id hashes.
+    ws: &'a Workspace<'cfg>,
+    /// Metadata hash per unit; `None` when the output filename must not
+    /// carry a hash (see `compute_metadata`).
+    metas: HashMap<Unit<'a>, Option<Metadata>>,
+    /// For each Unit, a list of all files produced (computed lazily).
+    outputs: HashMap<Unit<'a>, LazyCell<Arc<Vec<OutputFile>>>>,
+}
+
+/// A single file a compilation step will produce, plus where (if anywhere)
+/// it should be hard-linked for predictable consumption.
+#[derive(Debug)]
+pub struct OutputFile {
+    /// File name that will be produced by the build process (in `deps`).
+    pub path: PathBuf,
+    /// If it should be linked into `target`, and what it should be called
+    /// (e.g. without metadata).
+    pub hardlink: Option<PathBuf>,
+    /// Type of the file (library / debug symbol / else).
+    pub flavor: FileFlavor,
+}
+
+impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
+    /// Build the `CompilationFiles` for this compilation: eagerly computes
+    /// the metadata hash for every root unit and all of its transitive
+    /// dependencies, then sets up an (initially empty) lazy output-file
+    /// slot for each discovered unit.
+    pub(super) fn new(
+        roots: &[Unit<'a>],
+        host: Layout,
+        target: Option<Layout>,
+        export_dir: Option<PathBuf>,
+        ws: &'a Workspace<'cfg>,
+        cx: &Context<'a, 'cfg>,
+    ) -> CompilationFiles<'a, 'cfg> {
+        let mut metas = HashMap::new();
+        for unit in roots {
+            // `metadata_of` recursively fills `metas` for dependencies too.
+            metadata_of(unit, cx, &mut metas);
+        }
+        let outputs = metas
+            .keys()
+            .cloned()
+            .map(|unit| (unit, LazyCell::new()))
+            .collect();
+        CompilationFiles {
+            ws,
+            host,
+            target,
+            export_dir,
+            roots: roots.to_vec(),
+            metas,
+            outputs,
+        }
+    }
+
+    /// Returns the directory layout for the given compilation `kind`:
+    /// `Kind::Host` (e.g. plugins and build scripts) always uses the host
+    /// layout; `Kind::Target` uses the target layout, falling back to the
+    /// host layout when host and target are the same.
+    pub fn layout(&self, kind: Kind) -> &Layout {
+        match kind {
+            Kind::Host => &self.host,
+            Kind::Target => self.target.as_ref().unwrap_or(&self.host),
+        }
+    }
+
+    /// Get the metadata for a target in a specific profile.
+    /// We build to the path: "{crate_name}-{target_metadata}"
+    /// We use a linking step to link/copy to a predictable filename
+    /// like `target/debug/libfoo.{a,so,rlib}` and such.
+    ///
+    /// Returns `None` for units whose output filename must stay hash-free
+    /// (see `compute_metadata`).
+    pub fn metadata(&self, unit: &Unit<'a>) -> Option<Metadata> {
+        self.metas[unit].clone()
+    }
+
+    /// Get the short hash based only on the PackageId.
+    /// Used for the metadata when `metadata` returns `None`; the package id
+    /// is hashed via `stable_hash` relative to the workspace root.
+    pub fn target_short_hash(&self, unit: &Unit) -> String {
+        let hashable = unit.pkg.package_id().stable_hash(self.ws.root());
+        util::short_hash(&hashable)
+    }
+
+    /// Returns the appropriate output directory for the specified package
+    /// and target: `doc/` for documentation, the build-script dir for
+    /// custom build scripts, `examples/` for examples, and `deps/` for
+    /// everything else.
+    pub fn out_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        if unit.mode.is_doc() {
+            // Documentation lives in `doc/`, a sibling of the layout root.
+            self.layout(unit.kind).root().parent().unwrap().join("doc")
+        } else if unit.target.is_custom_build() {
+            self.build_script_dir(unit)
+        } else if unit.target.is_example() {
+            self.layout(unit.kind).examples().to_path_buf()
+        } else {
+            self.deps_dir(unit).to_path_buf()
+        }
+    }
+
+    /// Additional directory (if any) that should receive a copy of the
+    /// final outputs.
+    pub fn export_dir(&self) -> Option<PathBuf> {
+        self.export_dir.clone()
+    }
+
+    pub fn pkg_dir(&self, unit: &Unit<'a>) -> String {
+        let name = unit.pkg.package_id().name();
+        match self.metas[unit] {
+            Some(ref meta) => format!("{}-{}", name, meta),
+            None => format!("{}-{}", name, self.target_short_hash(unit)),
+        }
+    }
+
+    /// Return the root of the build output tree (the host layout's
+    /// destination directory).
+    pub fn target_root(&self) -> &Path {
+        self.host.dest()
+    }
+
+    /// The host layout's `deps/` directory.
+    pub fn host_deps(&self) -> &Path {
+        self.host.deps()
+    }
+
+    /// Returns the directories where Rust crate dependencies are found for the
+    /// specified unit (host or target layout, as appropriate for its kind).
+    pub fn deps_dir(&self, unit: &Unit) -> &Path {
+        self.layout(unit.kind).deps()
+    }
+
+    /// Directory where `unit`'s fingerprint files are stored:
+    /// `{fingerprint}/{pkg_dir}` under the unit's layout.
+    pub fn fingerprint_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        let dir = self.pkg_dir(unit);
+        self.layout(unit.kind).fingerprint().join(dir)
+    }
+
+    /// Returns the directory where the *compiled* build script for `unit`
+    /// is placed (not its execution output). Build scripts are always
+    /// compiled for the host, hence `Kind::Host`.
+    pub fn build_script_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        assert!(unit.target.is_custom_build());
+        assert!(!unit.mode.is_run_custom_build());
+        let dir = self.pkg_dir(unit);
+        self.layout(Kind::Host).build().join(dir)
+    }
+
+    /// Returns the directory where `unit`'s build script writes its output
+    /// when *executed* (`.../build/{pkg_dir}/out`); this path is exported to
+    /// dependents as `OUT_DIR`.
+    pub fn build_script_out_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        assert!(unit.target.is_custom_build());
+        assert!(unit.mode.is_run_custom_build());
+        let dir = self.pkg_dir(unit);
+        self.layout(unit.kind).build().join(dir).join("out")
+    }
+
+    /// Returns the file stem for a given target/profile combo (with metadata):
+    /// `"{crate_name}-{metadata}"`, or the plain bin stem when the unit
+    /// carries no metadata hash.
+    pub fn file_stem(&self, unit: &Unit<'a>) -> String {
+        match self.metas[unit] {
+            Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata),
+            None => self.bin_stem(unit),
+        }
+    }
+
+    /// All files `unit` will produce, computed on first request and then
+    /// cached in the per-unit `LazyCell`.
+    pub(super) fn outputs(
+        &self,
+        unit: &Unit<'a>,
+        bcx: &BuildContext<'a, 'cfg>,
+    ) -> CargoResult<Arc<Vec<OutputFile>>> {
+        self.outputs[unit]
+            .try_borrow_with(|| self.calc_outputs(unit, bcx))
+            .map(Arc::clone)
+    }
+
+    /// Returns the bin stem for a given target (without metadata): the raw
+    /// target name when the target kind allows underscores, otherwise the
+    /// crate name.
+    fn bin_stem(&self, unit: &Unit) -> String {
+        if unit.target.allows_underscores() {
+            unit.target.name().to_string()
+        } else {
+            unit.target.crate_name()
+        }
+    }
+
+    /// Returns a tuple with the directory and name of the hard link we expect
+    /// our target to be copied to. Eg, file_stem may be out_dir/deps/foo-abcdef
+    /// and link_stem would be out_dir/foo
+    /// This function returns it in two parts so the caller can add prefix/suffix
+    /// to filename separately
+    ///
+    /// Returns an Option because in some cases we don't want to link
+    /// (eg a dependent lib)
+    fn link_stem(&self, unit: &Unit<'a>) -> Option<(PathBuf, String)> {
+        let out_dir = self.out_dir(unit);
+        let bin_stem = self.bin_stem(unit);
+        let file_stem = self.file_stem(unit);
+
+        // We currently only lift files up from the `deps` directory. If
+        // it was compiled into something like `example/` or `doc/` then
+        // we don't want to link it up.
+        if out_dir.ends_with("deps") {
+            // Don't lift up library dependencies
+            if unit.target.is_bin() || self.roots.contains(unit) {
+                Some((
+                    out_dir.parent().unwrap().to_owned(),
+                    // Test binaries keep their hashed name so parallel test
+                    // targets in one crate don't collide.
+                    if unit.mode.is_any_test() {
+                        file_stem
+                    } else {
+                        bin_stem
+                    },
+                ))
+            } else {
+                None
+            }
+        } else if bin_stem == file_stem {
+            // Nothing to rename — the file already has its final name.
+            None
+        } else if out_dir.ends_with("examples") || out_dir.parent().unwrap().ends_with("build") {
+            Some((out_dir, bin_stem))
+        } else {
+            None
+        }
+    }
+
+    /// Compute the complete list of files `unit` will produce, including
+    /// where each should be hard-linked. Errors if the target triple cannot
+    /// produce any of the unit's crate types.
+    fn calc_outputs(
+        &self,
+        unit: &Unit<'a>,
+        bcx: &BuildContext<'a, 'cfg>,
+    ) -> CargoResult<Arc<Vec<OutputFile>>> {
+        let out_dir = self.out_dir(unit);
+        let file_stem = self.file_stem(unit);
+        let link_stem = self.link_stem(unit);
+        // Host-compiled units use the host's file-type info; everything
+        // else uses the target's.
+        let info = if unit.target.for_host() {
+            &bcx.host_info
+        } else {
+            &bcx.target_info
+        };
+
+        let mut ret = Vec::new();
+        let mut unsupported = Vec::new();
+        {
+            if unit.mode.is_check() {
+                // This is not quite correct for non-lib targets.  rustc
+                // currently does not emit rmeta files, so there is nothing to
+                // check for!  See #3624.
+                let path = out_dir.join(format!("lib{}.rmeta", file_stem));
+                let hardlink = link_stem
+                    .clone()
+                    .map(|(ld, ls)| ld.join(format!("lib{}.rmeta", ls)));
+                ret.push(OutputFile {
+                    path,
+                    hardlink,
+                    flavor: FileFlavor::Linkable,
+                });
+            } else {
+                // Push one `OutputFile` per file that `crate_type` produces
+                // on this target, or record the crate type as unsupported.
+                let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> {
+                    let crate_type = if crate_type == "lib" {
+                        "rlib"
+                    } else {
+                        crate_type
+                    };
+                    let file_types = info.file_types(
+                        crate_type,
+                        flavor,
+                        unit.target.kind(),
+                        bcx.target_triple(),
+                    )?;
+
+                    match file_types {
+                        Some(types) => for file_type in types {
+                            let path = out_dir.join(file_type.filename(&file_stem));
+                            let hardlink = link_stem
+                                .as_ref()
+                                .map(|&(ref ld, ref ls)| ld.join(file_type.filename(ls)));
+                            ret.push(OutputFile {
+                                path,
+                                hardlink,
+                                flavor: file_type.flavor,
+                            });
+                        },
+                        // not supported, don't worry about it
+                        None => {
+                            unsupported.push(crate_type.to_string());
+                        }
+                    }
+                    Ok(())
+                };
+                //info!("{:?}", unit);
+                match *unit.target.kind() {
+                    TargetKind::Bin
+                    | TargetKind::CustomBuild
+                    | TargetKind::ExampleBin
+                    | TargetKind::Bench
+                    | TargetKind::Test => {
+                        add("bin", FileFlavor::Normal)?;
+                    }
+                    // A lib compiled as part of a test is a test executable.
+                    TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.mode.is_any_test() => {
+                        add("bin", FileFlavor::Normal)?;
+                    }
+                    TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => {
+                        for kind in kinds {
+                            add(
+                                kind.crate_type(),
+                                if kind.linkable() {
+                                    FileFlavor::Linkable
+                                } else {
+                                    FileFlavor::Normal
+                                },
+                            )?;
+                        }
+                    }
+                }
+            }
+        }
+        if ret.is_empty() {
+            if !unsupported.is_empty() {
+                bail!(
+                    "cannot produce {} for `{}` as the target `{}` \
+                     does not support these crate types",
+                    unsupported.join(", "),
+                    unit.pkg,
+                    bcx.target_triple()
+                )
+            }
+            bail!(
+                "cannot compile `{}` as the target `{}` does not \
+                 support any of the output crate types",
+                unit.pkg,
+                bcx.target_triple()
+            );
+        }
+        info!("Target filenames: {:?}", ret);
+
+        Ok(Arc::new(ret))
+    }
+}
+
+/// Memoized, recursive metadata lookup: ensures `metas` has an entry for
+/// `unit` and all of its transitive dependencies, then returns `unit`'s
+/// entry.
+fn metadata_of<'a, 'cfg>(
+    unit: &Unit<'a>,
+    cx: &Context<'a, 'cfg>,
+    metas: &mut HashMap<Unit<'a>, Option<Metadata>>,
+) -> Option<Metadata> {
+    if !metas.contains_key(unit) {
+        let meta = compute_metadata(unit, cx, metas);
+        metas.insert(*unit, meta);
+        // Make sure every dependency also has a cached entry.
+        for unit in cx.dep_targets(unit) {
+            metadata_of(&unit, cx, metas);
+        }
+    }
+    metas[unit].clone()
+}
+
+/// Compute the filename-hash for `unit`, or `None` when its output filename
+/// must stay hash-free (dylibs/cdylibs, and bins targeting wasm32 — with the
+/// exceptions documented below). The set and order of values hashed here
+/// directly determines artifact caching: do not reorder casually.
+fn compute_metadata<'a, 'cfg>(
+    unit: &Unit<'a>,
+    cx: &Context<'a, 'cfg>,
+    metas: &mut HashMap<Unit<'a>, Option<Metadata>>,
+) -> Option<Metadata> {
+    // No metadata for dylibs because of a couple issues
+    // - OSX encodes the dylib name in the executable
+    // - Windows rustc multiple files of which we can't easily link all of them
+    //
+    // No metadata for bin because of an issue
+    // - wasm32 rustc/emcc encodes the .wasm name in the .js (rust-lang/cargo#4535)
+    //
+    // Two exceptions
+    // 1) Upstream dependencies (we aren't exporting + need to resolve name conflict)
+    // 2) __CARGO_DEFAULT_LIB_METADATA env var
+    //
+    // Note, though, that the compiler's build system at least wants
+    // path dependencies (eg libstd) to have hashes in filenames. To account for
+    // that we have an extra hack here which reads the
+    // `__CARGO_DEFAULT_LIB_METADATA` environment variable and creates a
+    // hash in the filename if that's present.
+    //
+    // This environment variable should not be relied on! It's
+    // just here for rustbuild. We need a more principled method
+    // doing this eventually.
+    let bcx = &cx.bcx;
+    let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
+    if !(unit.mode.is_any_test() || unit.mode.is_check())
+        && (unit.target.is_dylib() || unit.target.is_cdylib()
+            || (unit.target.is_bin() && bcx.target_triple().starts_with("wasm32-")))
+        && unit.pkg.package_id().source_id().is_path()
+        && __cargo_default_lib_metadata.is_err()
+    {
+        return None;
+    }
+
+    // Fixed keys keep the hash deterministic across cargo invocations.
+    let mut hasher = SipHasher::new_with_keys(0, 0);
+
+    // Unique metadata per (name, source, version) triple. This'll allow us
+    // to pull crates from anywhere w/o worrying about conflicts
+    unit.pkg
+        .package_id()
+        .stable_hash(bcx.ws.root())
+        .hash(&mut hasher);
+
+    // Add package properties which map to environment variables
+    // exposed by Cargo
+    let manifest_metadata = unit.pkg.manifest().metadata();
+    manifest_metadata.authors.hash(&mut hasher);
+    manifest_metadata.description.hash(&mut hasher);
+    manifest_metadata.homepage.hash(&mut hasher);
+
+    // Also mix in enabled features to our metadata. This'll ensure that
+    // when changing feature sets each lib is separately cached.
+    bcx.resolve
+        .features_sorted(unit.pkg.package_id())
+        .hash(&mut hasher);
+
+    // Mix in the target-metadata of all the dependencies of this target
+    {
+        // Sorted so the hash is independent of dependency iteration order.
+        let mut deps_metadata = cx.dep_targets(unit)
+            .iter()
+            .map(|dep| metadata_of(dep, cx, metas))
+            .collect::<Vec<_>>();
+        deps_metadata.sort();
+        deps_metadata.hash(&mut hasher);
+    }
+
+    // Throw in the profile we're compiling with. This helps caching
+    // panic=abort and panic=unwind artifacts, additionally with various
+    // settings like debuginfo and whatnot.
+    unit.profile.hash(&mut hasher);
+    unit.mode.hash(&mut hasher);
+    if let Some(ref args) = bcx.extra_args_for(unit) {
+        args.hash(&mut hasher);
+    }
+
+    // Artifacts compiled for the host should have a different metadata
+    // piece than those compiled for the target, so make sure we throw in
+    // the unit's `kind` as well
+    unit.kind.hash(&mut hasher);
+
+    // Finally throw in the target name/kind. This ensures that concurrent
+    // compiles of targets in the same crate don't collide.
+    unit.target.name().hash(&mut hasher);
+    unit.target.kind().hash(&mut hasher);
+
+    // A compiler upgrade invalidates all cached artifacts.
+    bcx.rustc.verbose_version.hash(&mut hasher);
+
+    // Seed the contents of __CARGO_DEFAULT_LIB_METADATA to the hasher if present.
+    // This should be the release channel, to get a different hash for each channel.
+    if let Ok(ref channel) = __cargo_default_lib_metadata {
+        channel.hash(&mut hasher);
+    }
+    Some(Metadata(hasher.finish()))
+}
diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs
new file mode 100644 (file)
index 0000000..225e6eb
--- /dev/null
@@ -0,0 +1,535 @@
+#![allow(deprecated)]
+use std::collections::{HashMap, HashSet};
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::path::PathBuf;
+use std::sync::Arc;
+use std::cmp::Ordering;
+
+use jobserver::Client;
+
+use core::{Package, PackageId, Resolve, Target};
+use core::compiler::compilation;
+use core::profiles::Profile;
+use util::errors::{CargoResult, CargoResultExt};
+use util::{internal, profile, Config, short_hash};
+
+use super::custom_build::{self, BuildDeps, BuildScripts, BuildState};
+use super::fingerprint::Fingerprint;
+use super::job_queue::JobQueue;
+use super::layout::Layout;
+use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind};
+use super::build_plan::BuildPlan;
+
+mod unit_dependencies;
+use self::unit_dependencies::build_unit_dependencies;
+
+mod compilation_files;
+pub use self::compilation_files::{Metadata, OutputFile};
+use self::compilation_files::CompilationFiles;
+
+/// All information needed to define a Unit.
+///
+/// A unit is an object that has enough information so that cargo knows how to build it.
+/// For example, if your package has dependencies, then every dependency will be built as a library
+/// unit. If your package is a library, then it will be built as a library unit as well, or if it
+/// is a binary with `main.rs`, then a binary will be output. There are also separate unit types
+/// for `test`ing and `check`ing, amongst others.
+///
+/// The unit also holds information about all possible metadata about the package in `pkg`.
+///
+/// A unit needs to know extra information in addition to the type and root source file. For
+/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know
+/// whether you want a debug or release build. There is enough information in this struct to figure
+/// all that out.
+///
+/// `Unit` is small and `Copy`, and is used pervasively as a hash-map key
+/// throughout the compiler backend.
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
+pub struct Unit<'a> {
+    /// Information about available targets, which files to include/exclude, etc. Basically stuff in
+    /// `Cargo.toml`.
+    pub pkg: &'a Package,
+    /// Information about the specific target to build, out of the possible targets in `pkg`. Not
+    /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a
+    /// build.
+    pub target: &'a Target,
+    /// The profile contains information about *how* the build should be run, including debug
+    /// level, etc.
+    pub profile: Profile,
+    /// Whether this compilation unit is for the host or target architecture.
+    ///
+    /// For example, when
+    /// cross compiling and using a custom build script, the build script needs to be compiled for
+    /// the host architecture so the host rustc can use it (when compiling to the target
+    /// architecture).
+    pub kind: Kind,
+    /// The "mode" this unit is being compiled for.  See `CompileMode` for
+    /// more details.
+    pub mode: CompileMode,
+}
+
+impl<'a> Unit<'a> {
+    pub fn buildkey(&self) -> String {
+        format!("{}-{}", self.pkg.name(), short_hash(self))
+       }
+}
+
+impl<'a> Ord for Unit<'a> {
+    // Total order via the `buildkey` string, used to get a deterministic
+    // ordering (e.g. `dep_targets` sorts its result with it). Note that
+    // every comparison allocates both key strings.
+    fn cmp(&self, other: &Unit) -> Ordering {
+        self.buildkey().cmp(&other.buildkey())
+    }
+}
+
+impl<'a> PartialOrd for Unit<'a> {
+    // Delegates to the total order defined by `Ord`.
+    fn partial_cmp(&self, other: &Unit) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+/// Mutable state threaded through the compilation of a set of units; wraps
+/// the immutable `BuildContext`.
+pub struct Context<'a, 'cfg: 'a> {
+    /// Immutable build inputs (config, resolve, target info, ...).
+    pub bcx: &'a BuildContext<'a, 'cfg>,
+    /// Result being accumulated; returned by `compile`.
+    pub compilation: Compilation<'cfg>,
+    /// Shared output state of executed (or overridden) build scripts.
+    pub build_state: Arc<BuildState>,
+    /// (package, kind) pairs whose build-script execution is overridden;
+    /// such units report no dependencies (see `dep_targets`).
+    pub build_script_overridden: HashSet<(PackageId, Kind)>,
+    pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
+    pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
+    pub compiled: HashSet<Unit<'a>>,
+    pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
+    pub links: Links<'a>,
+    /// Jobserver client used to bound build parallelism (GNU-make compatible).
+    pub jobserver: Client,
+    /// Packages named on the command line (not their dependencies).
+    primary_packages: HashSet<&'a PackageId>,
+    /// Precomputed dependency edges between units.
+    unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
+    /// Output-layout info; `None` until `prepare_units` runs.
+    files: Option<CompilationFiles<'a, 'cfg>>,
+    package_cache: HashMap<&'a PackageId, &'a Package>,
+}
+
+impl<'a, 'cfg> Context<'a, 'cfg> {
+    /// Construct a fresh `Context`, connecting to an inherited jobserver
+    /// when one exists in the environment or creating a new one otherwise.
+    pub fn new(config: &'cfg Config, bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult<Self> {
+        // Load up the jobserver that we'll use to manage our parallelism. This
+        // is the same as the GNU make implementation of a jobserver, and
+        // intentionally so! It's hoped that we can interact with GNU make and
+        // all share the same jobserver.
+        //
+        // Note that if we don't have a jobserver in our environment then we
+        // create our own, and we create it with `n-1` tokens because one token
+        // is ourself, a running process.
+        let jobserver = match config.jobserver_from_env() {
+            Some(c) => c.clone(),
+            None => Client::new(bcx.build_config.jobs as usize - 1)
+                .chain_err(|| "failed to create jobserver")?,
+        };
+
+        Ok(Self {
+            bcx,
+            compilation: Compilation::new(bcx)?,
+            build_state: Arc::new(BuildState::new(&bcx.host_config, &bcx.target_config)),
+            fingerprints: HashMap::new(),
+            compiled: HashSet::new(),
+            build_scripts: HashMap::new(),
+            build_explicit_deps: HashMap::new(),
+            links: Links::new(),
+            jobserver,
+            build_script_overridden: HashSet::new(),
+
+            primary_packages: HashSet::new(),
+            unit_dependencies: HashMap::new(),
+            files: None,
+            package_cache: HashMap::new(),
+        })
+    }
+
+    /// Run the whole compilation: queue a job per unit, execute the queue,
+    /// then collect results (binaries, tests, doctests, cfgs, env) into the
+    /// returned `Compilation`, a mapping of the root package plus its
+    /// immediate dependencies to where the compiled libraries are all
+    /// located.
+    pub fn compile(
+        mut self,
+        units: &[Unit<'a>],
+        export_dir: Option<PathBuf>,
+        exec: &Arc<Executor>,
+    ) -> CargoResult<Compilation<'cfg>> {
+        let mut queue = JobQueue::new(self.bcx);
+        let mut plan = BuildPlan::new();
+        let build_plan = self.bcx.build_config.build_plan;
+        self.prepare_units(export_dir, units)?;
+        self.prepare()?;
+        custom_build::build_map(&mut self, units)?;
+
+        for unit in units.iter() {
+            // Build up a list of pending jobs, each of which represent
+            // compiling a particular package. No actual work is executed as
+            // part of this, that's all done next as part of the `execute`
+            // function which will run everything in order with proper
+            // parallelism.
+            let force_rebuild = self.bcx.build_config.force_rebuild;
+            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
+        }
+
+        // Now that we've figured out everything that we're going to do, do it!
+        queue.execute(&mut self, &mut plan)?;
+
+        if build_plan {
+            plan.set_inputs(self.build_plan_inputs()?);
+            plan.output_plan();
+        }
+
+        // Collect the results of the build into `self.compilation`.
+        for unit in units.iter() {
+            for output in self.outputs(unit)?.iter() {
+                if output.flavor == FileFlavor::DebugInfo {
+                    continue;
+                }
+
+                // Prefer the hard-linked (un-hashed) name when one exists.
+                let bindst = match output.hardlink {
+                    Some(ref link_dst) => link_dst,
+                    None => &output.path,
+                };
+
+                if unit.mode == CompileMode::Test {
+                    self.compilation.tests.push((
+                        unit.pkg.clone(),
+                        unit.target.kind().clone(),
+                        unit.target.name().to_string(),
+                        output.path.clone(),
+                    ));
+                } else if unit.target.is_bin() || unit.target.is_bin_example() {
+                    self.compilation.binaries.push(bindst.clone());
+                }
+            }
+
+            // Expose each run build script's OUT_DIR via the package's env.
+            for dep in self.dep_targets(unit).iter() {
+                if !unit.target.is_lib() {
+                    continue;
+                }
+
+                if dep.mode.is_run_custom_build() {
+                    let out_dir = self.files().build_script_out_dir(dep).display().to_string();
+                    self.compilation
+                        .extra_env
+                        .entry(dep.pkg.package_id().clone())
+                        .or_insert_with(Vec::new)
+                        .push(("OUT_DIR".to_string(), out_dir));
+                }
+            }
+
+            if unit.mode == CompileMode::Doctest {
+                // Note that we can *only* doctest rlib outputs here.  A
+                // staticlib output cannot be linked by the compiler (it just
+                // doesn't do that). A dylib output, however, can be linked by
+                // the compiler, but will always fail. Currently all dylibs are
+                // built as "static dylibs" where the standard library is
+                // statically linked into the dylib. The doc tests fail,
+                // however, for now as they try to link the standard library
+                // dynamically as well, causing problems. As a result we only
+                // pass `--extern` for rlib deps and skip out on all other
+                // artifacts.
+                let mut doctest_deps = Vec::new();
+                for dep in self.dep_targets(unit) {
+                    if dep.target.is_lib() && dep.mode == CompileMode::Build {
+                        let outputs = self.outputs(&dep)?;
+                        let outputs = outputs.iter().filter(|output| {
+                            output.path.extension() == Some(OsStr::new("rlib"))
+                                || dep.target.for_host()
+                        });
+                        for output in outputs {
+                            doctest_deps.push((
+                                self.bcx.extern_crate_name(unit, &dep)?,
+                                output.path.clone(),
+                            ));
+                        }
+                    }
+                }
+                self.compilation.to_doc_test.push(compilation::Doctest {
+                    package: unit.pkg.clone(),
+                    target: unit.target.clone(),
+                    deps: doctest_deps,
+                });
+            }
+
+            // Record the `feature="..."` cfgs and rustdoc flags per package.
+            let feats = self.bcx.resolve.features(unit.pkg.package_id());
+            if !feats.is_empty() {
+                self.compilation
+                    .cfgs
+                    .entry(unit.pkg.package_id().clone())
+                    .or_insert_with(|| {
+                        feats
+                            .iter()
+                            .map(|feat| format!("feature=\"{}\"", feat))
+                            .collect()
+                    });
+            }
+            let rustdocflags = self.bcx.rustdocflags_args(unit)?;
+            if !rustdocflags.is_empty() {
+                self.compilation
+                    .rustdocflags
+                    .entry(unit.pkg.package_id().clone())
+                    .or_insert(rustdocflags);
+            }
+
+            super::output_depinfo(&mut self, unit)?;
+        }
+
+        // Fold build-script outputs (cfgs, env vars, native library search
+        // paths) into the final compilation result.
+        for (&(ref pkg, _), output) in self.build_state.outputs.lock().unwrap().iter() {
+            self.compilation
+                .cfgs
+                .entry(pkg.clone())
+                .or_insert_with(HashSet::new)
+                .extend(output.cfgs.iter().cloned());
+
+            self.compilation
+                .extra_env
+                .entry(pkg.clone())
+                .or_insert_with(Vec::new)
+                .extend(output.env.iter().cloned());
+
+            for dir in output.library_paths.iter() {
+                self.compilation.native_dirs.insert(dir.clone());
+            }
+        }
+        Ok(self.compilation)
+    }
+
+    /// Compute the directory layouts and the unit dependency graph for
+    /// `units`, and build the `CompilationFiles` from them. Must run before
+    /// `prepare`/`files`.
+    pub fn prepare_units(
+        &mut self,
+        export_dir: Option<PathBuf>,
+        units: &[Unit<'a>],
+    ) -> CargoResult<()> {
+        let dest = if self.bcx.build_config.release {
+            "release"
+        } else {
+            "debug"
+        };
+        let host_layout = Layout::new(self.bcx.ws, None, dest)?;
+        // A separate layout is only needed when an explicit target triple
+        // was requested.
+        let target_layout = match self.bcx.build_config.requested_target.as_ref() {
+            Some(target) => Some(Layout::new(self.bcx.ws, Some(target), dest)?),
+            None => None,
+        };
+        self.primary_packages.extend(units.iter().map(|u| u.pkg.package_id()));
+
+        build_unit_dependencies(
+            units,
+            self.bcx,
+            &mut self.unit_dependencies,
+            &mut self.package_cache,
+        )?;
+        let files = CompilationFiles::new(
+            units,
+            host_layout,
+            target_layout,
+            export_dir,
+            self.bcx.ws,
+            self,
+        );
+        self.files = Some(files);
+        Ok(())
+    }
+
+    /// Prepare this context, ensuring that all filesystem directories are in
+    /// place.
+    pub fn prepare(&mut self) -> CargoResult<()> {
+        let _p = profile::start("preparing layout");
+
+        self.files_mut()
+            .host
+            .prepare()
+            .chain_err(|| internal("couldn't prepare build directories"))?;
+        if let Some(ref mut target) = self.files.as_mut().unwrap().target {
+            target
+                .prepare()
+                .chain_err(|| internal("couldn't prepare build directories"))?;
+        }
+
+        self.compilation.host_deps_output = self.files_mut().host.deps().to_path_buf();
+
+        // Final outputs point at the target layout when cross compiling,
+        // otherwise at the host layout.
+        let files = self.files.as_ref().unwrap();
+        let layout = files.target.as_ref().unwrap_or(&files.host);
+        self.compilation.root_output = layout.dest().to_path_buf();
+        self.compilation.deps_output = layout.deps().to_path_buf();
+        Ok(())
+    }
+
+    /// Compilation file-layout info. Panics if called before
+    /// `prepare_units` has populated it.
+    pub fn files(&self) -> &CompilationFiles<'a, 'cfg> {
+        self.files.as_ref().unwrap()
+    }
+
+    /// Mutable access to the file-layout info; same precondition as `files`.
+    fn files_mut(&mut self) -> &mut CompilationFiles<'a, 'cfg> {
+        self.files.as_mut().unwrap()
+    }
+
+    /// Return the files that the given target for the given profile will
+    /// generate, as a list of `OutputFile`s:
+    ///
+    ///  - `path`: filename rustc compiles to (often has a metadata suffix).
+    ///  - `hardlink`: optional file to link/copy the result to (without metadata suffix)
+    ///  - `flavor`: kind of file (linkable library, debug info, ...)
+    pub fn outputs(&mut self, unit: &Unit<'a>) -> CargoResult<Arc<Vec<OutputFile>>> {
+        self.files.as_ref().unwrap().outputs(unit, self.bcx)
+    }
+
+    /// For a package, return all targets which are registered as dependencies
+    /// for that package. The result is sorted for a deterministic order.
+    // TODO: this ideally should be `-> &[Unit<'a>]`
+    pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
+        // If this build script's execution has been overridden then we don't
+        // actually depend on anything, we've reached the end of the dependency
+        // chain as we've got all the info we're gonna get.
+        //
+        // Note there's a subtlety about this piece of code! The
+        // `build_script_overridden` map here is populated in
+        // `custom_build::build_map` which you need to call before inspecting
+        // dependencies. However, that code itself calls this method and
+        // gets a full pre-filtered set of dependencies. This is not super
+        // obvious, and clear, but it does work at the moment.
+        if unit.target.is_custom_build() {
+            let key = (unit.pkg.package_id().clone(), unit.kind);
+            if self.build_script_overridden.contains(&key) {
+                return Vec::new();
+            }
+        }
+        let mut deps = self.unit_dependencies[unit].clone();
+        deps.sort();
+        deps
+    }
+
+    /// Returns the extra rustc flags (`-C incremental=<dir>`) needed to turn
+    /// on incremental compilation for `unit`, or an empty list when
+    /// incremental compilation is disabled or not applicable.
+    pub fn incremental_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        // There's a number of ways to configure incremental compilation right
+        // now. In order of descending priority (first is highest priority) we
+        // have:
+        //
+        // * `CARGO_INCREMENTAL` - this is blanket used unconditionally to turn
+        //   on/off incremental compilation for any cargo subcommand. We'll
+        //   respect this if set.
+        // * `build.incremental` - this blanket key in `.cargo/config` can
+        //   configure, system-wide, whether incremental compilation is
+        //   enabled. Note that setting this to `true` will not actually affect
+        //   all builds though. For example a `true` value doesn't enable
+        //   release incremental builds, only dev incremental builds. This can
+        //   be useful to globally disable incremental compilation like
+        //   `CARGO_INCREMENTAL`.
+        // * `profile.dev.incremental` - in `Cargo.toml` specific profiles can
+        //   be configured to enable/disable incremental compilation. This can
+        //   be primarily used to disable incremental when buggy for a package.
+        // * Finally, each profile has a default for whether it will enable
+        //   incremental compilation or not. Primarily development profiles
+        //   have it enabled by default while release profiles have it disabled
+        //   by default.
+        let global_cfg = self.bcx
+            .config
+            .get_bool("build.incremental")?
+            .map(|c| c.val);
+        let incremental = match (
+            self.bcx.incremental_env,
+            global_cfg,
+            unit.profile.incremental,
+        ) {
+            (Some(v), _, _) => v,
+            (None, Some(false), _) => false,
+            (None, _, other) => other,
+        };
+
+        if !incremental {
+            return Ok(Vec::new());
+        }
+
+        // Only enable incremental compilation for sources the user can
+        // modify (aka path sources). For things that change infrequently,
+        // non-incremental builds yield better performance in the compiler
+        // itself (aka crates.io / git dependencies)
+        //
+        // (see also https://github.com/rust-lang/cargo/issues/3972)
+        if !unit.pkg.package_id().source_id().is_path() {
+            return Ok(Vec::new());
+        }
+
+        let dir = self.files().layout(unit.kind).incremental().display();
+        Ok(vec!["-C".to_string(), format!("incremental={}", dir)])
+    }
+
+    /// Whether `unit`'s package belongs to the set of primary packages for
+    /// this build.
+    pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
+        self.primary_packages.contains(unit.pkg.package_id())
+    }
+
+    /// Gets a package for the given package id.
+    ///
+    /// Returns an error if the package is not present in the package cache.
+    pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
+        self.package_cache.get(id)
+            .cloned()
+            .ok_or_else(|| format_err!("failed to find {}", id))
+    }
+
+    /// Return the list of filenames read by cargo to generate the BuildContext
+    /// (all Cargo.toml, etc).
+    ///
+    /// The result is sorted so callers get a deterministic list.
+    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
+        let mut inputs = Vec::new();
+        // Note that we're using the `package_cache`, which should have been
+        // populated by `build_unit_dependencies`, and only those packages are
+        // considered as all the inputs.
+        //
+        // (notably we skip dev-deps here if they aren't present)
+        for pkg in self.package_cache.values() {
+            inputs.push(pkg.manifest_path().to_path_buf());
+        }
+        inputs.sort();
+        Ok(inputs)
+    }
+}
+
+/// Validator for the `links` manifest key: tracks which package has claimed
+/// each native library name so duplicates can be rejected.
+#[derive(Default)]
+pub struct Links<'a> {
+    /// Packages already checked by `validate` (each is validated only once).
+    validated: HashSet<&'a PackageId>,
+    /// Maps a `links` name to the package that claimed it.
+    links: HashMap<String, &'a PackageId>,
+}
+
+impl<'a> Links<'a> {
+    /// Creates an empty `Links` tracker.
+    pub fn new() -> Links<'a> {
+        Links {
+            validated: HashSet::new(),
+            links: HashMap::new(),
+        }
+    }
+
+    /// Checks `unit`'s package against the `links` rules:
+    ///
+    /// * no two packages in the graph may link the same native library, and
+    /// * a package declaring `links` must have a custom build script.
+    ///
+    /// Each package is only validated once; subsequent calls are no-ops.
+    pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> {
+        if !self.validated.insert(unit.pkg.package_id()) {
+            return Ok(());
+        }
+        let lib = match unit.pkg.manifest().links() {
+            Some(lib) => lib,
+            None => return Ok(()),
+        };
+        if let Some(prev) = self.links.get(lib) {
+            let pkg = unit.pkg.package_id();
+
+            // Renders the dependency chain from `pkgid` up to the workspace
+            // root, for a readable error message.
+            let describe_path = |pkgid: &PackageId| -> String {
+                let dep_path = resolve.path_to_top(pkgid);
+                let mut dep_path_desc = format!("package `{}`", dep_path[0]);
+                for dep in dep_path.iter().skip(1) {
+                    write!(dep_path_desc, "\n    ... which is depended on by `{}`", dep).unwrap();
+                }
+                dep_path_desc
+            };
+
+            bail!(
+                "multiple packages link to native library `{}`, \
+                 but a native library can be linked only once\n\
+                 \n\
+                 {}\nlinks to native library `{}`\n\
+                 \n\
+                 {}\nalso links to native library `{}`",
+                lib,
+                describe_path(prev),
+                lib,
+                describe_path(pkg),
+                lib
+            )
+        }
+        if !unit.pkg
+            .manifest()
+            .targets()
+            .iter()
+            .any(|t| t.is_custom_build())
+        {
+            bail!(
+                "package `{}` specifies that it links to `{}` but does not \
+                 have a custom build script",
+                unit.pkg.package_id(),
+                lib
+            )
+        }
+        self.links.insert(lib.to_string(), unit.pkg.package_id());
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs
new file mode 100644 (file)
index 0000000..afc819d
--- /dev/null
@@ -0,0 +1,543 @@
+//! Constructs the dependency graph for compilation.
+//!
+//! Rust code is typically organized as a set of Cargo packages. The
+//! dependencies between the packages themselves are stored in the
+//! `Resolve` struct. However, we can't use that information as is for
+//! compilation! A package typically contains several targets, or crates,
+//! and these targets have inter-dependencies. For example, you need to
+//! compile the `lib` target before the `bin` one, and you need to compile
+//! `build.rs` before either of those.
+//!
+//! So, we need to lower the `Resolve`, which specifies dependencies between
+//! *packages*, to a graph of dependencies between their *targets*, and this
+//! is exactly what this module is doing! Well, almost exactly: another
+//! complication is that we might want to compile the same target several times
+//! (for example, with and without tests), so we actually build a dependency
+//! graph of `Unit`s, which capture these properties.
+
+use std::cell::RefCell;
+use std::collections::{HashMap, HashSet};
+
+use CargoResult;
+use core::dependency::Kind as DepKind;
+use core::profiles::UnitFor;
+use core::{Package, Target, PackageId};
+use core::package::Downloads;
+use super::{BuildContext, CompileMode, Kind, Unit};
+
+/// Scratch state used while lowering package dependencies to unit
+/// dependencies.
+struct State<'a: 'tmp, 'cfg: 'a, 'tmp> {
+    bcx: &'tmp BuildContext<'a, 'cfg>,
+    /// The unit dependency map being built.
+    deps: &'tmp mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
+    /// Cache of available packages, shared with the caller.
+    pkgs: RefCell<&'tmp mut HashMap<&'a PackageId, &'a Package>>,
+    /// Packages whose download has been started but not yet completed.
+    waiting_on_download: HashSet<&'a PackageId>,
+    downloads: Downloads<'a, 'cfg>,
+}
+
+/// Builds the unit dependency graph for the given `roots`, filling in `deps`
+/// and `pkgs`.
+///
+/// Packages that are not available yet are downloaded lazily: whenever some
+/// downloads are still pending, the partially-built graph is discarded and
+/// recomputed, looping until every required package is present.
+pub fn build_unit_dependencies<'a, 'cfg>(
+    roots: &[Unit<'a>],
+    bcx: &BuildContext<'a, 'cfg>,
+    deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
+    pkgs: &mut HashMap<&'a PackageId, &'a Package>,
+) -> CargoResult<()> {
+    assert!(deps.is_empty(), "can only build unit deps once");
+
+    let mut state = State {
+        bcx,
+        deps,
+        pkgs: RefCell::new(pkgs),
+        waiting_on_download: HashSet::new(),
+        downloads: bcx.packages.enable_download()?,
+    };
+
+    loop {
+        for unit in roots.iter() {
+            state.get(unit.pkg.package_id())?;
+
+            // Dependencies of tests/benches should not have `panic` set.
+            // We check the global test mode to see if we are running in `cargo
+            // test` in which case we ensure all dependencies have `panic`
+            // cleared, and avoid building the lib thrice (once with `panic`, once
+            // without, once for --test).  In particular, the lib included for
+            // doctests and examples are `Build` mode here.
+            let unit_for = if unit.mode.is_any_test() || bcx.build_config.test() {
+                UnitFor::new_test()
+            } else if unit.target.is_custom_build() {
+                // This normally doesn't happen, except `clean` aggressively
+                // generates all units.
+                UnitFor::new_build()
+            } else if unit.target.for_host() {
+                // proc-macro/plugin should never have panic set.
+                UnitFor::new_compiler()
+            } else {
+                UnitFor::new_normal()
+            };
+            deps_of(unit, &mut state, unit_for)?;
+        }
+
+        if state.waiting_on_download.len() > 0 {
+            // Some packages are still downloading; wait for at least one and
+            // rebuild the graph from scratch with the new packages available.
+            state.finish_some_downloads()?;
+            state.deps.clear();
+        } else {
+            break
+        }
+    }
+    trace!("ALL UNIT DEPENDENCIES {:#?}", state.deps);
+
+    connect_run_custom_build_deps(&mut state);
+
+    Ok(())
+}
+
+/// Recursively computes the dependencies of `unit` (and of each of those
+/// dependencies in turn), recording them in `state.deps`. Memoized on
+/// `unit`: a unit already present in the map is not recomputed.
+fn deps_of<'a, 'cfg, 'tmp>(
+    unit: &Unit<'a>,
+    state: &mut State<'a, 'cfg, 'tmp>,
+    unit_for: UnitFor,
+) -> CargoResult<()> {
+    // Currently the `deps` map does not include `unit_for`.  This should
+    // be safe for now.  `TestDependency` only exists to clear the `panic`
+    // flag, and you'll never ask for a `unit` with `panic` set as a
+    // `TestDependency`.  `CustomBuild` should also be fine since if the
+    // requested unit's settings are the same as `Any`, `CustomBuild` can't
+    // affect anything else in the hierarchy.
+    if !state.deps.contains_key(unit) {
+        let unit_deps = compute_deps(unit, state, unit_for)?;
+        let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect();
+        state.deps.insert(*unit, to_insert);
+        for (unit, unit_for) in unit_deps {
+            deps_of(&unit, state, unit_for)?;
+        }
+    }
+    Ok(())
+}
+
+/// For a package, return all targets which are registered as dependencies
+/// for that package.
+/// This returns a vec of `(Unit, UnitFor)` pairs.  The `UnitFor`
+/// is the profile type that should be used for dependencies of the unit.
+///
+/// Build-script executions and (non-test) doc builds are delegated to the
+/// specialized `compute_deps_custom_build` / `compute_deps_doc` helpers.
+fn compute_deps<'a, 'cfg, 'tmp>(
+    unit: &Unit<'a>,
+    state: &mut State<'a, 'cfg, 'tmp>,
+    unit_for: UnitFor,
+) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
+    if unit.mode.is_run_custom_build() {
+        return compute_deps_custom_build(unit, state.bcx);
+    } else if unit.mode.is_doc() && !unit.mode.is_any_test() {
+        // Note: This does not include Doctest.
+        return compute_deps_doc(unit, state);
+    }
+
+    let bcx = state.bcx;
+    let id = unit.pkg.package_id();
+    let deps = bcx.resolve.deps(id)
+        .filter(|&(_id, deps)| {
+            assert!(!deps.is_empty());
+            deps.iter().any(|dep| {
+                // If this target is a build command, then we only want build
+                // dependencies, otherwise we want everything *other than* build
+                // dependencies.
+                if unit.target.is_custom_build() != dep.is_build() {
+                    return false;
+                }
+
+                // If this dependency is *not* a transitive dependency, then it
+                // only applies to test/example targets
+                if !dep.is_transitive() &&
+                    !unit.target.is_test() &&
+                    !unit.target.is_example() &&
+                    !unit.mode.is_any_test()
+                {
+                    return false;
+                }
+
+                // If this dependency is only available for certain platforms,
+                // make sure we're only enabling it for that platform.
+                if !bcx.dep_platform_activated(dep, unit.kind) {
+                    return false;
+                }
+
+                // If the dependency is optional, then we're only activating it
+                // if the corresponding feature was activated
+                if dep.is_optional() &&
+                    !bcx.resolve.features(id).contains(&*dep.name_in_toml())
+                {
+                    return false;
+                }
+
+                // If we've gotten past all that, then this dependency is
+                // actually used!
+                true
+            })
+        });
+
+    // Turn each surviving package dependency into a unit building its
+    // library target (packages without a lib target are skipped).
+    let mut ret = Vec::new();
+    for (id, _) in deps {
+        let pkg = match state.get(id)? {
+            Some(pkg) => pkg,
+            None => continue,
+        };
+        let lib = match pkg.targets().iter().find(|t| t.is_lib()) {
+            Some(t) => t,
+            None => continue,
+        };
+        let mode = check_or_build_mode(unit.mode, lib);
+        let dep_unit_for = unit_for.with_for_host(lib.for_host());
+        let unit = new_unit(
+            bcx,
+            pkg,
+            lib,
+            dep_unit_for,
+            unit.kind.for_target(lib),
+            mode,
+        );
+        ret.push((unit, dep_unit_for));
+    }
+
+    // If this target is a build script, then what we've collected so far is
+    // all we need. If this isn't a build script, then it depends on the
+    // build script if there is one.
+    if unit.target.is_custom_build() {
+        return Ok(ret);
+    }
+    ret.extend(dep_build_script(unit, bcx));
+
+    // If this target is a binary, test, example, etc, then it depends on
+    // the library of the same package. The call to `resolve.deps` above
+    // didn't include `pkg` in the return values, so we need to special case
+    // it here and see if we need to push `(pkg, pkg_lib_target)`.
+    if unit.target.is_lib() && unit.mode != CompileMode::Doctest {
+        return Ok(ret);
+    }
+    ret.extend(maybe_lib(unit, bcx, unit_for));
+
+    // If any integration tests/benches are being run, make sure that
+    // binaries are built as well.
+    if !unit.mode.is_check() && unit.mode.is_any_test()
+        && (unit.target.is_test() || unit.target.is_bench())
+    {
+        ret.extend(
+            unit.pkg
+                .targets()
+                .iter()
+                .filter(|t| {
+                    let no_required_features = Vec::new();
+
+                    t.is_bin() &&
+                        // Skip binaries with required features that have not been selected.
+                        t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
+                            bcx.resolve.features(id).contains(f)
+                        })
+                })
+                .map(|t| {
+                    (
+                        new_unit(
+                            bcx,
+                            unit.pkg,
+                            t,
+                            UnitFor::new_normal(),
+                            unit.kind.for_target(t),
+                            CompileMode::Build,
+                        ),
+                        UnitFor::new_normal(),
+                    )
+                }),
+        );
+    }
+
+    Ok(ret)
+}
+
+/// Returns the dependencies needed to run a build script.
+///
+/// The `unit` provided must represent an execution of a build script, and
+/// the returned set of units must all be run before `unit` is run.
+///
+/// The returned unit compiles the build script itself, always for the host
+/// platform.
+fn compute_deps_custom_build<'a, 'cfg>(
+    unit: &Unit<'a>,
+    bcx: &BuildContext<'a, 'cfg>,
+) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
+    // When not overridden, then the dependencies to run a build script are:
+    //
+    // 1. Compiling the build script itself
+    // 2. For each immediate dependency of our package which has a `links`
+    //    key, the execution of that build script.
+    //
+    // We don't have a great way of handling (2) here right now so this is
+    // deferred until after the graph of all unit dependencies has been
+    // constructed.
+    let unit = new_unit(
+        bcx,
+        unit.pkg,
+        unit.target,
+        UnitFor::new_build(),
+        Kind::Host, // build scripts always compiled for the host
+        CompileMode::Build,
+    );
+    // All dependencies of this unit should use profiles for custom
+    // builds.
+    Ok(vec![(unit, UnitFor::new_build())])
+}
+
+/// Returns the dependencies necessary to document a package
+fn compute_deps_doc<'a, 'cfg, 'tmp>(
+    unit: &Unit<'a>,
+    state: &mut State<'a, 'cfg, 'tmp>,
+) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
+    let bcx = state.bcx;
+    // Only normal (non-build, non-dev) dependencies activated for this
+    // platform are relevant for documentation.
+    let deps = bcx.resolve
+        .deps(unit.pkg.package_id())
+        .filter(|&(_id, deps)| {
+            deps.iter().any(|dep| match dep.kind() {
+                DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind),
+                _ => false,
+            })
+        });
+
+    // To document a library, we depend on dependencies actually being
+    // built. If we're documenting *all* libraries, then we also depend on
+    // the documentation of the library being built.
+    let mut ret = Vec::new();
+    for (id, _deps) in deps {
+        let dep = match state.get(id)? {
+            Some(dep) => dep,
+            None => continue,
+        };
+        let lib = match dep.targets().iter().find(|t| t.is_lib()) {
+            Some(lib) => lib,
+            None => continue,
+        };
+        // rustdoc only needs rmeta files for regular dependencies.
+        // However, for plugins/proc-macros, deps should be built like normal.
+        let mode = check_or_build_mode(unit.mode, lib);
+        let dep_unit_for = UnitFor::new_normal().with_for_host(lib.for_host());
+        let lib_unit = new_unit(
+            bcx,
+            dep,
+            lib,
+            dep_unit_for,
+            unit.kind.for_target(lib),
+            mode,
+        );
+        ret.push((lib_unit, dep_unit_for));
+        if let CompileMode::Doc { deps: true } = unit.mode {
+            // Document this lib as well.
+            let doc_unit = new_unit(
+                bcx,
+                dep,
+                lib,
+                dep_unit_for,
+                unit.kind.for_target(lib),
+                unit.mode,
+            );
+            ret.push((doc_unit, dep_unit_for));
+        }
+    }
+
+    // Be sure to build/run the build script for documented libraries as well.
+    ret.extend(dep_build_script(unit, bcx));
+
+    // If we document a binary, we need the library available
+    if unit.target.is_bin() {
+        ret.extend(maybe_lib(unit, bcx, UnitFor::new_normal()));
+    }
+    Ok(ret)
+}
+
+/// If `unit`'s package has a linkable target, returns a unit building it
+/// (with the check-or-build mode derived from `unit.mode`) paired with
+/// `unit_for`; otherwise returns `None`.
+fn maybe_lib<'a>(
+    unit: &Unit<'a>,
+    bcx: &BuildContext,
+    unit_for: UnitFor,
+) -> Option<(Unit<'a>, UnitFor)> {
+    unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
+        let mode = check_or_build_mode(unit.mode, t);
+        let unit = new_unit(
+            bcx,
+            unit.pkg,
+            t,
+            unit_for,
+            unit.kind.for_target(t),
+            mode,
+        );
+        (unit, unit_for)
+    })
+}
+
+/// If a build script is scheduled to be run for the package specified by
+/// `unit`, this function will return the unit to run that build script.
+///
+/// The returned unit *runs* the script (`CompileMode::RunCustomBuild`)
+/// rather than compiling it.
+///
+/// Overriding a build script simply means that the running of the build
+/// script itself doesn't have any dependencies, so even in that case a unit
+/// of work is still returned. `None` is only returned if the package has no
+/// build script.
+fn dep_build_script<'a>(unit: &Unit<'a>, bcx: &BuildContext) -> Option<(Unit<'a>, UnitFor)> {
+    unit.pkg
+        .targets()
+        .iter()
+        .find(|t| t.is_custom_build())
+        .map(|t| {
+            // The profile stored in the Unit is the profile for the thing
+            // the custom build script is running for.
+            (
+                Unit {
+                    pkg: unit.pkg,
+                    target: t,
+                    profile: bcx.profiles.get_profile_run_custom_build(&unit.profile),
+                    kind: unit.kind,
+                    mode: CompileMode::RunCustomBuild,
+                },
+                UnitFor::new_build(),
+            )
+        })
+}
+
+/// Choose the correct mode for dependencies.
+///
+/// Dependencies of `check`/`doc` units are themselves only checked (rmeta
+/// output), except host targets (plugins/proc-macros) which must be fully
+/// built. Dependencies of every other mode build normally.
+fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode {
+    match mode {
+        CompileMode::Check { .. } | CompileMode::Doc { .. } => {
+            if target.for_host() {
+                // Plugin and proc-macro targets should be compiled like
+                // normal.
+                CompileMode::Build
+            } else {
+                // Regular dependencies should not be checked with --test.
+                // Regular dependencies of doc targets should emit rmeta only.
+                CompileMode::Check { test: false }
+            }
+        }
+        _ => CompileMode::Build,
+    }
+}
+
+/// Constructs a `Unit` for compiling `target` of `pkg`, resolving the
+/// profile to use from the build context's profile configuration.
+fn new_unit<'a>(
+    bcx: &BuildContext,
+    pkg: &'a Package,
+    target: &'a Target,
+    unit_for: UnitFor,
+    kind: Kind,
+    mode: CompileMode,
+) -> Unit<'a> {
+    let profile = bcx.profiles.get_profile(
+        &pkg.package_id(),
+        bcx.ws.is_member(pkg),
+        unit_for,
+        mode,
+        bcx.build_config.release,
+    );
+    Unit {
+        pkg,
+        target,
+        profile,
+        kind,
+        mode,
+    }
+}
+
+/// Fill in missing dependencies for units of the `RunCustomBuild`
+///
+/// As mentioned above in `compute_deps_custom_build` each build script
+/// execution has two dependencies. The first is compiling the build script
+/// itself (already added) and the second is that all crates the package of the
+/// build script depends on with `links` keys, their build script execution. (a
+/// bit confusing eh?)
+///
+/// Here we take the entire `deps` map and add more dependencies from execution
+/// of one build script to execution of another build script.
+fn connect_run_custom_build_deps(state: &mut State) {
+    // `state.deps` is borrowed while being scanned below, so additions are
+    // collected here first and applied once the scan is done.
+    let mut new_deps = Vec::new();
+
+    {
+        // First up build a reverse dependency map. This is a mapping of all
+        // `RunCustomBuild` known steps to the unit which depends on them. For
+        // example a library might depend on a build script, so this map will
+        // have the build script as the key and the library would be in the
+        // value's set.
+        let mut reverse_deps = HashMap::new();
+        for (unit, deps) in state.deps.iter() {
+            for dep in deps {
+                if dep.mode == CompileMode::RunCustomBuild {
+                    reverse_deps.entry(dep)
+                        .or_insert_with(HashSet::new)
+                        .insert(unit);
+                }
+            }
+        }
+
+        // And next we take a look at all build scripts executions listed in the
+        // dependency map. Our job here is to take everything that depends on
+        // this build script (from our reverse map above) and look at the other
+        // package dependencies of these parents.
+        //
+        // If we depend on a linkable target and the build script mentions
+        // `links`, then we depend on that package's build script! Here we use
+        // `dep_build_script` to manufacture an appropriate build script unit to
+        // depend on.
+        for unit in state.deps.keys().filter(|k| k.mode == CompileMode::RunCustomBuild) {
+            let reverse_deps = match reverse_deps.get(unit) {
+                Some(set) => set,
+                None => continue,
+            };
+
+            let to_add = reverse_deps
+                .iter()
+                .flat_map(|reverse_dep| state.deps[reverse_dep].iter())
+                .filter(|other| {
+                    other.pkg != unit.pkg &&
+                        other.target.linkable() &&
+                        other.pkg.manifest().links().is_some()
+                })
+                .filter_map(|other| dep_build_script(other, state.bcx).map(|p| p.0))
+                .collect::<HashSet<_>>();
+
+            if !to_add.is_empty() {
+                new_deps.push((*unit, to_add));
+            }
+        }
+    }
+
+    // And finally, add in all the missing dependencies!
+    for (unit, new_deps) in new_deps {
+        state.deps.get_mut(&unit).unwrap().extend(new_deps);
+    }
+}
+
+impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
+    /// Looks up the package for `id`, starting its download if it is not
+    /// available yet.
+    ///
+    /// Returns `Ok(None)` while the package's download is still in flight;
+    /// callers retry once `finish_some_downloads` has made progress.
+    fn get(&mut self, id: &'a PackageId) -> CargoResult<Option<&'a Package>> {
+        let mut pkgs = self.pkgs.borrow_mut();
+        if let Some(pkg) = pkgs.get(id) {
+            return Ok(Some(pkg))
+        }
+        if !self.waiting_on_download.insert(id) {
+            return Ok(None)
+        }
+        if let Some(pkg) = self.downloads.start(id)? {
+            pkgs.insert(id, pkg);
+            self.waiting_on_download.remove(id);
+            return Ok(Some(pkg))
+        }
+        Ok(None)
+    }
+
+    /// Completes at least one downloading, maybe waiting for more to complete.
+    ///
+    /// This function will block the current thread waiting for at least one
+    /// crate to finish downloading. The function may continue to download more
+    /// crates if it looks like there's a long enough queue of crates to keep
+    /// downloading. When only a handful of packages remain this function
+    /// returns, and it's hoped that by returning we'll be able to push more
+    /// packages to download into the queue.
+    fn finish_some_downloads(&mut self) -> CargoResult<()> {
+        assert!(self.downloads.remaining() > 0);
+        loop {
+            let pkg = self.downloads.wait()?;
+            self.waiting_on_download.remove(pkg.package_id());
+            self.pkgs.borrow_mut().insert(pkg.package_id(), pkg);
+
+            // Arbitrarily choose that 5 or more packages concurrently download
+            // is a good enough number to "fill the network pipe". If we have
+            // less than this let's recompute the whole unit dependency graph
+            // again and try to find some more packages to download.
+            if self.downloads.remaining() < 5 {
+                break
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs
new file mode 100644 (file)
index 0000000..cb4a453
--- /dev/null
@@ -0,0 +1,675 @@
+use std::collections::hash_map::{Entry, HashMap};
+use std::collections::{BTreeSet, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::str;
+use std::sync::{Arc, Mutex};
+
+use core::PackageId;
+use util::errors::{CargoResult, CargoResultExt};
+use util::machine_message;
+use util::{self, internal, paths, profile};
+use util::{Cfg, Freshness};
+
+use super::job::Work;
+use super::{fingerprint, Context, Kind, TargetConfig, Unit};
+
+/// Contains the parsed output of a custom build script.
+///
+/// Each field corresponds to one family of `cargo:` directives printed on the
+/// script's stdout (see `BuildOutput::parse`).
+#[derive(Clone, Debug, Hash)]
+pub struct BuildOutput {
+    /// Paths to pass to rustc with the `-L` flag
+    pub library_paths: Vec<PathBuf>,
+    /// Names and link kinds of libraries, suitable for the `-l` flag
+    pub library_links: Vec<String>,
+    /// Various `--cfg` flags to pass to the compiler
+    pub cfgs: Vec<String>,
+    /// Additional environment variables to run the compiler with.
+    pub env: Vec<(String, String)>,
+    /// Metadata to pass to the immediate dependencies
+    pub metadata: Vec<(String, String)>,
+    /// Paths to trigger a rerun of this build script.
+    pub rerun_if_changed: Vec<PathBuf>,
+    /// Environment variables which, when changed, will cause a rebuild.
+    pub rerun_if_env_changed: Vec<String>,
+    /// Warnings generated by this build script.
+    pub warnings: Vec<String>,
+}
+
+/// Map of packages to build info, keyed by package id plus the platform
+/// (host/target) the build script ran for.
+pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
+
+/// Build info and overrides
+pub struct BuildState {
+    /// Parsed outputs of build scripts that have run so far, shared across
+    /// concurrent jobs behind a mutex.
+    pub outputs: Mutex<BuildMap>,
+    /// Pre-configured outputs (keyed by `links` name) that stand in for
+    /// actually running a script; populated from host/target configuration.
+    overrides: HashMap<(String, Kind), BuildOutput>,
+}
+
+/// The set of build script outputs a particular unit depends on.
+#[derive(Default)]
+pub struct BuildScripts {
+    // Cargo will use this `to_link` vector to add -L flags to compiles as we
+    // propagate them upwards towards the final build. Note, however, that we
+    // need to preserve the ordering of `to_link` to be topologically sorted.
+    // This will ensure that build scripts which print their paths properly will
+    // correctly pick up the files they generated (if there are duplicates
+    // elsewhere).
+    //
+    // To preserve this ordering, the (id, kind) is stored in two places, once
+    // in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
+    // this as we're building interactively below to ensure that the memory
+    // usage here doesn't blow up too much.
+    //
+    // For more information, see #2354
+    pub to_link: Vec<(PackageId, Kind)>,
+    // Fast membership mirror of `to_link`, used to skip duplicate pushes.
+    seen_to_link: HashSet<(PackageId, Kind)>,
+    // Packages whose build script outputs must be available when host-side
+    // (plugin) dependencies run — NOTE(review): inferred from how `build_map`
+    // populates this for `for_host` targets; confirm.
+    pub plugins: BTreeSet<PackageId>,
+}
+
+/// Dependency information recorded for a build script, used to decide when
+/// the script must be re-run.
+pub struct BuildDeps {
+    /// Path to the file holding the script's previously captured stdout.
+    pub build_script_output: PathBuf,
+    /// Files whose changes trigger a rerun (`cargo:rerun-if-changed`).
+    pub rerun_if_changed: Vec<PathBuf>,
+    /// Env vars whose changes trigger a rerun (`cargo:rerun-if-env-changed`).
+    pub rerun_if_env_changed: Vec<String>,
+}
+
+/// Prepares a `Work` that executes the target as a custom build script.
+///
+/// The `req` given is the requirement which this run of the build script will
+/// prepare work for. If the requirement is specified as both the target and the
+/// host platforms it is assumed that the two are equal and the build script is
+/// only run once (not twice).
+pub fn prepare<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<(Work, Work, Freshness)> {
+    let _p = profile::start(format!(
+        "build script prepare: {}/{}",
+        unit.pkg,
+        unit.target.name()
+    ));
+
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    let overridden = cx.build_script_overridden.contains(&key);
+    let (work_dirty, work_fresh) = if overridden {
+        (Work::noop(), Work::noop())
+    } else {
+        build_work(cx, unit)?
+    };
+
+    if cx.bcx.build_config.build_plan {
+        Ok((work_dirty, work_fresh, Freshness::Dirty))
+    } else {
+        // Now that we've prep'd our work, build the work needed to manage the
+        // fingerprint and then start returning that upwards.
+        let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
+
+        Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
+    }
+}
+
+fn emit_build_output(output: &BuildOutput, id: &PackageId) {
+    let library_paths = output
+        .library_paths
+        .iter()
+        .map(|l| l.display().to_string())
+        .collect::<Vec<_>>();
+
+    machine_message::emit(&machine_message::BuildScript {
+        package_id: id,
+        linked_libs: &output.library_links,
+        linked_paths: &library_paths,
+        cfgs: &output.cfgs,
+        env: &output.env,
+    });
+}
+
+/// Builds the pair of `Work` closures for a custom build script run: the
+/// "dirty" work actually executes the script, while the "fresh" work replays
+/// the output recorded by a previous run.
+fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> {
+    assert!(unit.mode.is_run_custom_build());
+    let bcx = &cx.bcx;
+    let dependencies = cx.dep_targets(unit);
+    // The dependency that *compiles* the build script (as opposed to running
+    // it); its output directory is where the executable lives.
+    let build_script_unit = dependencies
+        .iter()
+        .find(|d| !d.mode.is_run_custom_build() && d.target.is_custom_build())
+        .expect("running a script not depending on an actual script");
+    let script_output = cx.files().build_script_dir(build_script_unit);
+    let build_output = cx.files().build_script_out_dir(unit);
+    let build_plan = bcx.build_config.build_plan;
+    let invocation_name = unit.buildkey();
+
+    // Metabuild packages have their build script source generated for them.
+    if let Some(deps) = unit.pkg.manifest().metabuild() {
+        prepare_metabuild(cx, build_script_unit, deps)?;
+    }
+
+    // Building the command to execute
+    let to_exec = script_output.join(unit.target.name());
+
+    // Start preparing the process to execute, starting out with some
+    // environment variables. Note that the profile-related environment
+    // variables are not set with this the build script's profile but rather the
+    // package's library profile.
+    // NOTE: If you add any profile flags, be sure to update
+    // `Profiles::get_profile_run_custom_build` so that those flags get
+    // carried over.
+    let to_exec = to_exec.into_os_string();
+    let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
+    let debug = unit.profile.debuginfo.unwrap_or(0) != 0;
+    cmd.env("OUT_DIR", &build_output)
+        .env("CARGO_MANIFEST_DIR", unit.pkg.root())
+        .env("NUM_JOBS", &bcx.jobs().to_string())
+        .env(
+            "TARGET",
+            &match unit.kind {
+                Kind::Host => &bcx.host_triple(),
+                Kind::Target => bcx.target_triple(),
+            },
+        )
+        .env("DEBUG", debug.to_string())
+        .env("OPT_LEVEL", &unit.profile.opt_level.to_string())
+        .env(
+            "PROFILE",
+            if bcx.build_config.release {
+                "release"
+            } else {
+                "debug"
+            },
+        )
+        .env("HOST", &bcx.host_triple())
+        .env("RUSTC", &bcx.rustc.path)
+        .env("RUSTDOC", &*bcx.config.rustdoc()?)
+        .inherit_jobserver(&cx.jobserver);
+
+    if let Some(ref linker) = bcx.target_config.linker {
+        cmd.env("RUSTC_LINKER", linker);
+    }
+
+    if let Some(links) = unit.pkg.manifest().links() {
+        cmd.env("CARGO_MANIFEST_LINKS", links);
+    }
+
+    // Be sure to pass along all enabled features for this package, this is the
+    // last piece of statically known information that we have.
+    for feat in bcx.resolve.features(unit.pkg.package_id()).iter() {
+        cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
+    }
+
+    // Expose the compilation target's cfg values as CARGO_CFG_* variables,
+    // collapsing key/value cfgs into comma-separated lists.
+    let mut cfg_map = HashMap::new();
+    for cfg in bcx.cfg(unit.kind) {
+        match *cfg {
+            Cfg::Name(ref n) => {
+                cfg_map.insert(n.clone(), None);
+            }
+            Cfg::KeyPair(ref k, ref v) => {
+                if let Some(ref mut values) =
+                    *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new()))
+                {
+                    values.push(v.clone())
+                }
+            }
+        }
+    }
+    for (k, v) in cfg_map {
+        let k = format!("CARGO_CFG_{}", super::envify(&k));
+        match v {
+            Some(list) => {
+                cmd.env(&k, list.join(","));
+            }
+            None => {
+                cmd.env(&k, "");
+            }
+        }
+    }
+
+    // Gather the set of native dependencies that this package has along with
+    // some other variables to close over.
+    //
+    // This information will be used at build-time later on to figure out which
+    // sorts of variables need to be discovered at that time.
+    let lib_deps = {
+        dependencies
+            .iter()
+            .filter_map(|unit| {
+                if unit.mode.is_run_custom_build() {
+                    // NOTE(review): assumes every run-custom-build dependency
+                    // declares a `links` key; the `unwrap` would panic
+                    // otherwise — confirm this invariant upstream.
+                    Some((
+                        unit.pkg.manifest().links().unwrap().to_string(),
+                        unit.pkg.package_id().clone(),
+                    ))
+                } else {
+                    None
+                }
+            })
+            .collect::<Vec<_>>()
+    };
+    let pkg_name = unit.pkg.to_string();
+    let build_state = Arc::clone(&cx.build_state);
+    let id = unit.pkg.package_id().clone();
+    let (output_file, err_file, root_output_file) = {
+        let build_output_parent = build_output.parent().unwrap();
+        let output_file = build_output_parent.join("output");
+        let err_file = build_output_parent.join("stderr");
+        let root_output_file = build_output_parent.join("root-output");
+        (output_file, err_file, root_output_file)
+    };
+    let root_output = cx.files().target_root().to_path_buf();
+    // Clones of everything the "fresh" closure below needs to own.
+    let all = (
+        id.clone(),
+        pkg_name.clone(),
+        Arc::clone(&build_state),
+        output_file.clone(),
+        root_output.clone(),
+    );
+    let build_scripts = super::load_build_deps(cx, unit);
+    let kind = unit.kind;
+    let json_messages = bcx.build_config.json_messages();
+    let extra_verbose = bcx.config.extra_verbose();
+
+    // Check to see if the build script has already run, and if it has keep
+    // track of whether it has told us about some explicit dependencies
+    let prev_root_output = paths::read_bytes(&root_output_file)
+        .and_then(|bytes| util::bytes2path(&bytes))
+        .unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf());
+    let prev_output =
+        BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output).ok();
+    let deps = BuildDeps::new(&output_file, prev_output.as_ref());
+    cx.build_explicit_deps.insert(*unit, deps);
+
+    fs::create_dir_all(&script_output)?;
+    fs::create_dir_all(&build_output)?;
+
+    // Prepare the unit of "dirty work" which will actually run the custom build
+    // command.
+    //
+    // Note that this has to do some extra work just before running the command
+    // to determine extra environment variables and such.
+    let dirty = Work::new(move |state| {
+        // Make sure that OUT_DIR exists.
+        //
+        // If we have an old build directory, then just move it into place,
+        // otherwise create it!
+        if fs::metadata(&build_output).is_err() {
+            fs::create_dir(&build_output).chain_err(|| {
+                internal(
+                    "failed to create script output directory for \
+                     build command",
+                )
+            })?;
+        }
+
+        // For all our native lib dependencies, pick up their metadata to pass
+        // along to this custom build command. We're also careful to augment our
+        // dynamic library search path in case the build script depended on any
+        // native dynamic libraries.
+        if !build_plan {
+            let build_state = build_state.outputs.lock().unwrap();
+            for (name, id) in lib_deps {
+                let key = (id.clone(), kind);
+                let state = build_state.get(&key).ok_or_else(|| {
+                    internal(format!(
+                        "failed to locate build state for env \
+                         vars: {}/{:?}",
+                        id, kind
+                    ))
+                })?;
+                let data = &state.metadata;
+                for &(ref key, ref value) in data.iter() {
+                    cmd.env(
+                        &format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
+                        value,
+                    );
+                }
+            }
+            if let Some(build_scripts) = build_scripts {
+                super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &root_output)?;
+            }
+        }
+
+        // And now finally, run the build command itself!
+        if build_plan {
+            state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new()));
+        } else {
+            state.running(&cmd);
+            let output = if extra_verbose {
+                let prefix = format!("[{} {}] ", id.name(), id.version());
+                state.capture_output(&cmd, Some(prefix), true)
+            } else {
+                cmd.exec_with_output()
+            };
+            let output = output.map_err(|e| {
+                format_err!(
+                    "failed to run custom build command for `{}`\n{}",
+                    pkg_name,
+                    e
+                )
+            })?;
+
+            // After the build command has finished running, we need to be sure to
+            // remember all of its output so we can later discover precisely what it
+            // was, even if we don't run the build command again (due to freshness).
+            //
+            // This is also the location where we provide feedback into the build
+            // state informing what variables were discovered via our script as
+            // well.
+            paths::write(&output_file, &output.stdout)?;
+            paths::write(&err_file, &output.stderr)?;
+            paths::write(&root_output_file, util::path2bytes(&root_output)?)?;
+            let parsed_output =
+                BuildOutput::parse(&output.stdout, &pkg_name, &root_output, &root_output)?;
+
+            if json_messages {
+                emit_build_output(&parsed_output, &id);
+            }
+            build_state.insert(id, kind, parsed_output);
+        }
+        Ok(())
+    });
+
+    // Now that we've prepared our work-to-do, we need to prepare the fresh work
+    // itself to run when we actually end up just discarding what we calculated
+    // above.
+    let fresh = Work::new(move |_tx| {
+        let (id, pkg_name, build_state, output_file, root_output) = all;
+        // Reuse the output parsed eagerly above if it succeeded; otherwise
+        // parse the saved file now (and propagate a failure this time).
+        let output = match prev_output {
+            Some(output) => output,
+            None => {
+                BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output)?
+            }
+        };
+
+        if json_messages {
+            emit_build_output(&output, &id);
+        }
+
+        build_state.insert(id, kind, output);
+        Ok(())
+    });
+
+    Ok((dirty, fresh))
+}
+
+impl BuildState {
+    pub fn new(host_config: &TargetConfig, target_config: &TargetConfig) -> BuildState {
+        let mut overrides = HashMap::new();
+        let i1 = host_config.overrides.iter().map(|p| (p, Kind::Host));
+        let i2 = target_config.overrides.iter().map(|p| (p, Kind::Target));
+        for ((name, output), kind) in i1.chain(i2) {
+            overrides.insert((name.clone(), kind), output.clone());
+        }
+        BuildState {
+            outputs: Mutex::new(HashMap::new()),
+            overrides,
+        }
+    }
+
+    fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
+        self.outputs.lock().unwrap().insert((id, kind), output);
+    }
+}
+
+impl BuildOutput {
+    /// Reads the saved script output at `path` and parses it; see `parse`.
+    pub fn parse_file(
+        path: &Path,
+        pkg_name: &str,
+        root_output_when_generated: &Path,
+        root_output: &Path,
+    ) -> CargoResult<BuildOutput> {
+        let contents = paths::read_bytes(path)?;
+        BuildOutput::parse(&contents, pkg_name, root_output_when_generated, root_output)
+    }
+
+    // Parses the output of a script.
+    // The `pkg_name` is used for error messages.
+    //
+    // Lines not starting with `cargo:` (and non-UTF-8 lines) are ignored;
+    // `cargo:key=value` directives are dispatched by key, and unrecognized
+    // keys become `metadata` entries.
+    pub fn parse(
+        input: &[u8],
+        pkg_name: &str,
+        root_output_when_generated: &Path,
+        root_output: &Path,
+    ) -> CargoResult<BuildOutput> {
+        let mut library_paths = Vec::new();
+        let mut library_links = Vec::new();
+        let mut cfgs = Vec::new();
+        let mut env = Vec::new();
+        let mut metadata = Vec::new();
+        let mut rerun_if_changed = Vec::new();
+        let mut rerun_if_env_changed = Vec::new();
+        let mut warnings = Vec::new();
+        let whence = format!("build script of `{}`", pkg_name);
+
+        for line in input.split(|b| *b == b'\n') {
+            let line = match str::from_utf8(line) {
+                Ok(line) => line.trim(),
+                Err(..) => continue,
+            };
+            let mut iter = line.splitn(2, ':');
+            if iter.next() != Some("cargo") {
+                // skip this line since it doesn't start with "cargo:"
+                continue;
+            }
+            let data = match iter.next() {
+                Some(val) => val,
+                None => continue,
+            };
+
+            // getting the `key=value` part of the line
+            let mut iter = data.splitn(2, '=');
+            let key = iter.next();
+            let value = iter.next();
+            let (key, value) = match (key, value) {
+                (Some(a), Some(b)) => (a, b.trim_right()),
+                // line started with `cargo:` but didn't match `key=value`
+                _ => bail!("Wrong output in {}: `{}`", whence, line),
+            };
+
+            // Rebase any path the script printed relative to the root output
+            // directory it was generated against onto the current root;
+            // paths outside that root are taken verbatim.
+            let path = |val: &str| match Path::new(val).strip_prefix(root_output_when_generated) {
+                Ok(path) => root_output.join(path),
+                Err(_) => PathBuf::from(val),
+            };
+
+            match key {
+                "rustc-flags" => {
+                    let (paths, links) = BuildOutput::parse_rustc_flags(value, &whence)?;
+                    library_links.extend(links.into_iter());
+                    library_paths.extend(paths.into_iter());
+                }
+                "rustc-link-lib" => library_links.push(value.to_string()),
+                "rustc-link-search" => library_paths.push(path(value)),
+                "rustc-cfg" => cfgs.push(value.to_string()),
+                "rustc-env" => env.push(BuildOutput::parse_rustc_env(value, &whence)?),
+                "warning" => warnings.push(value.to_string()),
+                "rerun-if-changed" => rerun_if_changed.push(path(value)),
+                "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()),
+                _ => metadata.push((key.to_string(), value.to_string())),
+            }
+        }
+
+        Ok(BuildOutput {
+            library_paths,
+            library_links,
+            cfgs,
+            env,
+            metadata,
+            rerun_if_changed,
+            rerun_if_env_changed,
+            warnings,
+        })
+    }
+
+    /// Parses a `cargo:rustc-flags` value, which may only contain `-l` and
+    /// `-L` flags, returning the collected (search paths, link libraries).
+    pub fn parse_rustc_flags(
+        value: &str,
+        whence: &str,
+    ) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
+        let value = value.trim();
+        let mut flags_iter = value
+            .split(|c: char| c.is_whitespace())
+            .filter(|w| w.chars().any(|c| !c.is_whitespace()));
+        let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
+        while let Some(flag) = flags_iter.next() {
+            if flag != "-l" && flag != "-L" {
+                bail!(
+                    "Only `-l` and `-L` flags are allowed in {}: `{}`",
+                    whence,
+                    value
+                )
+            }
+            // Each flag must be followed by its argument in the next token.
+            let value = match flags_iter.next() {
+                Some(v) => v,
+                None => bail!(
+                    "Flag in rustc-flags has no value in {}: `{}`",
+                    whence,
+                    value
+                ),
+            };
+            match flag {
+                "-l" => library_links.push(value.to_string()),
+                "-L" => library_paths.push(PathBuf::from(value)),
+
+                // was already checked above
+                _ => bail!("only -l and -L flags are allowed"),
+            };
+        }
+        Ok((library_paths, library_links))
+    }
+
+    /// Parses a `cargo:rustc-env=NAME=VALUE` payload into `(NAME, VALUE)`.
+    pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
+        let mut iter = value.splitn(2, '=');
+        let name = iter.next();
+        let val = iter.next();
+        match (name, val) {
+            (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())),
+            _ => bail!("Variable rustc-env has no value in {}: {}", whence, value),
+        }
+    }
+}
+
+fn prepare_metabuild<'a, 'cfg>(
+    cx: &Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+    deps: &[String],
+) -> CargoResult<()> {
+    let mut output = Vec::new();
+    let available_deps = cx.dep_targets(unit);
+    // Filter out optional dependencies, and look up the actual lib name.
+    let meta_deps: Vec<_> = deps
+        .iter()
+        .filter_map(|name| {
+            available_deps
+                .iter()
+                .find(|u| u.pkg.name().as_str() == name.as_str())
+                .map(|dep| dep.target.crate_name())
+        })
+        .collect();
+    for dep in &meta_deps {
+        output.push(format!("extern crate {};\n", dep));
+    }
+    output.push("fn main() {\n".to_string());
+    for dep in &meta_deps {
+        output.push(format!("    {}::metabuild();\n", dep));
+    }
+    output.push("}\n".to_string());
+    let output = output.join("");
+    let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
+    fs::create_dir_all(path.parent().unwrap())?;
+    paths::write_if_changed(path, &output)?;
+    Ok(())
+}
+
+impl BuildDeps {
+    pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
+        BuildDeps {
+            build_script_output: output_file.to_path_buf(),
+            rerun_if_changed: output
+                .map(|p| &p.rerun_if_changed)
+                .cloned()
+                .unwrap_or_default(),
+            rerun_if_env_changed: output
+                .map(|p| &p.rerun_if_env_changed)
+                .cloned()
+                .unwrap_or_default(),
+        }
+    }
+}
+
+/// Compute the `build_scripts` map in the `Context` which tracks what build
+/// scripts each package depends on.
+///
+/// The global `build_scripts` map lists for all (package, kind) tuples what set
+/// of packages' build script outputs must be considered. For example this lists
+/// all dependencies' `-L` flags which need to be propagated transitively.
+///
+/// The given set of targets to this function is the initial set of
+/// targets/profiles which are being built.
+pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
+    let mut ret = HashMap::new();
+    for unit in units {
+        build(&mut ret, cx, unit)?;
+    }
+    cx.build_scripts
+        .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
+    return Ok(());
+
+    // Recursive function to build up the map we're constructing. This function
+    // memoizes all of its return values as it goes along.
+    fn build<'a, 'b, 'cfg>(
+        out: &'a mut HashMap<Unit<'b>, BuildScripts>,
+        cx: &mut Context<'b, 'cfg>,
+        unit: &Unit<'b>,
+    ) -> CargoResult<&'a BuildScripts> {
+        // Do a quick pre-flight check to see if we've already calculated the
+        // set of dependencies.
+        if out.contains_key(unit) {
+            return Ok(&out[unit]);
+        }
+
+        // If this package's script is overridden via configuration for its
+        // `links` key, record the configured output and mark the script as
+        // overridden so it's never actually executed.
+        {
+            let key = unit.pkg
+                .manifest()
+                .links()
+                .map(|l| (l.to_string(), unit.kind));
+            let build_state = &cx.build_state;
+            if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
+                let key = (unit.pkg.package_id().clone(), unit.kind);
+                cx.build_script_overridden.insert(key.clone());
+                build_state
+                    .outputs
+                    .lock()
+                    .unwrap()
+                    .insert(key, output.clone());
+            }
+        }
+
+        let mut ret = BuildScripts::default();
+
+        // The package's own build script output (if any) applies to this unit.
+        if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
+            add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
+        }
+
+        // We want to invoke the compiler deterministically to be cache-friendly
+        // to rustc invocation caching schemes, so be sure to generate the same
+        // set of build script dependency orderings via sorting the targets that
+        // come out of the `Context`.
+        let mut targets = cx.dep_targets(unit);
+        targets.sort_by_key(|u| u.pkg.package_id());
+
+        // Propagate dependency scripts: host-side deps feed the plugin set,
+        // linkable deps propagate their `to_link` entries transitively.
+        for unit in targets.iter() {
+            let dep_scripts = build(out, cx, unit)?;
+
+            if unit.target.for_host() {
+                ret.plugins
+                    .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
+            } else if unit.target.linkable() {
+                for &(ref pkg, kind) in dep_scripts.to_link.iter() {
+                    add_to_link(&mut ret, pkg, kind);
+                }
+            }
+        }
+
+        // An occupied entry here means we re-entered this unit while it was
+        // still being computed, i.e. the dependency graph has a cycle.
+        match out.entry(*unit) {
+            Entry::Vacant(entry) => Ok(entry.insert(ret)),
+            Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"),
+        }
+    }
+
+    // When adding an entry to 'to_link' we only actually push it on if the
+    // script hasn't seen it yet (e.g. we don't push on duplicates).
+    fn add_to_link(scripts: &mut BuildScripts, pkg: &PackageId, kind: Kind) {
+        if scripts.seen_to_link.insert((pkg.clone(), kind)) {
+            scripts.to_link.push((pkg.clone(), kind));
+        }
+    }
+}
diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs
new file mode 100644 (file)
index 0000000..6acb85a
--- /dev/null
@@ -0,0 +1,864 @@
+use std::env;
+use std::fs;
+use std::hash::{self, Hasher};
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, Mutex};
+
+use filetime::FileTime;
+use serde::de::{self, Deserialize};
+use serde::ser;
+use serde_json;
+
+use core::{Edition, Package, TargetKind};
+use util;
+use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
+use util::{internal, profile, Dirty, Fresh, Freshness};
+
+use super::{Context, BuildContext, FileFlavor, Unit};
+use super::custom_build::BuildDeps;
+use super::job::Work;
+
+/// A tuple result of the `prepare_foo` functions in this module.
+///
+/// The first element of the triple is whether the target in question is
+/// currently fresh or not, and the second and third elements are the units of
+/// work to perform when the target is dirty or fresh, respectively.
+///
+/// Both units of work are always generated because a fresh package may still be
+/// rebuilt if some upstream dependency changes.
+pub type Preparation = (Freshness, Work, Work);
+
+/// Prepare the necessary work for the fingerprint for a specific target.
+///
+/// When dealing with fingerprints, cargo gets to choose what granularity
+/// "freshness" is considered at. One option is considering freshness at the
+/// package level. This means that if anything in a package changes, the entire
+/// package is rebuilt, unconditionally. This simplicity comes at a cost,
+/// however, in that test-only changes will cause libraries to be rebuilt, which
+/// is quite unfortunate!
+///
+/// The cost was deemed high enough that fingerprints are now calculated at the
+/// layer of a target rather than a package. Each target can then be kept track
+/// of separately and only rebuilt as necessary. This requires cargo to
+/// understand what the inputs are to a target, so we drive rustc with the
+/// --dep-info flag to learn about all input files to a unit of compilation.
+///
+/// This function will calculate the fingerprint for a target and prepare the
+/// work necessary to either write the fingerprint or copy over all fresh files
+/// from the old directories to their new locations.
+pub fn prepare_target<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<Preparation> {
+    let _p = profile::start(format!(
+        "fingerprint: {} / {}",
+        unit.pkg.package_id(),
+        unit.target.name()
+    ));
+    let bcx = cx.bcx;
+    let new = cx.files().fingerprint_dir(unit);
+    let loc = new.join(&filename(cx, unit));
+
+    debug!("fingerprint at: {}", loc.display());
+
+    // Compute the fingerprint for this build and compare it against whatever
+    // was recorded on disk by the previous build (if any).
+    let fingerprint = calculate(cx, unit)?;
+    let compare = compare_old_fingerprint(&loc, &*fingerprint);
+    log_compare(unit, &compare);
+
+    // If our comparison failed (e.g. we're going to trigger a rebuild of this
+    // crate), then we also ensure the source of the crate passes all
+    // verification checks before we build it.
+    //
+    // The `Source::verify` method is intended to allow sources to execute
+    // pre-build checks to ensure that the relevant source code is all
+    // up-to-date and as expected. This is currently used primarily for
+    // directory sources which will use this hook to perform an integrity check
+    // on all files in the source to ensure they haven't changed. If they have
+    // changed then an error is issued.
+    if compare.is_err() {
+        let source_id = unit.pkg.package_id().source_id();
+        let sources = bcx.packages.sources();
+        let source = sources
+            .get(source_id)
+            .ok_or_else(|| internal("missing package source"))?;
+        source.verify(unit.pkg.package_id())?;
+    }
+
+    // Even with a matching fingerprint the target is dirty if an expected
+    // output file is missing. Doc builds are checked via the generated
+    // `index.html`; debug-info outputs are exempt from the check.
+    let root = cx.files().out_dir(unit);
+    let missing_outputs = {
+        if unit.mode.is_doc() {
+            !root.join(unit.target.crate_name())
+                .join("index.html")
+                .exists()
+        } else {
+            match cx.outputs(unit)?
+                .iter()
+                .filter(|output| output.flavor != FileFlavor::DebugInfo)
+                .find(|output| !output.path.exists())
+            {
+                None => false,
+                Some(output) => {
+                    info!("missing output path {:?}", output.path);
+                    true
+                }
+            }
+        }
+    };
+
+    // When extra compiler args were supplied for this unit, a failure to
+    // refresh the local (mtime-based) fingerprint is tolerated rather than
+    // reported as an error.
+    let allow_failure = bcx.extra_args_for(unit).is_some();
+    let target_root = cx.files().target_root().to_path_buf();
+    let write_fingerprint = Work::new(move |_| {
+        match fingerprint.update_local(&target_root) {
+            Ok(()) => {}
+            Err(..) if allow_failure => return Ok(()),
+            Err(e) => return Err(e),
+        }
+        write_fingerprint(&loc, &*fingerprint)
+    });
+
+    let fresh = compare.is_ok() && !missing_outputs;
+    Ok((
+        if fresh { Fresh } else { Dirty },
+        write_fingerprint,
+        Work::noop(),
+    ))
+}
+
+/// The fingerprint of a single compilation unit dependency, comprised of:
+/// * its package id (rendered as a string)
+/// * its extern crate name
+/// * its calculated fingerprint for the dependency
+type DepFingerprint = (String, String, Arc<Fingerprint>);
+
+/// A fingerprint can be considered to be a "short string" representing the
+/// state of a world for a package.
+///
+/// If a fingerprint ever changes, then the package itself needs to be
+/// recompiled. Inputs to the fingerprint include source code modifications,
+/// compiler flags, compiler version, etc. This structure is not simply a
+/// `String` due to the fact that some fingerprints cannot be calculated lazily.
+///
+/// Path sources, for example, use the mtime of the corresponding dep-info file
+/// as a fingerprint (all source files must be modified *before* this mtime).
+/// This dep-info file is not generated, however, until after the crate is
+/// compiled. As a result, this structure can be thought of as a fingerprint
+/// to-be. The actual value can be calculated via `hash()`, but the operation
+/// may fail as some files may not have been generated.
+///
+/// Note that dependencies are taken into account for fingerprints because rustc
+/// requires that whenever an upstream crate is recompiled that all downstream
+/// dependants are also recompiled. This is typically tracked through
+/// `DependencyQueue`, but it also needs to be retained here because Cargo can
+/// be interrupted while executing, losing the state of the `DependencyQueue`
+/// graph.
+#[derive(Serialize, Deserialize)]
+pub struct Fingerprint {
+    // Hash of the compiler's verbose version string (see `calculate`).
+    rustc: u64,
+    // Debug-rendering of the resolved, sorted feature set for the package.
+    features: String,
+    // Hash of the `Target` being built.
+    target: u64,
+    // Hash of the profile, compile mode, extra args, and incremental args.
+    profile: u64,
+    // Hash of the primary path argument handed to rustc (not the cwd).
+    path: u64,
+    // Fingerprints of dependency units, persisted as `(pkg_id, name, hash)`
+    // triples by the custom (de)serializers below.
+    #[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")]
+    deps: Vec<DepFingerprint>,
+    // Locally-calculated inputs: a precalculated package hash, a dep-info
+    // mtime, or environment variable values (see `LocalFingerprint`).
+    local: Vec<LocalFingerprint>,
+    // Lazily-computed cache of `hash()`; intentionally never persisted.
+    #[serde(skip_serializing, skip_deserializing)]
+    memoized_hash: Mutex<Option<u64>>,
+    // RUSTFLAGS (or RUSTDOCFLAGS for doc builds) in effect for this unit.
+    rustflags: Vec<String>,
+    // Edition the target is compiled with.
+    edition: Edition,
+}
+
+fn serialize_deps<S>(deps: &[DepFingerprint], ser: S) -> Result<S::Ok, S::Error>
+where
+    S: ser::Serializer,
+{
+    ser.collect_seq(deps.iter()
+       .map(|&(ref a, ref b, ref c)| (a, b, c.hash())))
+}
+
+fn deserialize_deps<'de, D>(d: D) -> Result<Vec<DepFingerprint>, D::Error>
+where
+    D: de::Deserializer<'de>,
+{
+    let decoded = <Vec<(String, String, u64)>>::deserialize(d)?;
+    Ok(decoded
+        .into_iter()
+        .map(|(pkg_id, name, hash)| {
+            (
+                pkg_id,
+                name,
+                Arc::new(Fingerprint {
+                    rustc: 0,
+                    target: 0,
+                    profile: 0,
+                    path: 0,
+                    local: vec![LocalFingerprint::Precalculated(String::new())],
+                    features: String::new(),
+                    deps: Vec::new(),
+                    memoized_hash: Mutex::new(Some(hash)),
+                    edition: Edition::Edition2015,
+                    rustflags: Vec::new(),
+                }),
+            )
+        })
+        .collect())
+}
+
+#[derive(Serialize, Deserialize, Hash)]
+enum LocalFingerprint {
+    // A value computed up front (e.g. a package fingerprint string); compared
+    // for plain equality.
+    Precalculated(String),
+    // Freshness tracked via the mtime stored in the slot; the path is kept
+    // relative to the target root when possible (see `LocalFingerprint::mtime`).
+    MtimeBased(MtimeSlot, PathBuf),
+    // An environment variable name plus its value at build time (`None` when
+    // the variable was unset).
+    EnvBased(String, Option<String>),
+}
+
+impl LocalFingerprint {
+    /// Builds an `MtimeBased` fingerprint for `path` (which must be absolute),
+    /// storing the path relative to `root` when it lives underneath it; the
+    /// stored path is re-joined against the root in `Fingerprint::update_local`.
+    fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path) -> LocalFingerprint {
+        let mtime = MtimeSlot(Mutex::new(mtime));
+        assert!(path.is_absolute());
+        let path = path.strip_prefix(root).unwrap_or(path);
+        LocalFingerprint::MtimeBased(mtime, path.to_path_buf())
+    }
+}
+
+struct MtimeSlot(Mutex<Option<FileTime>>);
+
+impl Fingerprint {
+    /// Refreshes every `MtimeBased` local fingerprint from the filesystem
+    /// (stored paths are re-joined against `root`), invalidating the memoized
+    /// hash when anything was updated.
+    fn update_local(&self, root: &Path) -> CargoResult<()> {
+        let mut hash_busted = false;
+        for local in self.local.iter() {
+            match *local {
+                LocalFingerprint::MtimeBased(ref slot, ref path) => {
+                    let path = root.join(path);
+                    let mtime = paths::mtime(&path)?;
+                    *slot.0.lock().unwrap() = Some(mtime);
+                }
+                LocalFingerprint::EnvBased(..) | LocalFingerprint::Precalculated(..) => continue,
+            }
+            hash_busted = true;
+        }
+
+        if hash_busted {
+            *self.memoized_hash.lock().unwrap() = None;
+        }
+        Ok(())
+    }
+
+    /// Returns the 64-bit hash of this fingerprint, computing and memoizing
+    /// it on first use.
+    fn hash(&self) -> u64 {
+        if let Some(s) = *self.memoized_hash.lock().unwrap() {
+            return s;
+        }
+        let ret = util::hash_u64(self);
+        *self.memoized_hash.lock().unwrap() = Some(ret);
+        ret
+    }
+
+    /// Compares this (newly calculated) fingerprint against `old` (read back
+    /// from disk), returning `Ok(())` when they match and otherwise an error
+    /// describing the first difference found — the error text is what
+    /// `log_compare` reports as the reason for a rebuild.
+    fn compare(&self, old: &Fingerprint) -> CargoResult<()> {
+        if self.rustc != old.rustc {
+            bail!("rust compiler has changed")
+        }
+        if self.features != old.features {
+            bail!(
+                "features have changed: {} != {}",
+                self.features,
+                old.features
+            )
+        }
+        if self.target != old.target {
+            bail!("target configuration has changed")
+        }
+        if self.path != old.path {
+            bail!("path to the compiler has changed")
+        }
+        if self.profile != old.profile {
+            bail!("profile configuration has changed")
+        }
+        if self.rustflags != old.rustflags {
+            bail!("RUSTFLAGS has changed")
+        }
+        if self.local.len() != old.local.len() {
+            bail!("local lens changed");
+        }
+        if self.edition != old.edition {
+            bail!("edition changed")
+        }
+        // Local fingerprints are compared pairwise and must be of the same
+        // kind at each position.
+        for (new, old) in self.local.iter().zip(&old.local) {
+            match (new, old) {
+                (
+                    &LocalFingerprint::Precalculated(ref a),
+                    &LocalFingerprint::Precalculated(ref b),
+                ) => {
+                    if a != b {
+                        bail!("precalculated components have changed: {} != {}", a, b)
+                    }
+                }
+                (
+                    &LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
+                    &LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp),
+                ) => {
+                    let on_disk_mtime = on_disk_mtime.0.lock().unwrap();
+                    let previously_built_mtime = previously_built_mtime.0.lock().unwrap();
+
+                    // A rebuild is needed when exactly one side has an mtime,
+                    // or when the on-disk file is strictly newer than what was
+                    // recorded at the previous build.
+                    let should_rebuild = match (*on_disk_mtime, *previously_built_mtime) {
+                        (None, None) => false,
+                        (Some(_), None) | (None, Some(_)) => true,
+                        (Some(on_disk), Some(previously_built)) => on_disk > previously_built,
+                    };
+
+                    if should_rebuild {
+                        bail!(
+                            "mtime based components have changed: previously {:?} now {:?}, \
+                             paths are {:?} and {:?}",
+                            *previously_built_mtime,
+                            *on_disk_mtime,
+                            ap,
+                            bp
+                        )
+                    }
+                }
+                (
+                    &LocalFingerprint::EnvBased(ref akey, ref avalue),
+                    &LocalFingerprint::EnvBased(ref bkey, ref bvalue),
+                ) => {
+                    if *akey != *bkey {
+                        bail!("env vars changed: {} != {}", akey, bkey);
+                    }
+                    if *avalue != *bvalue {
+                        bail!(
+                            "env var `{}` changed: previously {:?} now {:?}",
+                            akey,
+                            bvalue,
+                            avalue
+                        )
+                    }
+                }
+                _ => bail!("local fingerprint type has changed"),
+            }
+        }
+
+        if self.deps.len() != old.deps.len() {
+            bail!("number of dependencies has changed")
+        }
+        // Dependencies are compared by extern crate name and memoized hash;
+        // the package-id strings (index .0) are only used for the message.
+        for (a, b) in self.deps.iter().zip(old.deps.iter()) {
+            if a.1 != b.1 || a.2.hash() != b.2.hash() {
+                bail!("new ({}) != old ({})", a.0, b.0)
+            }
+        }
+        Ok(())
+    }
+}
+
+impl hash::Hash for Fingerprint {
+    fn hash<H: Hasher>(&self, h: &mut H) {
+        // Destructure so each hashed field is named explicitly; the `..`
+        // deliberately skips `memoized_hash`, which is a cache of this very
+        // computation and must not feed back into it.
+        let Fingerprint {
+            rustc,
+            ref features,
+            target,
+            path,
+            profile,
+            ref deps,
+            ref local,
+            edition,
+            ref rustflags,
+            ..
+        } = *self;
+        (
+            rustc, features, target, path, profile, local, edition, rustflags,
+        ).hash(h);
+
+        h.write_usize(deps.len());
+        for &(ref pkg_id, ref name, ref fingerprint) in deps {
+            pkg_id.hash(h);
+            name.hash(h);
+            // use memoized dep hashes to avoid exponential blowup
+            h.write_u64(Fingerprint::hash(fingerprint));
+        }
+    }
+}
+
+impl hash::Hash for MtimeSlot {
+    // Hashes the slot's current contents (an `Option<FileTime>`).
+    fn hash<H: Hasher>(&self, h: &mut H) {
+        self.0.lock().unwrap().hash(h)
+    }
+}
+
+impl ser::Serialize for MtimeSlot {
+    // Serialized as an optional `(unix_seconds, nanoseconds)` pair; mirrored
+    // by the `Deserialize` impl below.
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        self.0
+            .lock()
+            .unwrap()
+            .map(|ft| (ft.unix_seconds(), ft.nanoseconds()))
+            .serialize(s)
+    }
+}
+
+impl<'de> de::Deserialize<'de> for MtimeSlot {
+    fn deserialize<D>(d: D) -> Result<MtimeSlot, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        let kind: Option<(i64, u32)> = de::Deserialize::deserialize(d)?;
+        Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
+            FileTime::from_unix_time(s, n)
+        }))))
+    }
+}
+
+/// Calculates the fingerprint for a package/target pair.
+///
+/// This fingerprint is used by Cargo to learn about when information such as:
+///
+/// * A non-path package changes (changes version, changes revision, etc).
+/// * Any dependency changes
+/// * The compiler changes
+/// * The set of features a package is built with changes
+/// * The profile a target is compiled with changes (e.g. opt-level changes)
+///
+/// Information like file modification time is only calculated for path
+/// dependencies and is calculated in `calculate_target_fresh`.
+fn calculate<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<Arc<Fingerprint>> {
+    let bcx = cx.bcx;
+    // Memoized per unit: recursion below would otherwise recompute shared
+    // dependencies many times over.
+    if let Some(s) = cx.fingerprints.get(unit) {
+        return Ok(Arc::clone(s));
+    }
+
+    // Next, recursively calculate the fingerprint for all of our dependencies.
+    //
+    // Skip the fingerprints of build scripts as they may not always be
+    // available and the dirtiness propagation for modification is tracked
+    // elsewhere. Also skip fingerprints of binaries because they don't actually
+    // induce a recompile, they're just dependencies in the sense that they need
+    // to be built.
+    let deps = cx.dep_targets(unit);
+    let deps = deps.iter()
+        .filter(|u| !u.target.is_custom_build() && !u.target.is_bin())
+        .map(|dep| {
+            calculate(cx, dep).and_then(|fingerprint| {
+                let name = cx.bcx.extern_crate_name(unit, dep)?;
+                Ok((dep.pkg.package_id().to_string(), name, fingerprint))
+            })
+        })
+        .collect::<CargoResult<Vec<_>>>()?;
+
+    // And finally, calculate what our own local fingerprint is
+    let local = if use_dep_info(unit) {
+        let dep_info = dep_info_loc(cx, unit);
+        let mtime = dep_info_mtime_if_fresh(unit.pkg, &dep_info)?;
+        LocalFingerprint::mtime(cx.files().target_root(), mtime, &dep_info)
+    } else {
+        let fingerprint = pkg_fingerprint(&cx.bcx, unit.pkg)?;
+        LocalFingerprint::Precalculated(fingerprint)
+    };
+    // Sort dependencies by package id so the hash is deterministic across
+    // runs regardless of traversal order.
+    let mut deps = deps;
+    deps.sort_by(|&(ref a, _, _), &(ref b, _, _)| a.cmp(b));
+    let extra_flags = if unit.mode.is_doc() {
+        bcx.rustdocflags_args(unit)?
+    } else {
+        bcx.rustflags_args(unit)?
+    };
+    let profile_hash = util::hash_u64(&(
+        &unit.profile,
+        unit.mode,
+        bcx.extra_args_for(unit),
+        cx.incremental_args(unit)?,
+    ));
+    let fingerprint = Arc::new(Fingerprint {
+        rustc: util::hash_u64(&bcx.rustc.verbose_version),
+        target: util::hash_u64(&unit.target),
+        profile: profile_hash,
+        // Note that .0 is hashed here, not .1 which is the cwd. That doesn't
+        // actually affect the output artifact so there's no need to hash it.
+        path: util::hash_u64(&super::path_args(&cx.bcx, unit).0),
+        features: format!("{:?}", bcx.resolve.features_sorted(unit.pkg.package_id())),
+        deps,
+        local: vec![local],
+        memoized_hash: Mutex::new(None),
+        edition: unit.target.edition(),
+        rustflags: extra_flags,
+    });
+    cx.fingerprints.insert(*unit, Arc::clone(&fingerprint));
+    Ok(fingerprint)
+}
+
+// We want to use the mtime for files if we're a path source, but if we're a
+// git/registry source, then the mtime of files may fluctuate, but they won't
+// change so long as the source itself remains constant (which is the
+// responsibility of the source)
+fn use_dep_info(unit: &Unit) -> bool {
+    let path = unit.pkg.summary().source_id().is_path();
+    !unit.mode.is_doc() && path
+}
+
+/// Prepare the necessary work for the fingerprint of a build command.
+///
+/// Build commands are located on packages, not on targets. Additionally, we
+/// don't have --dep-info to drive calculation of the fingerprint of a build
+/// command. This brings up an interesting predicament which gives us a few
+/// options to figure out whether a build command is dirty or not:
+///
+/// 1. A build command is dirty if *any* file in a package changes. In theory
+///    all files are candidate for being used by the build command.
+/// 2. A build command is dirty if any file in a *specific directory* changes.
+///    This may lose information as it may require files outside of the specific
+///    directory.
+/// 3. A build command must itself provide a dep-info-like file stating how it
+///    should be considered dirty or not.
+///
+/// The currently implemented solution is option (1), although it is planned to
+/// migrate to option (2) in the near future.
+pub fn prepare_build_cmd<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<Preparation> {
+    let _p = profile::start(format!("fingerprint build cmd: {}", unit.pkg.package_id()));
+    let new = cx.files().fingerprint_dir(unit);
+    let loc = new.join("build");
+
+    debug!("fingerprint at: {}", loc.display());
+
+    // Build scripts only fingerprint their local inputs; the rustc/target/
+    // profile/etc. fields are left zeroed and play no role in the comparison.
+    let (local, output_path) = build_script_local_fingerprints(cx, unit)?;
+    let mut fingerprint = Fingerprint {
+        rustc: 0,
+        target: 0,
+        profile: 0,
+        path: 0,
+        features: String::new(),
+        deps: Vec::new(),
+        local,
+        memoized_hash: Mutex::new(None),
+        edition: Edition::Edition2015,
+        rustflags: Vec::new(),
+    };
+    let compare = compare_old_fingerprint(&loc, &fingerprint);
+    log_compare(unit, &compare);
+
+    // When we write out the fingerprint, we may want to actually change the
+    // kind of fingerprint being recorded. The state before this run (including
+    // the case where the build script never ran) may indicate we should use
+    // the `Precalculated` variant with the `pkg_fingerprint`. If the build
+    // script then prints `rerun-if-changed`, however, we need to record what's
+    // necessary for that fingerprint.
+    //
+    // Hence, if there were some `rerun-if-changed` directives forcibly change
+    // the kind of fingerprint by reinterpreting the dependencies output by the
+    // build script.
+    let state = Arc::clone(&cx.build_state);
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    let pkg_root = unit.pkg.root().to_path_buf();
+    let target_root = cx.files().target_root().to_path_buf();
+    let write_fingerprint = Work::new(move |_| {
+        if let Some(output_path) = output_path {
+            let outputs = state.outputs.lock().unwrap();
+            let outputs = &outputs[&key];
+            if !outputs.rerun_if_changed.is_empty() || !outputs.rerun_if_env_changed.is_empty() {
+                let deps = BuildDeps::new(&output_path, Some(outputs));
+                fingerprint.local = local_fingerprints_deps(&deps, &target_root, &pkg_root);
+                fingerprint.update_local(&target_root)?;
+            }
+        }
+        write_fingerprint(&loc, &fingerprint)
+    });
+
+    Ok((
+        if compare.is_ok() { Fresh } else { Dirty },
+        write_fingerprint,
+        Work::noop(),
+    ))
+}
+
+/// Computes the `LocalFingerprint`s for a build script unit.
+///
+/// Returns the fingerprints along with the path to the script's output file
+/// when that fingerprint should be refreshed after the script runs (`None`
+/// for fully overridden build scripts).
+fn build_script_local_fingerprints<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> {
+    let state = cx.build_state.outputs.lock().unwrap();
+    // First up, if this build script is entirely overridden, then we just
+    // return the hash of what we overrode it with.
+    //
+    // Note that the `None` here means that we don't want to update the local
+    // fingerprint afterwards because this is all just overridden.
+    if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
+        debug!("override local fingerprints deps");
+        let s = format!(
+            "overridden build state with hash: {}",
+            util::hash_u64(output)
+        );
+        return Ok((vec![LocalFingerprint::Precalculated(s)], None));
+    }
+
+    // Next up we look at the previously listed dependencies for the build
+    // script. If there are none then we're in the "old mode" where we just
+    // assume that we're changed if anything in the packaged changed. The
+    // `Some` here though means that we want to update our local fingerprints
+    // after we're done as running this build script may have created more
+    // dependencies.
+    let deps = &cx.build_explicit_deps[unit];
+    let output = deps.build_script_output.clone();
+    if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
+        debug!("old local fingerprints deps");
+        let s = pkg_fingerprint(&cx.bcx, unit.pkg)?;
+        return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)));
+    }
+
+    // Ok so now we're in "new mode" where we can have files listed as
+    // dependencies as well as env vars listed as dependencies. Process them all
+    // here.
+    Ok((
+        local_fingerprints_deps(deps, cx.files().target_root(), unit.pkg.root()),
+        Some(output),
+    ))
+}
+
+fn local_fingerprints_deps(
+    deps: &BuildDeps,
+    target_root: &Path,
+    pkg_root: &Path,
+) -> Vec<LocalFingerprint> {
+    debug!("new local fingerprints deps");
+    let mut local = Vec::new();
+    if !deps.rerun_if_changed.is_empty() {
+        let output = &deps.build_script_output;
+        let deps = deps.rerun_if_changed.iter().map(|p| pkg_root.join(p));
+        let mtime = mtime_if_fresh(output, deps);
+        local.push(LocalFingerprint::mtime(target_root, mtime, output));
+    }
+
+    for var in deps.rerun_if_env_changed.iter() {
+        let val = env::var(var).ok();
+        local.push(LocalFingerprint::EnvBased(var.clone(), val));
+    }
+
+    local
+}
+
+fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
+    let hash = fingerprint.hash();
+    debug!("write fingerprint: {}", loc.display());
+    paths::write(loc, util::to_hex(hash).as_bytes())?;
+    paths::write(
+        &loc.with_extension("json"),
+        &serde_json::to_vec(&fingerprint).unwrap(),
+    )?;
+    Ok(())
+}
+
+/// Prepare for work when a package starts to build
+pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {
+    let new1 = cx.files().fingerprint_dir(unit);
+
+    if fs::metadata(&new1).is_err() {
+        fs::create_dir(&new1)?;
+    }
+
+    Ok(())
+}
+
+pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
+    cx.files()
+        .fingerprint_dir(unit)
+        .join(&format!("dep-{}", filename(cx, unit)))
+}
+
+/// Compares `new_fingerprint` against the fingerprint recorded at `loc`.
+///
+/// Succeeds when the short hex hash matches; otherwise falls back to a
+/// field-by-field comparison of the JSON-encoded fingerprint so the returned
+/// error describes *what* changed.
+fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> {
+    let old_fingerprint_short = paths::read(loc)?;
+    let new_hash = new_fingerprint.hash();
+
+    if util::to_hex(new_hash) == old_fingerprint_short {
+        return Ok(());
+    }
+
+    let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
+    let old_fingerprint = serde_json::from_str(&old_fingerprint_json)
+        .chain_err(|| internal("failed to deserialize json"))?;
+    new_fingerprint.compare(&old_fingerprint)
+}
+
+fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
+    let ce = match *compare {
+        Ok(..) => return,
+        Err(ref e) => e,
+    };
+    info!("fingerprint error for {}: {}", unit.pkg, ce);
+
+    for cause in ce.iter_causes() {
+        info!("  cause: {}", cause);
+    }
+}
+
+// Parse the dep-info into a list of paths
+pub fn parse_dep_info(pkg: &Package, dep_info: &Path) -> CargoResult<Option<Vec<PathBuf>>> {
+    let data = match paths::read_bytes(dep_info) {
+        Ok(data) => data,
+        Err(_) => return Ok(None),
+    };
+    let paths = data.split(|&x| x == 0)
+        .filter(|x| !x.is_empty())
+        .map(|p| util::bytes2path(p).map(|p| pkg.root().join(p)))
+        .collect::<Result<Vec<_>, _>>()?;
+    if paths.is_empty() {
+        Ok(None)
+    } else {
+        Ok(Some(paths))
+    }
+}
+
+fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<Option<FileTime>> {
+    if let Some(paths) = parse_dep_info(pkg, dep_info)? {
+        Ok(mtime_if_fresh(dep_info, paths.iter()))
+    } else {
+        Ok(None)
+    }
+}
+
+fn pkg_fingerprint(bcx: &BuildContext, pkg: &Package) -> CargoResult<String> {
+    let source_id = pkg.package_id().source_id();
+    let sources = bcx.packages.sources();
+
+    let source = sources
+        .get(source_id)
+        .ok_or_else(|| internal("missing package source"))?;
+    source.fingerprint(pkg)
+}
+
+/// Returns the mtime of `output` provided none of `paths` is stale relative
+/// to it; returns `None` (meaning "rebuild") if `output` is missing, any path
+/// in `paths` is missing, or any path's mtime is >= `output`'s.
+fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
+where
+    I: IntoIterator,
+    I::Item: AsRef<Path>,
+{
+    let mtime = match paths::mtime(output) {
+        Ok(mtime) => mtime,
+        Err(..) => return None,
+    };
+
+    let any_stale = paths.into_iter().any(|path| {
+        let path = path.as_ref();
+        let mtime2 = match paths::mtime(path) {
+            Ok(mtime) => mtime,
+            Err(..) => {
+                info!("stale: {} -- missing", path.display());
+                return true;
+            }
+        };
+
+        // Note that equal mtimes are considered "stale". For filesystems with
+        // not much timestamp precision like 1s this is a conservative approximation
+        // to handle the case where a file is modified within the same second after
+        // a build finishes. We want to make sure that incremental rebuilds pick that up!
+        //
+        // For filesystems with nanosecond precision it's been seen in the wild that
+        // its "nanosecond precision" isn't really nanosecond-accurate. It turns out that
+        // kernels may cache the current time so files created at different times actually
+        // list the same nanosecond precision. Some digging on #5919 picked up that the
+        // kernel caches the current time between timer ticks, which could mean that if
+        // a file is updated at most 10ms after a build finishes then Cargo may not
+        // pick up the build changes.
+        //
+        // All in all, the equality check here is a conservative assumption that,
+        // if equal, files were changed just after a previous build finished.
+        // It's hoped this doesn't cause too many issues in practice!
+        if mtime2 >= mtime {
+            info!("stale: {} -- {} vs {}", path.display(), mtime2, mtime);
+            true
+        } else {
+            false
+        }
+    });
+
+    if any_stale {
+        None
+    } else {
+        Some(mtime)
+    }
+}
+
+/// Computes the basename of the fingerprint file for `unit`, of the form
+/// `<flavor><kind>-<file_stem>` (e.g. `lib-foo-<metadata hash>`).
+fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
+    // file_stem includes metadata hash. Thus we have a different
+    // fingerprint for every metadata hash version. This works because
+    // even if the package is fresh, we'll still link the fresh target
+    let file_stem = cx.files().file_stem(unit);
+    let kind = match *unit.target.kind() {
+        TargetKind::Lib(..) => "lib",
+        TargetKind::Bin => "bin",
+        TargetKind::Test => "integration-test",
+        TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example",
+        TargetKind::Bench => "bench",
+        TargetKind::CustomBuild => "build-script",
+    };
+    // Test and doc builds of the same target get distinct fingerprint files.
+    let flavor = if unit.mode.is_any_test() {
+        "test-"
+    } else if unit.mode.is_doc() {
+        "doc-"
+    } else {
+        ""
+    };
+    format!("{}{}-{}", flavor, kind, file_stem)
+}
+
+/// Parses the dep-info file coming out of rustc into a Cargo-specific format.
+///
+/// This function will parse `rustc_dep_info` as a makefile-style dep info to
+/// learn about all the files which a crate depends on. This is then
+/// re-serialized into the `cargo_dep_info` path in a Cargo-specific format.
+///
+/// The `pkg_root` argument here is the absolute path to the directory
+/// containing `Cargo.toml` for this crate that was compiled. The paths listed
+/// in the rustc dep-info file may or may not be absolute but we'll want to
+/// consider all of them relative to the `root` specified.
+///
+/// The `rustc_cwd` argument is the absolute path to the cwd of the compiler
+/// when it was invoked.
+///
+/// The serialized Cargo format will contain a list of files, all of which are
+/// relative if they're under `root`, or absolute if they're elsewhere.
+pub fn translate_dep_info(
+    rustc_dep_info: &Path,
+    cargo_dep_info: &Path,
+    pkg_root: &Path,
+    rustc_cwd: &Path,
+) -> CargoResult<()> {
+    // Only the dependency list of the first target in the file is used.
+    let target = parse_rustc_dep_info(rustc_dep_info)?;
+    let deps = &target
+        .get(0)
+        .ok_or_else(|| internal("malformed dep-info format, no targets".to_string()))?
+        .1;
+
+    // Entries are written NUL-separated; each path is resolved against the
+    // compiler's cwd, then re-relativized against the package root if possible.
+    let mut new_contents = Vec::new();
+    for file in deps {
+        let absolute = rustc_cwd.join(file);
+        let path = absolute.strip_prefix(pkg_root).unwrap_or(&absolute);
+        new_contents.extend(util::path2bytes(path)?);
+        new_contents.push(0);
+    }
+    paths::write(cargo_dep_info, &new_contents)?;
+    Ok(())
+}
+
+/// Parses a makefile-style dep-info file emitted by rustc into
+/// `(target, dependencies)` pairs, one per `target: dep dep ...` line.
+pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<Vec<(String, Vec<String>)>> {
+    let contents = paths::read(rustc_dep_info)?;
+    contents
+        .lines()
+        .filter_map(|l| l.find(": ").map(|i| (l, i)))
+        .map(|(line, pos)| {
+            let target = &line[..pos];
+            let mut deps = line[pos + 2..].split_whitespace();
+
+            let mut ret = Vec::new();
+            while let Some(s) = deps.next() {
+                let mut file = s.to_string();
+                // A trailing `\` marks an escaped space inside a filename:
+                // rejoin this token with the next whitespace-split one.
+                while file.ends_with('\\') {
+                    file.pop();
+                    file.push(' ');
+                    file.push_str(deps.next().ok_or_else(|| {
+                        internal("malformed dep-info format, trailing \\".to_string())
+                    })?);
+                }
+                ret.push(file);
+            }
+            Ok((target.to_string(), ret))
+        })
+        .collect()
+}
diff --git a/src/cargo/core/compiler/job.rs b/src/cargo/core/compiler/job.rs
new file mode 100644 (file)
index 0000000..61e979f
--- /dev/null
@@ -0,0 +1,71 @@
+use std::fmt;
+
+use util::{CargoResult, Dirty, Fresh, Freshness};
+use super::job_queue::JobState;
+
+/// A unit of work scheduled on the job queue, carrying both the "dirty"
+/// (perform the work) and "fresh" (skip/reuse) variants; `Job::run` picks
+/// one based on the `Freshness` it is given.
+pub struct Job {
+    dirty: Work,
+    fresh: Work,
+}
+
+/// Each proc should send its description before starting.
+/// It should send either once or close immediately.
+pub struct Work {
+    // Boxed once-callable closure; goes through the `FnBox` workaround
+    // below because a `Box<FnOnce>` cannot be invoked directly here.
+    inner: Box<for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
+}
+
+/// Workaround trait for calling a boxed once-callable closure: the call
+/// consumes `Box<Self>` instead of `self`.
+trait FnBox<A, R> {
+    fn call_box(self: Box<Self>, a: A) -> R;
+}
+
+// Blanket impl: any `FnOnce(A) -> R` is callable through `FnBox`.
+impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
+    fn call_box(self: Box<F>, a: A) -> R {
+        (*self)(a)
+    }
+}
+
+impl Work {
+    /// Wrap a closure as a unit of work.
+    pub fn new<F>(f: F) -> Work
+    where
+        F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static,
+    {
+        Work { inner: Box::new(f) }
+    }
+
+    /// A unit of work that does nothing and always succeeds.
+    pub fn noop() -> Work {
+        Work::new(|_| Ok(()))
+    }
+
+    /// Consume and execute this work, reporting through the given state.
+    pub fn call(self, tx: &JobState) -> CargoResult<()> {
+        self.inner.call_box(tx)
+    }
+
+    /// Sequence two units of work; `next` runs only if `self` succeeds.
+    pub fn then(self, next: Work) -> Work {
+        Work::new(move |state| {
+            self.call(state)?;
+            next.call(state)
+        })
+    }
+}
+
+impl Job {
+    /// Create a new job representing a unit of work.
+    pub fn new(dirty: Work, fresh: Work) -> Job {
+        Job { dirty, fresh }
+    }
+
+    /// Consumes this job by running it, returning the result of the
+    /// computation.
+    ///
+    /// `fresh` selects which variant runs: the cheap `fresh` path or the
+    /// full `dirty` one.
+    pub fn run(self, fresh: Freshness, state: &JobState) -> CargoResult<()> {
+        match fresh {
+            Fresh => self.fresh.call(state),
+            Dirty => self.dirty.call(state),
+        }
+    }
+}
+
+// The closures inside a `Job` are opaque, so there is nothing useful to show.
+impl fmt::Debug for Job {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "Job {{ ... }}")
+    }
+}
diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs
new file mode 100644 (file)
index 0000000..1bb654d
--- /dev/null
@@ -0,0 +1,551 @@
+use std::collections::hash_map::HashMap;
+use std::collections::HashSet;
+use std::fmt;
+use std::io;
+use std::mem;
+use std::sync::mpsc::{channel, Receiver, Sender};
+use std::sync::Arc;
+use std::process::Output;
+
+use crossbeam_utils;
+use crossbeam_utils::thread::Scope;
+use jobserver::{Acquired, HelperThread};
+
+use core::profiles::Profile;
+use core::{PackageId, Target, TargetKind};
+use handle_error;
+use util;
+use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
+use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
+use util::{Progress, ProgressStyle};
+use util::diagnostic_server::{self, DiagnosticPrinter};
+
+use super::job::Job;
+use super::{BuildContext, BuildPlan, CompileMode, Context, Kind, Unit};
+use super::context::OutputFile;
+
+/// A management structure of the entire dependency graph to compile.
+///
+/// This structure is backed by the `DependencyQueue` type and manages the
+/// actual compilation step of each package. Packages enqueue units of work and
+/// then later on the entire graph is processed and compiled.
+pub struct JobQueue<'a> {
+    /// Jobs keyed by unit, with their dependency edges, not yet started.
+    queue: DependencyQueue<Key<'a>, Vec<(Job, Freshness)>>,
+    /// Send half handed out to workers; events come back on `rx`.
+    tx: Sender<Message<'a>>,
+    rx: Receiver<Message<'a>>,
+    /// Keys currently being worked on.
+    active: Vec<Key<'a>>,
+    /// Per-key bookkeeping for multi-job units (see `PendingBuild`).
+    pending: HashMap<Key<'a>, PendingBuild>,
+    /// Packages for which a "Compiling"/"Checking"/"Fresh" line was printed.
+    compiled: HashSet<&'a PackageId>,
+    /// Packages for which a "Documenting" line was printed.
+    documented: HashSet<&'a PackageId>,
+    /// Remaining job count per package; consulted when deciding whether to
+    /// print the `Fresh` status line.
+    counts: HashMap<&'a PackageId, usize>,
+    is_release: bool,
+}
+
+/// A helper structure for metadata about the state of a building package.
+struct PendingBuild {
+    /// Number of jobs currently active. When this drops to zero the key is
+    /// marked finished in the dependency queue.
+    amt: usize,
+    /// Current freshness state of this package. Any dirty target within a
+    /// package will cause the entire package to become dirty.
+    fresh: Freshness,
+}
+
+/// Identity of one unit of work in the queue: a
+/// (package, target, profile, kind, mode) tuple.
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+struct Key<'a> {
+    pkg: &'a PackageId,
+    target: &'a Target,
+    profile: Profile,
+    kind: Kind,
+    mode: CompileMode,
+}
+
+impl<'a> Key<'a> {
+    /// Short human-readable label for this unit, used in the progress bar,
+    /// e.g. `foo`, `foo(doc)`, `foo(build.rs)`, or `bar(bin)`.
+    fn name_for_progress(&self) -> String {
+        let pkg_name = self.pkg.name();
+        match self.mode {
+            CompileMode::Doc { .. } => format!("{}(doc)", pkg_name),
+            CompileMode::RunCustomBuild => format!("{}(build)", pkg_name),
+            _ => {
+                let annotation = match self.target.kind() {
+                    TargetKind::Lib(_) => return pkg_name.to_string(),
+                    TargetKind::CustomBuild => return format!("{}(build.rs)", pkg_name),
+                    TargetKind::Bin => "bin",
+                    TargetKind::Test => "test",
+                    TargetKind::Bench => "bench",
+                    TargetKind::ExampleBin | TargetKind::ExampleLib(_) => "example",
+                };
+                format!("{}({})", self.target.name(), annotation)
+            }
+        }
+    }
+}
+
+/// Handle given to running jobs for reporting events back to the main loop.
+pub struct JobState<'a> {
+    tx: Sender<Message<'a>>,
+}
+
+/// Events sent from jobs and helper threads back to `drain_the_queue`.
+enum Message<'a> {
+    /// A command is about to run (its rendered command line).
+    Run(String),
+    /// A rustc invocation to record in the build plan.
+    BuildPlanMsg(String, ProcessBuilder, Arc<Vec<OutputFile>>),
+    /// Captured stdout/stderr output from a running command.
+    Stdout(String),
+    Stderr(String),
+    /// A diagnostic forwarded from the `cargo fix` diagnostic server.
+    FixDiagnostic(diagnostic_server::Message),
+    /// A jobserver token was (or failed to be) acquired.
+    Token(io::Result<Acquired>),
+    /// A job finished with the given result.
+    Finish(Key<'a>, CargoResult<()>),
+}
+
+impl<'a> JobState<'a> {
+    pub fn running(&self, cmd: &ProcessBuilder) {
+        let _ = self.tx.send(Message::Run(cmd.to_string()));
+    }
+
+    pub fn build_plan(
+        &self,
+        module_name: String,
+        cmd: ProcessBuilder,
+        filenames: Arc<Vec<OutputFile>>,
+    ) {
+        let _ = self.tx
+            .send(Message::BuildPlanMsg(module_name, cmd, filenames));
+    }
+
+    pub fn capture_output(
+        &self,
+        cmd: &ProcessBuilder,
+        prefix: Option<String>,
+        print_output: bool,
+    ) -> CargoResult<Output> {
+        let prefix = prefix.unwrap_or_else(|| String::new());
+        cmd.exec_with_streaming(
+            &mut |out| {
+                let _ = self.tx.send(Message::Stdout(format!("{}{}", prefix, out)));
+                Ok(())
+            },
+            &mut |err| {
+                let _ = self.tx.send(Message::Stderr(format!("{}{}", prefix, err)));
+                Ok(())
+            },
+            print_output,
+        )
+    }
+}
+
+impl<'a> JobQueue<'a> {
+    /// Create an empty queue; `bcx` supplies the release flag used for the
+    /// final "Finished" status line.
+    pub fn new<'cfg>(bcx: &BuildContext<'a, 'cfg>) -> JobQueue<'a> {
+        let (tx, rx) = channel();
+        JobQueue {
+            queue: DependencyQueue::new(),
+            tx,
+            rx,
+            active: Vec::new(),
+            pending: HashMap::new(),
+            compiled: HashSet::new(),
+            documented: HashSet::new(),
+            counts: HashMap::new(),
+            is_release: bcx.build_config.release,
+        }
+    }
+
+    /// Add `job` for `unit` to the queue, recording its dependency edges.
+    pub fn enqueue<'cfg>(
+        &mut self,
+        cx: &Context<'a, 'cfg>,
+        unit: &Unit<'a>,
+        job: Job,
+        fresh: Freshness,
+    ) -> CargoResult<()> {
+        let key = Key::new(unit);
+        let deps = key.dependencies(cx)?;
+        self.queue
+            .queue(Fresh, &key, Vec::new(), &deps)
+            .push((job, fresh));
+        *self.counts.entry(key.pkg).or_insert(0) += 1;
+        Ok(())
+    }
+
+    /// Execute all jobs necessary to build the dependency graph.
+    ///
+    /// This function will spawn off `config.jobs()` workers to build all of the
+    /// necessary dependencies, in order. Freshness is propagated as far as
+    /// possible along each dependency chain.
+    pub fn execute(&mut self, cx: &mut Context, plan: &mut BuildPlan) -> CargoResult<()> {
+        let _p = profile::start("executing the job graph");
+        self.queue.queue_finished();
+
+        // We need to give a handle to the send half of our message queue to the
+        // jobserver and (optionally) diagnostic helper thread. Unfortunately
+        // though we need the handle to be `'static` as that's typically what's
+        // required when spawning a thread!
+        //
+        // To work around this we transmute the `Sender` to a static lifetime.
+        // We're only sending "longer living" messages and we should also
+        // destroy all references to the channel before this function exits as
+        // the destructor for the `helper` object will ensure the associated
+        // thread is no longer running.
+        //
+        // As a result, this `transmute` to a longer lifetime should be safe in
+        // practice.
+        let tx = self.tx.clone();
+        let tx = unsafe { mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx) };
+        let tx2 = tx.clone();
+        let helper = cx.jobserver
+            .clone()
+            .into_helper_thread(move |token| {
+                drop(tx.send(Message::Token(token)));
+            })
+            .chain_err(|| "failed to create helper thread for jobserver management")?;
+        let _diagnostic_server = cx.bcx.build_config
+            .rustfix_diagnostic_server
+            .borrow_mut()
+            .take()
+            .map(move |srv| {
+                srv.start(move |msg| drop(tx2.send(Message::FixDiagnostic(msg))))
+            });
+
+        crossbeam_utils::thread::scope(|scope| self.drain_the_queue(cx, plan, scope, &helper))
+    }
+
+    /// Main scheduling loop: repeatedly dequeue ready work, spawn it
+    /// (bounded by jobserver tokens), and react to incoming messages until
+    /// the graph is drained or a job fails.
+    fn drain_the_queue(
+        &mut self,
+        cx: &mut Context,
+        plan: &mut BuildPlan,
+        scope: &Scope<'a>,
+        jobserver_helper: &HelperThread,
+    ) -> CargoResult<()> {
+        let mut tokens = Vec::new();
+        let mut queue = Vec::new();
+        let build_plan = cx.bcx.build_config.build_plan;
+        let mut print = DiagnosticPrinter::new(cx.bcx.config);
+        trace!("queue: {:#?}", self.queue);
+
+        // Iteratively execute the entire dependency graph. Each turn of the
+        // loop starts out by scheduling as much work as possible (up to the
+        // maximum number of parallel jobs we have tokens for). A local queue
+        // is maintained separately from the main dependency queue as one
+        // dequeue may actually dequeue quite a bit of work (e.g. 10 binaries
+        // in one package).
+        //
+        // After a job has finished we update our internal state if it was
+        // successful and otherwise wait for pending work to finish if it failed
+        // and then immediately return.
+        let mut error = None;
+        let mut progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config);
+        let total = self.queue.len();
+        loop {
+            // Dequeue as much work as we can, learning about everything
+            // possible that can run. Note that this is also the point where we
+            // start requesting job tokens. Each job after the first needs to
+            // request a token.
+            while let Some((fresh, key, jobs)) = self.queue.dequeue() {
+                let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| f.combine(fresh));
+                self.pending.insert(
+                    key,
+                    PendingBuild {
+                        amt: jobs.len(),
+                        fresh: total_fresh,
+                    },
+                );
+                for (job, f) in jobs {
+                    queue.push((key, job, f.combine(fresh)));
+                    if !self.active.is_empty() || !queue.is_empty() {
+                        jobserver_helper.request_token();
+                    }
+                }
+            }
+
+            // Now that we've learned of all possible work that we can execute
+            // try to spawn it so long as we've got a jobserver token which says
+            // we're able to perform some parallel work.
+            while error.is_none() && self.active.len() < tokens.len() + 1 && !queue.is_empty() {
+                let (key, job, fresh) = queue.remove(0);
+                self.run(key, fresh, job, cx.bcx.config, scope, build_plan)?;
+            }
+
+            // If after all that we're not actually running anything then we're
+            // done!
+            if self.active.is_empty() {
+                break;
+            }
+
+            // And finally, before we block waiting for the next event, drop any
+            // excess tokens we may have accidentally acquired. Due to how our
+            // jobserver interface is architected we may acquire a token that we
+            // don't actually use, and if this happens just relinquish it back
+            // to the jobserver itself.
+            tokens.truncate(self.active.len() - 1);
+
+            let count = total - self.queue.len();
+            let active_names = self.active.iter()
+                .map(Key::name_for_progress)
+                .collect::<Vec<_>>();
+            drop(progress.tick_now(count, total, &format!(": {}", active_names.join(", "))));
+            let event = self.rx.recv().unwrap();
+            progress.clear();
+
+            match event {
+                Message::Run(cmd) => {
+                    cx.bcx
+                        .config
+                        .shell()
+                        .verbose(|c| c.status("Running", &cmd))?;
+                }
+                Message::BuildPlanMsg(module_name, cmd, filenames) => {
+                    plan.update(&module_name, &cmd, &filenames)?;
+                }
+                Message::Stdout(out) => {
+                    println!("{}", out);
+                }
+                Message::Stderr(err) => {
+                    let mut shell = cx.bcx.config.shell();
+                    shell.print_ansi(err.as_bytes())?;
+                    shell.err().write(b"\n")?;
+                }
+                Message::FixDiagnostic(msg) => {
+                    print.print(&msg)?;
+                }
+                Message::Finish(key, result) => {
+                    info!("end: {:?}", key);
+
+                    // self.active.remove_item(&key); // <- switch to this when stabilized.
+                    let pos = self
+                        .active
+                        .iter()
+                        .position(|k| *k == key)
+                        .expect("an unrecorded package has finished compiling");
+                    self.active.remove(pos);
+                    if !self.active.is_empty() {
+                        assert!(!tokens.is_empty());
+                        drop(tokens.pop());
+                    }
+                    match result {
+                        Ok(()) => self.finish(key, cx)?,
+                        Err(e) => {
+                            let msg = "The following warnings were emitted during compilation:";
+                            self.emit_warnings(Some(msg), &key, cx)?;
+
+                            if !self.active.is_empty() {
+                                error = Some(format_err!("build failed"));
+                                handle_error(&e, &mut *cx.bcx.config.shell());
+                                cx.bcx.config.shell().warn(
+                                    "build failed, waiting for other \
+                                     jobs to finish...",
+                                )?;
+                            } else {
+                                error = Some(e);
+                            }
+                        }
+                    }
+                }
+                Message::Token(acquired_token) => {
+                    tokens.push(acquired_token.chain_err(|| "failed to acquire jobserver token")?);
+                }
+            }
+        }
+        drop(progress);
+
+        let build_type = if self.is_release { "release" } else { "dev" };
+        // NOTE: This may be a bit inaccurate, since this may not display the
+        // profile for what was actually built.  Profile overrides can change
+        // these settings, and in some cases different targets are built with
+        // different profiles.  To be accurate, it would need to collect a
+        // list of Units built, and maybe display a list of the different
+        // profiles used.  However, to keep it simple and compatible with old
+        // behavior, we just display what the base profile is.
+        let profile = cx.bcx.profiles.base_profile(self.is_release);
+        let mut opt_type = String::from(if profile.opt_level.as_str() == "0" {
+            "unoptimized"
+        } else {
+            "optimized"
+        });
+        if profile.debuginfo.is_some() {
+            opt_type += " + debuginfo";
+        }
+
+        let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed());
+
+        if self.queue.is_empty() {
+            let message = format!(
+                "{} [{}] target(s) in {}",
+                build_type, opt_type, time_elapsed
+            );
+            if !build_plan {
+                cx.bcx.config.shell().status("Finished", message)?;
+            }
+            Ok(())
+        } else if let Some(e) = error {
+            Err(e)
+        } else {
+            debug!("queue: {:#?}", self.queue);
+            Err(internal("finished with jobs still left in the queue"))
+        }
+    }
+
+    /// Executes a single job: dirty work is spawned onto a worker thread in
+    /// `scope`, while fresh work runs inline on the current thread.
+    fn run(
+        &mut self,
+        key: Key<'a>,
+        fresh: Freshness,
+        job: Job,
+        config: &Config,
+        scope: &Scope<'a>,
+        build_plan: bool,
+    ) -> CargoResult<()> {
+        info!("start: {:?}", key);
+
+        self.active.push(key);
+        *self.counts.get_mut(key.pkg).unwrap() -= 1;
+
+        let my_tx = self.tx.clone();
+        let doit = move || {
+            let res = job.run(fresh, &JobState { tx: my_tx.clone() });
+            my_tx.send(Message::Finish(key, res)).unwrap();
+        };
+        match fresh {
+            Freshness::Fresh => doit(),
+            Freshness::Dirty => {
+                scope.spawn(doit);
+            }
+        }
+
+        if !build_plan {
+            // Print out some nice progress information
+            self.note_working_on(config, &key, fresh)?;
+        }
+
+        Ok(())
+    }
+
+    /// Print the warnings recorded in the build-state output for `key`,
+    /// preceded by `msg` (when given) if there is at least one warning.
+    fn emit_warnings(&self, msg: Option<&str>, key: &Key<'a>, cx: &mut Context) -> CargoResult<()> {
+        let output = cx.build_state.outputs.lock().unwrap();
+        let bcx = &mut cx.bcx;
+        if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) {
+            if let Some(msg) = msg {
+                if !output.warnings.is_empty() {
+                    writeln!(bcx.config.shell().err(), "{}\n", msg)?;
+                }
+            }
+
+            for warning in output.warnings.iter() {
+                bcx.config.shell().warn(warning)?;
+            }
+
+            if !output.warnings.is_empty() && msg.is_some() {
+                // Output an empty line.
+                writeln!(bcx.config.shell().err())?;
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Mark one job of `key` as done; when all its jobs are done, mark the
+    /// key finished in the dependency queue with its combined freshness.
+    fn finish(&mut self, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
+        if key.mode.is_run_custom_build() && cx.bcx.show_warnings(key.pkg) {
+            self.emit_warnings(None, &key, cx)?;
+        }
+
+        let state = self.pending.get_mut(&key).unwrap();
+        state.amt -= 1;
+        if state.amt == 0 {
+            self.queue.finish(&key, state.fresh);
+        }
+        Ok(())
+    }
+
+    // This isn't super trivial because we don't want to print loads and
+    // loads of information to the console, but we also want to produce a
+    // faithful representation of what's happening. This is somewhat nuanced
+    // as a package can start compiling *very* early on because of custom
+    // build commands and such.
+    //
+    // In general, we try to print "Compiling" for the first nontrivial task
+    // run for a package, regardless of when that is. We then don't print
+    // out any more information for a package after we've printed it once.
+    fn note_working_on(
+        &mut self,
+        config: &Config,
+        key: &Key<'a>,
+        fresh: Freshness,
+    ) -> CargoResult<()> {
+        if (self.compiled.contains(key.pkg) && !key.mode.is_doc())
+            || (self.documented.contains(key.pkg) && key.mode.is_doc())
+        {
+            return Ok(());
+        }
+
+        match fresh {
+            // Any dirty stage which runs at least one command gets printed as
+            // being a compiled package
+            Dirty => {
+                if key.mode.is_doc() {
+                    // Skip Doctest
+                    if !key.mode.is_any_test() {
+                        self.documented.insert(key.pkg);
+                        config.shell().status("Documenting", key.pkg)?;
+                    }
+                } else {
+                    self.compiled.insert(key.pkg);
+                    if key.mode.is_check() {
+                        config.shell().status("Checking", key.pkg)?;
+                    } else {
+                        config.shell().status("Compiling", key.pkg)?;
+                    }
+                }
+            }
+            Fresh => {
+                // If doctest is last, only print "Fresh" if nothing has been printed.
+                if self.counts[key.pkg] == 0
+                    && !(key.mode == CompileMode::Doctest && self.compiled.contains(key.pkg))
+                {
+                    self.compiled.insert(key.pkg);
+                    config.shell().verbose(|c| c.status("Fresh", key.pkg))?;
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+impl<'a> Key<'a> {
+    /// Build the queue key identifying `unit`.
+    fn new(unit: &Unit<'a>) -> Key<'a> {
+        Key {
+            pkg: unit.pkg.package_id(),
+            target: unit.target,
+            profile: unit.profile,
+            kind: unit.kind,
+            mode: unit.mode,
+        }
+    }
+
+    /// Compute the keys of the units this key depends on, i.e. this key's
+    /// outgoing edges in the job dependency graph.
+    fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
+        let unit = Unit {
+            pkg: cx.get_package(self.pkg)?,
+            target: self.target,
+            profile: self.profile,
+            kind: self.kind,
+            mode: self.mode,
+        };
+        let targets = cx.dep_targets(&unit);
+        Ok(targets
+            .iter()
+            .filter_map(|unit| {
+                // Binaries aren't actually needed to *compile* tests, just to run
+                // them, so we don't include this dependency edge in the job graph.
+                if self.target.is_test() && unit.target.is_bin() {
+                    None
+                } else {
+                    Some(Key::new(unit))
+                }
+            })
+            .collect())
+    }
+}
+
+// Compact one-line rendering used in trace/debug logging of the queue.
+impl<'a> fmt::Debug for Key<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(
+            f,
+            "{} => {}/{} => {:?}",
+            self.pkg, self.target, self.profile, self.kind
+        )
+    }
+}
diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs
new file mode 100644 (file)
index 0000000..d7a3e44
--- /dev/null
@@ -0,0 +1,205 @@
+//! Management of the directory layout of a build
+//!
+//! The directory layout is a little tricky at times, hence a separate file to
+//! house this logic. The current layout looks like this:
+//!
+//! ```ignore
+//! # This is the root directory for all output, the top-level package
+//! # places all of its output here.
+//! target/
+//!
+//!     # This is the root directory for all output of *dependencies*
+//!     deps/
+//!
+//!     # Root directory for all compiled examples
+//!     examples/
+//!
+//!     # This is the location at which the output of all custom build
+//!     # commands are rooted
+//!     build/
+//!
+//!         # Each package gets its own directory where its build script and
+//!         # script output are placed
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!             # Each package's directory has an `out` directory where
+//!             # its output is placed.
+//!             out/
+//!
+//!     # This is the location at which the output of all old custom build
+//!     # commands are rooted
+//!     native/
+//!
+//!         # Each package gets its own directory for where its output is
+//!         # placed. We can't track exactly what's getting put in here, so
+//!         # we just assume that all relevant output is in these
+//!         # directories.
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!     # Directory used to store incremental data for the compiler (when
+//!     # incremental compilation is enabled).
+//!     incremental/
+//!
+//!     # Hidden directory that holds all of the fingerprint files for all
+//!     # packages
+//!     .fingerprint/
+//! ```
+
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use core::Workspace;
+use util::{CargoResult, Config, FileLock, Filesystem};
+
+/// Contains the paths of all target output locations.
+///
+/// See module docs for more information.
+pub struct Layout {
+    /// Root of this layout; all paths below are directly beneath it.
+    root: PathBuf,
+    /// `root/deps` — output of all dependencies.
+    deps: PathBuf,
+    /// `root/native` — output of old-style custom build commands.
+    native: PathBuf,
+    /// `root/build` — per-package build-script directories.
+    build: PathBuf,
+    /// `root/incremental` — incremental compilation data.
+    incremental: PathBuf,
+    /// `root/.fingerprint` — fingerprint files for all packages.
+    fingerprint: PathBuf,
+    /// `root/examples` — compiled examples.
+    examples: PathBuf,
+    /// The lockfile for a build, will be unlocked when this struct is `drop`ped.
+    _lock: FileLock,
+}
+
+/// Returns `true` if `name` collides with one of the directory names this
+/// layout reserves at the root of the target directory.
+pub fn is_bad_artifact_name(name: &str) -> bool {
+    ["deps", "examples", "build", "native", "incremental"]
+        .iter()
+        .any(|&reserved| reserved == name)
+}
+
+impl Layout {
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    ///
+    /// Differs from `at` in that this calculates the root path from the workspace target directory,
+    /// adding the target triple and the profile (debug, release, ...).
+    pub fn new(ws: &Workspace, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
+        let mut path = ws.target_dir();
+        // Flexible target specifications often point at filenames, so interpret
+        // the target triple as a Path and then just use the file stem as the
+        // component for the directory name.
+        if let Some(triple) = triple {
+            path.push(Path::new(triple)
+                .file_stem()
+                .ok_or_else(|| format_err!("invalid target"))?);
+        }
+        path.push(dest);
+        Layout::at(ws.config(), path)
+    }
+
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
+        // For now we don't do any more finer-grained locking on the artifact
+        // directory, so just lock the entire thing for the duration of this
+        // compile.
+        let lock = root.open_rw(".cargo-lock", config, "build directory")?;
+        let root = root.into_path_unlocked();
+
+        Ok(Layout {
+            deps: root.join("deps"),
+            native: root.join("native"),
+            build: root.join("build"),
+            incremental: root.join("incremental"),
+            fingerprint: root.join(".fingerprint"),
+            examples: root.join("examples"),
+            root,
+            _lock: lock,
+        })
+    }
+
+    /// No-op on platforms without a backup-exclusion API.
+    #[cfg(not(target_os = "macos"))]
+    fn exclude_from_backups(&self, _: &Path) {}
+
+    #[cfg(target_os = "macos")]
+    /// Marks files or directories as excluded from Time Machine on macOS
+    ///
+    /// This is recommended to prevent derived/temporary files from bloating backups.
+    fn exclude_from_backups(&self, path: &Path) {
+        use std::ptr;
+        use core_foundation::{number, string, url};
+        use core_foundation::base::TCFType;
+
+        // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
+        let is_excluded_key: Result<string::CFString, _> = "NSURLIsExcludedFromBackupKey".parse();
+        let path = url::CFURL::from_path(path, false);
+        if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) {
+            unsafe {
+                url::CFURLSetResourcePropertyForKey(
+                    path.as_concrete_TypeRef(),
+                    is_excluded_key.as_concrete_TypeRef(),
+                    number::kCFBooleanTrue as *const _,
+                    ptr::null_mut(),
+                );
+            }
+        }
+        // Errors are ignored, since it's an optional feature and failure
+        // doesn't prevent Cargo from working
+    }
+
+    /// Make sure all directories stored in the Layout exist on the filesystem.
+    pub fn prepare(&mut self) -> io::Result<()> {
+        if fs::metadata(&self.root).is_err() {
+            fs::create_dir_all(&self.root)?;
+        }
+
+        self.exclude_from_backups(&self.root);
+
+        mkdir(&self.deps)?;
+        mkdir(&self.native)?;
+        mkdir(&self.incremental)?;
+        mkdir(&self.fingerprint)?;
+        mkdir(&self.examples)?;
+        mkdir(&self.build)?;
+
+        // The item below the `return` is only a definition, so this is the
+        // sole exit point of the function.
+        return Ok(());
+
+        // Create `dir` if it does not already exist.
+        fn mkdir(dir: &Path) -> io::Result<()> {
+            if fs::metadata(&dir).is_err() {
+                fs::create_dir(dir)?;
+            }
+            Ok(())
+        }
+    }
+
+    /// Fetch the root path.
+    pub fn dest(&self) -> &Path {
+        &self.root
+    }
+    /// Fetch the deps path.
+    pub fn deps(&self) -> &Path {
+        &self.deps
+    }
+    /// Fetch the examples path.
+    pub fn examples(&self) -> &Path {
+        &self.examples
+    }
+    /// Fetch the root path.
+    pub fn root(&self) -> &Path {
+        &self.root
+    }
+    /// Fetch the incremental path.
+    pub fn incremental(&self) -> &Path {
+        &self.incremental
+    }
+    /// Fetch the fingerprint path.
+    pub fn fingerprint(&self) -> &Path {
+        &self.fingerprint
+    }
+    /// Fetch the build path.
+    pub fn build(&self) -> &Path {
+        &self.build
+    }
+}
diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs
new file mode 100644 (file)
index 0000000..59834c4
--- /dev/null
@@ -0,0 +1,1013 @@
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::io::{self, Write};
+use std::path::{self, Path, PathBuf};
+use std::sync::Arc;
+
+use same_file::is_same_file;
+use serde_json;
+
+use core::manifest::TargetSourcePath;
+use core::profiles::{Lto, Profile};
+use core::{PackageId, Target};
+use util::errors::{CargoResult, CargoResultExt, Internal};
+use util::paths;
+use util::{self, machine_message, Freshness, ProcessBuilder, process};
+use util::{internal, join_paths, profile};
+
+use self::build_plan::BuildPlan;
+use self::job::{Job, Work};
+use self::job_queue::JobQueue;
+
+use self::output_depinfo::output_depinfo;
+
+pub use self::build_context::{BuildContext, FileFlavor, TargetConfig, TargetInfo};
+pub use self::build_config::{BuildConfig, CompileMode, MessageFormat};
+pub use self::compilation::{Compilation, Doctest};
+pub use self::context::{Context, Unit};
+pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts};
+pub use self::layout::is_bad_artifact_name;
+
+mod build_config;
+mod build_context;
+mod build_plan;
+mod compilation;
+mod context;
+mod custom_build;
+mod fingerprint;
+mod job;
+mod job_queue;
+mod layout;
+mod output_depinfo;
+
/// Whether an object is for the host arch, or the target arch.
///
/// These will be the same unless cross-compiling.
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)]
pub enum Kind {
    /// The object is compiled for (and runs on) the machine performing the
    /// build, e.g. plugins (see `add_plugin_deps`, which looks up plugin
    /// outputs under this kind).
    Host,
    /// The object is compiled for the requested compilation target
    /// (`--target` is passed for these units in `build_base_args`).
    Target,
}
+
/// A glorified callback for executing calls to rustc. Rather than calling rustc
/// directly, we'll use an Executor, giving clients an opportunity to intercept
/// the build calls.
pub trait Executor: Send + Sync + 'static {
    /// Called after a rustc process invocation is prepared up-front for a given
    /// unit of work (may still be modified for runtime-known dependencies, when
    /// the work is actually executed).
    fn init(&self, _cx: &Context, _unit: &Unit) {}

    /// In case of an `Err`, Cargo will not continue with the build process for
    /// this package.
    fn exec(
        &self,
        cmd: ProcessBuilder,
        _id: &PackageId,
        _target: &Target,
        _mode: CompileMode
    ) -> CargoResult<()> {
        cmd.exec()?;
        Ok(())
    }

    /// Like `exec`, but additionally given the job queue's `JobState` so
    /// implementations can capture and report the process output through it.
    fn exec_and_capture_output(
        &self,
        cmd: ProcessBuilder,
        id: &PackageId,
        target: &Target,
        mode: CompileMode,
        _state: &job_queue::JobState<'_>,
    ) -> CargoResult<()> {
        // we forward to exec() to keep RLS working.
        self.exec(cmd, id, target, mode)
    }

    /// Execute `cmd`, streaming each stdout/stderr line through the given
    /// callbacks (used when JSON diagnostic messages are requested).
    fn exec_json(
        &self,
        cmd: ProcessBuilder,
        _id: &PackageId,
        _target: &Target,
        _mode: CompileMode,
        handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
        handle_stderr: &mut FnMut(&str) -> CargoResult<()>,
    ) -> CargoResult<()> {
        cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?;
        Ok(())
    }

    /// Queried when queuing each unit of work. If it returns true, then the
    /// unit will always be rebuilt, independent of whether it needs to be.
    fn force_rebuild(&self, _unit: &Unit) -> bool {
        false
    }
}
+
/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's
/// default behaviour.
#[derive(Copy, Clone)]
pub struct DefaultExecutor;

impl Executor for DefaultExecutor {
    /// Run the compiler and capture its output via the job queue's state
    /// (rather than using the trait's default, which forwards to `exec`).
    fn exec_and_capture_output(
        &self,
        cmd: ProcessBuilder,
        _id: &PackageId,
        _target: &Target,
        _mode: CompileMode,
        state: &job_queue::JobState<'_>,
    ) -> CargoResult<()> {
        state.capture_output(&cmd, None, false).map(drop)
    }
}
+
/// Construct and enqueue the `Work` required to compile `unit`, then
/// recursively do the same for every dependency of `unit`.
///
/// Each unit is processed at most once: `cx.compiled` tracks what has
/// already been enqueued. Depending on the unit's mode, the work is a
/// custom-build preparation, a noop (doctests run later), a build-plan
/// entry, or a real rustc/rustdoc invocation with fingerprint-based
/// freshness tracking.
fn compile<'a, 'cfg: 'a>(
    cx: &mut Context<'a, 'cfg>,
    jobs: &mut JobQueue<'a>,
    plan: &mut BuildPlan,
    unit: &Unit<'a>,
    exec: &Arc<Executor>,
    force_rebuild: bool,
) -> CargoResult<()> {
    let bcx = cx.bcx;
    let build_plan = bcx.build_config.build_plan;
    // Skip units that have already been enqueued.
    if !cx.compiled.insert(*unit) {
        return Ok(());
    }

    // Build up the work to be done to compile this unit, enqueuing it once
    // we've got everything constructed.
    let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
    fingerprint::prepare_init(cx, unit)?;
    cx.links.validate(bcx.resolve, unit)?;

    let (dirty, fresh, freshness) = if unit.mode.is_run_custom_build() {
        custom_build::prepare(cx, unit)?
    } else if unit.mode == CompileMode::Doctest {
        // we run these targets later, so this is just a noop for now
        (Work::noop(), Work::noop(), Freshness::Fresh)
    } else if build_plan {
        // Build plans only record what would run, so everything is "dirty".
        (
            rustc(cx, unit, &exec.clone())?,
            Work::noop(),
            Freshness::Dirty,
        )
    } else {
        let (mut freshness, dirty, fresh) = fingerprint::prepare_target(cx, unit)?;
        let work = if unit.mode.is_doc() {
            rustdoc(cx, unit)?
        } else {
            rustc(cx, unit, exec)?
        };
        // Need to link targets on both the dirty and fresh
        let dirty = work.then(link_targets(cx, unit, false)?).then(dirty);
        let fresh = link_targets(cx, unit, true)?.then(fresh);

        // The executor, or our caller, may override the fingerprint verdict.
        if exec.force_rebuild(unit) || force_rebuild {
            freshness = Freshness::Dirty;
        }

        (dirty, fresh, freshness)
    };
    jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?;
    drop(p);

    // Be sure to compile all dependencies of this target as well.
    for unit in cx.dep_targets(unit).iter() {
        compile(cx, jobs, plan, unit, exec, false)?;
    }
    if build_plan {
        plan.add(cx, unit)?;
    }

    Ok(())
}
+
/// Construct the `Work` that runs rustc for `unit`.
///
/// Most of the command line is computed up front; the returned closure
/// fills in information only known at run time (native `-L`/`-l` flags and
/// custom environment variables produced by build scripts), then dispatches
/// the invocation through `exec`.
fn rustc<'a, 'cfg>(
    cx: &mut Context<'a, 'cfg>,
    unit: &Unit<'a>,
    exec: &Arc<Executor>,
) -> CargoResult<Work> {
    let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
    if cx.is_primary_package(unit) {
        rustc.env("CARGO_PRIMARY_PACKAGE", "1");
    }
    let build_plan = cx.bcx.build_config.build_plan;

    let name = unit.pkg.name().to_string();
    let buildkey = unit.buildkey();

    add_cap_lints(cx.bcx, unit, &mut rustc);

    let outputs = cx.outputs(unit)?;
    let root = cx.files().out_dir(unit);
    let kind = unit.kind;

    // Prepare the native lib state (extra -L and -l flags)
    let build_state = cx.build_state.clone();
    let current_id = unit.pkg.package_id().clone();
    let build_deps = load_build_deps(cx, unit);

    // If we are a binary and the package also contains a library, then we
    // don't pass the `-l` flags.
    let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());
    // Targets whose name may contain `-` need a rename after rustc runs,
    // since rustc emits files using the crate name (with `_`). See below.
    let do_rename = unit.target.allows_underscores() && !unit.mode.is_any_test();
    let real_name = unit.target.name().to_string();
    let crate_name = unit.target.crate_name();

    // XXX(Rely on target_filenames iterator as source of truth rather than rederiving filestem)
    let rustc_dep_info_loc = if do_rename && cx.files().metadata(unit).is_none() {
        root.join(&crate_name)
    } else {
        root.join(&cx.files().file_stem(unit))
    }.with_extension("d");
    let dep_info_loc = fingerprint::dep_info_loc(cx, unit);

    rustc.args(&cx.bcx.rustflags_args(unit)?);
    let json_messages = cx.bcx.build_config.json_messages();
    let package_id = unit.pkg.package_id().clone();
    let target = unit.target.clone();
    let mode = unit.mode;

    exec.init(cx, unit);
    let exec = exec.clone();

    let root_output = cx.files().target_root().to_path_buf();
    let pkg_root = unit.pkg.root().to_path_buf();
    let cwd = rustc
        .get_cwd()
        .unwrap_or_else(|| cx.bcx.config.cwd())
        .to_path_buf();

    return Ok(Work::new(move |state| {
        // Only at runtime have we discovered what the extra -L and -l
        // arguments are for native libraries, so we process those here. We
        // also need to be sure to add any -L paths for our plugins to the
        // dynamic library load path as a plugin's dynamic library may be
        // located somewhere in there.
        // Finally, if custom environment variables have been produced by
        // previous build scripts, we include them in the rustc invocation.
        if let Some(build_deps) = build_deps {
            let build_state = build_state.outputs.lock().unwrap();
            if !build_plan {
                add_native_deps(
                    &mut rustc,
                    &build_state,
                    &build_deps,
                    pass_l_flag,
                    &current_id,
                )?;
                add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
            }
            add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
        }

        for output in outputs.iter() {
            // If there is both an rmeta and rlib, rustc will prefer to use the
            // rlib, even if it is older. Therefore, we must delete the rlib to
            // force using the new rmeta.
            if output.path.extension() == Some(OsStr::new("rmeta")) {
                let dst = root.join(&output.path).with_extension("rlib");
                if dst.exists() {
                    paths::remove_file(&dst)?;
                }
            }
        }

        state.running(&rustc);
        // Dispatch through the executor; JSON mode streams diagnostics,
        // build-plan mode only records the invocation.
        if json_messages {
            exec.exec_json(
                rustc,
                &package_id,
                &target,
                mode,
                &mut assert_is_empty,
                &mut |line| json_stderr(line, &package_id, &target),
            ).map_err(Internal::new)
            .chain_err(|| format!("Could not compile `{}`.", name))?;
        } else if build_plan {
            state.build_plan(buildkey, rustc.clone(), outputs.clone());
        } else {
            exec.exec_and_capture_output(rustc, &package_id, &target, mode, state)
                .map_err(Internal::new)
                .chain_err(|| format!("Could not compile `{}`.", name))?;
        }

        // rustc emitted the artifact under the crate name (underscores);
        // rename it to the expected output path using the real target name.
        if do_rename && real_name != crate_name {
            let dst = &outputs[0].path;
            let src = dst.with_file_name(
                dst.file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .replace(&real_name, &crate_name),
            );
            if src.exists() && src.file_name() != dst.file_name() {
                fs::rename(&src, &dst)
                    .chain_err(|| internal(format!("could not rename crate {:?}", src)))?;
            }
        }

        // Translate rustc's dep-info output into Cargo's own format.
        if rustc_dep_info_loc.exists() {
            fingerprint::translate_dep_info(&rustc_dep_info_loc, &dep_info_loc, &pkg_root, &cwd)
                .chain_err(|| {
                    internal(format!(
                        "could not parse/generate dep info at: {}",
                        rustc_dep_info_loc.display()
                    ))
                })?;
        }

        Ok(())
    }));

    // Add all relevant -L and -l flags from dependencies (now calculated and
    // present in `state`) to the command provided
    fn add_native_deps(
        rustc: &mut ProcessBuilder,
        build_state: &BuildMap,
        build_scripts: &BuildScripts,
        pass_l_flag: bool,
        current_id: &PackageId,
    ) -> CargoResult<()> {
        for key in build_scripts.to_link.iter() {
            let output = build_state.get(key).ok_or_else(|| {
                internal(format!(
                    "couldn't find build state for {}/{:?}",
                    key.0, key.1
                ))
            })?;
            for path in output.library_paths.iter() {
                rustc.arg("-L").arg(path);
            }
            // --cfg and -l flags only apply to the package being built.
            if key.0 == *current_id {
                for cfg in &output.cfgs {
                    rustc.arg("--cfg").arg(cfg);
                }
                if pass_l_flag {
                    for name in output.library_links.iter() {
                        rustc.arg("-l").arg(name);
                    }
                }
            }
        }
        Ok(())
    }

    // Add all custom environment variables present in `state` (after they've
    // been put there by one of the `build_scripts`) to the command provided.
    fn add_custom_env(
        rustc: &mut ProcessBuilder,
        build_state: &BuildMap,
        current_id: &PackageId,
        kind: Kind,
    ) -> CargoResult<()> {
        let key = (current_id.clone(), kind);
        if let Some(output) = build_state.get(&key) {
            for &(ref name, ref value) in output.env.iter() {
                rustc.env(name, value);
            }
        }
        Ok(())
    }
}
+
/// Link the compiled target (often of form `foo-{metadata_hash}`) to the
/// final target. This must happen during both "Fresh" and "Compile"
///
/// The returned `Work` hardlinks (or copies) each output out of the `deps`
/// directory to its user-visible destination (and into the export dir, if
/// one is configured), then emits a JSON "artifact" message when JSON
/// output is enabled.
fn link_targets<'a, 'cfg>(
    cx: &mut Context<'a, 'cfg>,
    unit: &Unit<'a>,
    fresh: bool,
) -> CargoResult<Work> {
    let bcx = cx.bcx;
    let outputs = cx.outputs(unit)?;
    let export_dir = cx.files().export_dir();
    let package_id = unit.pkg.package_id().clone();
    let profile = unit.profile;
    let unit_mode = unit.mode;
    let features = bcx.resolve
        .features_sorted(&package_id)
        .into_iter()
        .map(|s| s.to_owned())
        .collect();
    let json_messages = bcx.build_config.json_messages();
    let mut target = unit.target.clone();
    if let TargetSourcePath::Metabuild = target.src_path() {
        // Give it something to serialize.
        let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
        target.set_src_path(TargetSourcePath::Path(path));
    }

    Ok(Work::new(move |_| {
        // If we're a "root crate", e.g. the target of this compilation, then we
        // hard link our outputs out of the `deps` directory into the directory
        // above. This means that `cargo build` will produce binaries in
        // `target/debug` which one probably expects.
        let mut destinations = vec![];
        for output in outputs.iter() {
            let src = &output.path;
            // This may have been a `cargo rustc` command which changes the
            // output, so the source may not actually exist.
            if !src.exists() {
                continue;
            }
            let dst = match output.hardlink.as_ref() {
                Some(dst) => dst,
                None => {
                    // No hardlink destination: report the in-place path.
                    destinations.push(src.display().to_string());
                    continue;
                }
            };
            destinations.push(dst.display().to_string());
            hardlink_or_copy(src, dst)?;
            if let Some(ref path) = export_dir {
                if !path.exists() {
                    fs::create_dir_all(path)?;
                }

                hardlink_or_copy(src, &path.join(dst.file_name().unwrap()))?;
            }
        }

        if json_messages {
            let art_profile = machine_message::ArtifactProfile {
                opt_level: profile.opt_level.as_str(),
                debuginfo: profile.debuginfo,
                debug_assertions: profile.debug_assertions,
                overflow_checks: profile.overflow_checks,
                test: unit_mode.is_any_test(),
            };

            machine_message::emit(&machine_message::Artifact {
                package_id: &package_id,
                target: &target,
                profile: art_profile,
                features,
                filenames: destinations,
                fresh,
            });
        }
        Ok(())
    }))
}
+
/// Link `src` to `dst` using the cheapest mechanism available: directories
/// are symlinked, files are hard-linked, and if linking fails the file is
/// copied as a last resort. A pre-existing `dst` is removed first; if `src`
/// and `dst` already refer to the same file this is a no-op.
fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> {
    debug!("linking {} to {}", src.display(), dst.display());
    if is_same_file(src, dst).unwrap_or(false) {
        return Ok(());
    }
    if dst.exists() {
        paths::remove_file(&dst)?;
    }

    let link_result = if src.is_dir() {
        #[cfg(target_os = "redox")]
        use std::os::redox::fs::symlink;
        #[cfg(unix)]
        use std::os::unix::fs::symlink;
        #[cfg(windows)]
        use std::os::windows::fs::symlink_dir as symlink;

        let dst_dir = dst.parent().unwrap();
        // Use a relative link target when `src` lives under `dst`'s parent —
        // presumably so the link stays valid if the tree is relocated;
        // TODO confirm.
        let src = if src.starts_with(dst_dir) {
            src.strip_prefix(dst_dir).unwrap()
        } else {
            src
        };
        symlink(src, dst)
    } else {
        fs::hard_link(src, dst)
    };
    link_result
        .or_else(|err| {
            debug!("link failed {}. falling back to fs::copy", err);
            fs::copy(src, dst).map(|_| ())
        })
        .chain_err(|| {
            format!(
                "failed to link or copy `{}` to `{}`",
                src.display(),
                dst.display()
            )
        })?;
    Ok(())
}
+
+fn load_build_deps(cx: &Context, unit: &Unit) -> Option<Arc<BuildScripts>> {
+    cx.build_scripts.get(unit).cloned()
+}
+
+// For all plugin dependencies, add their -L paths (now calculated and
+// present in `state`) to the dynamic library load path for the command to
+// execute.
+fn add_plugin_deps(
+    rustc: &mut ProcessBuilder,
+    build_state: &BuildMap,
+    build_scripts: &BuildScripts,
+    root_output: &PathBuf,
+) -> CargoResult<()> {
+    let var = util::dylib_path_envvar();
+    let search_path = rustc.get_env(var).unwrap_or_default();
+    let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
+    for id in build_scripts.plugins.iter() {
+        let key = (id.clone(), Kind::Host);
+        let output = build_state
+            .get(&key)
+            .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
+        search_path.append(&mut filter_dynamic_search_path(
+            output.library_paths.iter(),
+            root_output,
+        ));
+    }
+    let search_path = join_paths(&search_path, var)?;
+    rustc.env(var, &search_path);
+    Ok(())
+}
+
+// Determine paths to add to the dynamic search path from -L entries
+//
+// Strip off prefixes like "native=" or "framework=" and filter out directories
+// *not* inside our output directory since they are likely spurious and can cause
+// clashes with system shared libraries (issue #3366).
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+where
+    I: Iterator<Item = &'a PathBuf>,
+{
+    let mut search_path = vec![];
+    for dir in paths {
+        let dir = match dir.to_str() {
+            Some(s) => {
+                let mut parts = s.splitn(2, '=');
+                match (parts.next(), parts.next()) {
+                    (Some("native"), Some(path))
+                    | (Some("crate"), Some(path))
+                    | (Some("dependency"), Some(path))
+                    | (Some("framework"), Some(path))
+                    | (Some("all"), Some(path)) => path.into(),
+                    _ => dir.clone(),
+                }
+            }
+            None => dir.clone(),
+        };
+        if dir.starts_with(&root_output) {
+            search_path.push(dir);
+        } else {
+            debug!(
+                "Not including path {} in runtime library search path because it is \
+                 outside target root {}",
+                dir.display(),
+                root_output.display()
+            );
+        }
+    }
+    search_path
+}
+
+fn prepare_rustc<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    crate_types: &[&str],
+    unit: &Unit<'a>,
+) -> CargoResult<ProcessBuilder> {
+    let mut base = cx.compilation.rustc_process(unit.pkg, unit.target)?;
+    base.inherit_jobserver(&cx.jobserver);
+    build_base_args(cx, &mut base, unit, crate_types)?;
+    build_deps_args(&mut base, cx, unit)?;
+    Ok(base)
+}
+
/// Construct the `Work` that runs rustdoc to document `unit`.
fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
    let bcx = cx.bcx;
    let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg, unit.target)?;
    rustdoc.inherit_jobserver(&cx.jobserver);
    rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
    add_path_args(bcx, unit, &mut rustdoc);
    add_cap_lints(bcx, unit, &mut rustdoc);

    // Only pass --color if this rustdoc supports it; probe with a cached
    // `--color never -V` invocation first.
    let mut can_add_color_process = process(&*bcx.config.rustdoc()?);
    can_add_color_process.args(&["--color", "never", "-V"]);
    if bcx.rustc.cached_success(&can_add_color_process)? {
        add_color(bcx, &mut rustdoc);
    }

    if unit.kind != Kind::Host {
        if let Some(ref target) = bcx.build_config.requested_target {
            rustdoc.arg("--target").arg(target);
        }
    }

    let doc_dir = cx.files().out_dir(unit);

    // Create the documentation directory ahead of time as rustdoc currently has
    // a bug where concurrent invocations will race to create this directory if
    // it doesn't already exist.
    fs::create_dir_all(&doc_dir)?;

    rustdoc.arg("-o").arg(doc_dir);

    for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) {
        rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
    }

    add_error_format(bcx, &mut rustdoc);

    if let Some(ref args) = bcx.extra_args_for(unit) {
        rustdoc.args(args);
    }

    build_deps_args(&mut rustdoc, cx, unit)?;

    rustdoc.args(&bcx.rustdocflags_args(unit)?);

    let name = unit.pkg.name().to_string();
    let build_state = cx.build_state.clone();
    let key = (unit.pkg.package_id().clone(), unit.kind);
    let json_messages = bcx.build_config.json_messages();
    let package_id = unit.pkg.package_id().clone();
    let target = unit.target.clone();

    Ok(Work::new(move |state| {
        // Apply --cfg flags and environment variables produced at run time
        // by this package's build script, if it has one.
        if let Some(output) = build_state.outputs.lock().unwrap().get(&key) {
            for cfg in output.cfgs.iter() {
                rustdoc.arg("--cfg").arg(cfg);
            }
            for &(ref name, ref value) in output.env.iter() {
                rustdoc.env(name, value);
            }
        }
        state.running(&rustdoc);

        let exec_result = if json_messages {
            rustdoc
                .exec_with_streaming(
                    &mut assert_is_empty,
                    &mut |line| json_stderr(line, &package_id, &target),
                    false,
                ).map(drop)
        } else {
            state.capture_output(&rustdoc, None, false).map(drop)
        };
        exec_result.chain_err(|| format!("Could not document `{}`.", name))?;
        Ok(())
    }))
}
+
+// The path that we pass to rustc is actually fairly important because it will
+// show up in error messages (important for readability), debug information
+// (important for caching), etc. As a result we need to be pretty careful how we
+// actually invoke rustc.
+//
+// In general users don't expect `cargo build` to cause rebuilds if you change
+// directories. That could be if you just change directories in the package or
+// if you literally move the whole package wholesale to a new directory. As a
+// result we mostly don't factor in `cwd` to this calculation. Instead we try to
+// track the workspace as much as possible and we update the current directory
+// of rustc/rustdoc where appropriate.
+//
+// The first returned value here is the argument to pass to rustc, and the
+// second is the cwd that rustc should operate in.
+fn path_args(bcx: &BuildContext, unit: &Unit) -> (PathBuf, PathBuf) {
+    let ws_root = bcx.ws.root();
+    let src = if unit.target.is_custom_build() && unit.pkg.manifest().metabuild().is_some() {
+        unit.pkg.manifest().metabuild_path(bcx.ws.target_dir())
+    } else {
+        unit.target.src_path().path().to_path_buf()
+    };
+    assert!(src.is_absolute());
+    if unit.pkg.package_id().source_id().is_path() {
+        if let Ok(path) = src.strip_prefix(ws_root) {
+            return (path.to_path_buf(), ws_root.to_path_buf());
+        }
+    }
+    (src, unit.pkg.root().to_path_buf())
+}
+
+fn add_path_args(bcx: &BuildContext, unit: &Unit, cmd: &mut ProcessBuilder) {
+    let (arg, cwd) = path_args(bcx, unit);
+    cmd.arg(arg);
+    cmd.cwd(cwd);
+}
+
+fn add_cap_lints(bcx: &BuildContext, unit: &Unit, cmd: &mut ProcessBuilder) {
+    // If this is an upstream dep we don't want warnings from, turn off all
+    // lints.
+    if !bcx.show_warnings(unit.pkg.package_id()) {
+        cmd.arg("--cap-lints").arg("allow");
+
+    // If this is an upstream dep but we *do* want warnings, make sure that they
+    // don't fail compilation.
+    } else if !unit.pkg.package_id().source_id().is_path() {
+        cmd.arg("--cap-lints").arg("warn");
+    }
+}
+
+fn add_color(bcx: &BuildContext, cmd: &mut ProcessBuilder) {
+    let shell = bcx.config.shell();
+    let color = if shell.supports_color() { "always" } else { "never" };
+    cmd.args(&["--color", color]);
+}
+
+fn add_error_format(bcx: &BuildContext, cmd: &mut ProcessBuilder) {
+    match bcx.build_config.message_format {
+        MessageFormat::Human => (),
+        MessageFormat::Json => { cmd.arg("--error-format").arg("json"); },
+        MessageFormat::Short => { cmd.arg("--error-format").arg("short"); },
+    }
+}
+
/// Add the arguments shared by every rustc invocation for `unit` to `cmd`:
/// crate name and types, emit flags, profile-derived codegen options,
/// feature cfgs, metadata/extra-filename, output directory, and target.
///
/// Not used for build-script *execution* units (asserted below).
fn build_base_args<'a, 'cfg>(
    cx: &mut Context<'a, 'cfg>,
    cmd: &mut ProcessBuilder,
    unit: &Unit<'a>,
    crate_types: &[&str],
) -> CargoResult<()> {
    assert!(!unit.mode.is_run_custom_build());

    let bcx = cx.bcx;
    // Pull everything we need out of the unit's profile up front.
    let Profile {
        ref opt_level,
        ref lto,
        codegen_units,
        debuginfo,
        debug_assertions,
        overflow_checks,
        rpath,
        ref panic,
        ..
    } = unit.profile;
    let test = unit.mode.is_any_test();

    cmd.arg("--crate-name").arg(&unit.target.crate_name());

    add_path_args(bcx, unit, cmd);
    add_color(bcx, cmd);
    add_error_format(bcx, cmd);

    if !test {
        for crate_type in crate_types.iter() {
            cmd.arg("--crate-type").arg(crate_type);
        }
    }

    // `cargo check` only asks rustc for metadata, not a full link.
    if unit.mode.is_check() {
        cmd.arg("--emit=dep-info,metadata");
    } else {
        cmd.arg("--emit=dep-info,link");
    }

    // Prefer dynamic linking for host artifacts (other than build scripts)
    // and for dylib crate types built for other workspace members.
    let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
        || (crate_types.contains(&"dylib") && bcx.ws.members().any(|p| p != unit.pkg));
    if prefer_dynamic {
        cmd.arg("-C").arg("prefer-dynamic");
    }

    if opt_level.as_str() != "0" {
        cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
    }

    if let Some(panic) = panic.as_ref() {
        cmd.arg("-C").arg(format!("panic={}", panic));
    }

    // Disable LTO for host builds as prefer_dynamic and it are mutually
    // exclusive.
    if unit.target.can_lto() && !unit.target.for_host() {
        match *lto {
            Lto::Bool(false) => {}
            Lto::Bool(true) => {
                cmd.args(&["-C", "lto"]);
            }
            Lto::Named(ref s) => {
                cmd.arg("-C").arg(format!("lto={}", s));
            }
        }
    }

    if let Some(n) = codegen_units {
        // There are some restrictions with LTO and codegen-units, so we
        // only add codegen units when LTO is not used.
        cmd.arg("-C").arg(&format!("codegen-units={}", n));
    }

    if let Some(debuginfo) = debuginfo {
        cmd.arg("-C").arg(format!("debuginfo={}", debuginfo));
    }

    if let Some(ref args) = bcx.extra_args_for(unit) {
        cmd.args(args);
    }

    // -C overflow-checks is implied by the setting of -C debug-assertions,
    // so we only need to provide -C overflow-checks if it differs from
    // the value of -C debug-assertions we would provide.
    if opt_level.as_str() != "0" {
        if debug_assertions {
            cmd.args(&["-C", "debug-assertions=on"]);
            if !overflow_checks {
                cmd.args(&["-C", "overflow-checks=off"]);
            }
        } else if overflow_checks {
            cmd.args(&["-C", "overflow-checks=on"]);
        }
    } else if !debug_assertions {
        cmd.args(&["-C", "debug-assertions=off"]);
        if overflow_checks {
            cmd.args(&["-C", "overflow-checks=on"]);
        }
    } else if !overflow_checks {
        cmd.args(&["-C", "overflow-checks=off"]);
    }

    if test && unit.target.harness() {
        cmd.arg("--test");
    } else if test {
        // Custom test harness: only enable `cfg(test)`.
        cmd.arg("--cfg").arg("test");
    }

    // We ideally want deterministic invocations of rustc to ensure that
    // rustc-caching strategies like sccache are able to cache more, so sort the
    // feature list here.
    for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) {
        cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
    }

    match cx.files().metadata(unit) {
        Some(m) => {
            cmd.arg("-C").arg(&format!("metadata={}", m));
            cmd.arg("-C").arg(&format!("extra-filename=-{}", m));
        }
        None => {
            cmd.arg("-C")
                .arg(&format!("metadata={}", cx.files().target_short_hash(unit)));
        }
    }

    if rpath {
        cmd.arg("-C").arg("rpath");
    }

    cmd.arg("--out-dir").arg(&cx.files().out_dir(unit));

    // Pass `key` with `prefix` + `val` only when `val` is present.
    fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
        if let Some(val) = val {
            let mut joined = OsString::from(prefix);
            joined.push(val);
            cmd.arg(key).arg(joined);
        }
    }

    if unit.kind == Kind::Target {
        opt(
            cmd,
            "--target",
            "",
            bcx.build_config
                .requested_target
                .as_ref()
                .map(|s| s.as_ref()),
        );
    }

    opt(cmd, "-C", "ar=", bcx.ar(unit.kind).map(|s| s.as_ref()));
    opt(
        cmd,
        "-C",
        "linker=",
        bcx.linker(unit.kind).map(|s| s.as_ref()),
    );
    cmd.args(&cx.incremental_args(unit)?);

    Ok(())
}
+
/// Add dependency-related arguments to `cmd`: `-L dependency=` search
/// paths, `OUT_DIR` for units with a build script, and an `--extern` flag
/// for every linkable dependency output. Also warns when a lib dependency
/// provides no linkable target.
fn build_deps_args<'a, 'cfg>(
    cmd: &mut ProcessBuilder,
    cx: &mut Context<'a, 'cfg>,
    unit: &Unit<'a>,
) -> CargoResult<()> {
    let bcx = cx.bcx;
    cmd.arg("-L").arg(&{
        let mut deps = OsString::from("dependency=");
        deps.push(cx.files().deps_dir(unit));
        deps
    });

    // Be sure that the host path is also listed. This'll ensure that proc-macro
    // dependencies are correctly found (for reexported macros).
    if let Kind::Target = unit.kind {
        cmd.arg("-L").arg(&{
            let mut deps = OsString::from("dependency=");
            deps.push(cx.files().host_deps());
            deps
        });
    }

    let dep_targets = cx.dep_targets(unit);

    // If there is not one linkable target but should, rustc fails later
    // on if there is an `extern crate` for it. This may turn into a hard
    // error in the future, see PR #4797
    if !dep_targets
        .iter()
        .any(|u| !u.mode.is_doc() && u.target.linkable())
    {
        if let Some(u) = dep_targets
            .iter()
            .find(|u| !u.mode.is_doc() && u.target.is_lib())
        {
            bcx.config.shell().warn(format!(
                "The package `{}` \
                 provides no linkable target. The compiler might raise an error while compiling \
                 `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
                 Cargo.toml. This warning might turn into a hard error in the future.",
                u.target.crate_name(),
                unit.target.crate_name(),
                u.target.crate_name()
            ))?;
        }
    }

    for dep in dep_targets {
        // A build-script execution dependency contributes OUT_DIR rather
        // than a linkable artifact.
        if dep.mode.is_run_custom_build() {
            cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep));
        }
        if dep.target.linkable() && !dep.mode.is_doc() {
            link_to(cmd, cx, unit, &dep)?;
        }
    }

    return Ok(());

    // Add an `--extern name=path` flag for every linkable output of `dep`.
    fn link_to<'a, 'cfg>(
        cmd: &mut ProcessBuilder,
        cx: &mut Context<'a, 'cfg>,
        current: &Unit<'a>,
        dep: &Unit<'a>,
    ) -> CargoResult<()> {
        let bcx = cx.bcx;
        for output in cx.outputs(dep)?.iter() {
            if output.flavor != FileFlavor::Linkable {
                continue;
            }
            let mut v = OsString::new();
            let name = bcx.extern_crate_name(current, dep)?;
            v.push(name);
            v.push("=");
            v.push(cx.files().out_dir(dep));
            v.push(&path::MAIN_SEPARATOR.to_string());
            v.push(&output.path.file_name().unwrap());
            cmd.arg("--extern").arg(&v);
        }
        Ok(())
    }
}
+
/// Convert a string to the conventional environment-variable form:
/// every character uppercased and `-` replaced with `_`.
fn envify(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for ch in s.chars() {
        for up in ch.to_uppercase() {
            out.push(if up == '-' { '_' } else { up });
        }
    }
    out
}
+
+impl Kind {
+    fn for_target(self, target: &Target) -> Kind {
+        // Once we start compiling for the `Host` kind we continue doing so, but
+        // if we are a `Target` kind and then we start compiling for a target
+        // that needs to be on the host we lift ourselves up to `Host`
+        match self {
+            Kind::Host => Kind::Host,
+            Kind::Target if target.for_host() => Kind::Host,
+            Kind::Target => Kind::Target,
+        }
+    }
+}
+
+fn assert_is_empty(line: &str) -> CargoResult<()> {
+    if !line.is_empty() {
+        Err(internal(&format!(
+            "compiler stdout is not empty: `{}`",
+            line
+        )))
+    } else {
+        Ok(())
+    }
+}
+
+fn json_stderr(line: &str, package_id: &PackageId, target: &Target) -> CargoResult<()> {
+    // stderr from rustc/rustdoc can have a mix of JSON and non-JSON output
+    if line.starts_with('{') {
+        // Handle JSON lines
+        let compiler_message = serde_json::from_str(line)
+            .map_err(|_| internal(&format!("compiler produced invalid json: `{}`", line)))?;
+
+        machine_message::emit(&machine_message::FromCompiler {
+            package_id,
+            target,
+            message: compiler_message,
+        });
+    } else {
+        // Forward non-JSON to stderr
+        writeln!(io::stderr(), "{}", line)?;
+    }
+    Ok(())
+}
diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs
new file mode 100644 (file)
index 0000000..df4f3d4
--- /dev/null
@@ -0,0 +1,125 @@
+use std::collections::{BTreeSet, HashSet};
+use std::fs::File;
+use std::io::{BufWriter, Write};
+use std::path::{Path, PathBuf};
+
+use super::{fingerprint, Context, Unit};
+use util::paths;
+use util::{internal, CargoResult};
+
+fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
+    let path = path.as_ref();
+    let relpath = match basedir {
+        None => path,
+        Some(base) => match path.strip_prefix(base) {
+            Ok(relpath) => relpath,
+            _ => path,
+        },
+    };
+    relpath
+        .to_str()
+        .ok_or_else(|| internal("path not utf-8"))
+        .map(|f| f.replace(" ", "\\ "))
+}
+
+/// Collects into `deps` every file the given `unit` depends on, combining
+/// the rustc dep-info output with any `rerun-if-changed` paths from build
+/// scripts, then recurses into path-source dependencies. `visited` guards
+/// against processing a unit twice.
+fn add_deps_for_unit<'a, 'b>(
+    deps: &mut BTreeSet<PathBuf>,
+    context: &mut Context<'a, 'b>,
+    unit: &Unit<'a>,
+    visited: &mut HashSet<Unit<'a>>,
+) -> CargoResult<()> {
+    if !visited.insert(*unit) {
+        return Ok(());
+    }
+
+    // units representing the execution of a build script don't actually
+    // generate a dep info file, so we just keep on going below
+    if !unit.mode.is_run_custom_build() {
+        // Add dependencies from rustc dep-info output (stored in fingerprint directory)
+        let dep_info_loc = fingerprint::dep_info_loc(context, unit);
+        if let Some(paths) = fingerprint::parse_dep_info(unit.pkg, &dep_info_loc)? {
+            for path in paths {
+                deps.insert(path);
+            }
+        } else {
+            debug!(
+                "can't find dep_info for {:?} {}",
+                unit.pkg.package_id(),
+                unit.target
+            );
+            return Err(internal("dep_info missing"));
+        }
+    }
+
+    // Add rerun-if-changed dependencies
+    let key = (unit.pkg.package_id().clone(), unit.kind);
+    if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) {
+        for path in &output.rerun_if_changed {
+            deps.insert(path.into());
+        }
+    }
+
+    // Recursively traverse all transitive dependencies
+    // NOTE(review): only path-source dependencies are recursed into —
+    // presumably because other sources can't change out from under us;
+    // confirm before relying on this.
+    for dep_unit in context.dep_targets(unit).iter() {
+        let source_id = dep_unit.pkg.package_id().source_id();
+        if source_id.is_path() {
+            add_deps_for_unit(deps, context, dep_unit, visited)?;
+        }
+    }
+    Ok(())
+}
+
+/// Writes a Makefile-style `.d` dep-info file next to each hardlinked
+/// output of `unit` (`target: dep dep ...`). If dependency collection
+/// fails, any existing `.d` file is deleted instead so downstream build
+/// systems conservatively rerun the rule.
+pub fn output_depinfo<'a, 'b>(cx: &mut Context<'a, 'b>, unit: &Unit<'a>) -> CargoResult<()> {
+    let bcx = cx.bcx;
+    let mut deps = BTreeSet::new();
+    let mut visited = HashSet::new();
+    let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok();
+    // Declared here so the borrowed `&str` in `basedir` can outlive the
+    // match arm that produces the owned String.
+    let basedir_string;
+    let basedir = match bcx.config.get_path("build.dep-info-basedir")? {
+        Some(value) => {
+            basedir_string = value
+                .val
+                .as_os_str()
+                .to_str()
+                .ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?
+                .to_string();
+            Some(basedir_string.as_str())
+        }
+        None => None,
+    };
+    let deps = deps.iter()
+        .map(|f| render_filename(f, basedir))
+        .collect::<CargoResult<Vec<_>>>()?;
+
+    for output in cx.outputs(unit)?.iter() {
+        if let Some(ref link_dst) = output.hardlink {
+            let output_path = link_dst.with_extension("d");
+            if success {
+                let target_fn = render_filename(link_dst, basedir)?;
+
+                // If nothing changed don't recreate the file which could alter
+                // its mtime
+                if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) {
+                    if previous.len() == 1 && previous[0].0 == target_fn && previous[0].1 == deps {
+                        continue;
+                    }
+                }
+
+                // Otherwise write it all out
+                let mut outfile = BufWriter::new(File::create(output_path)?);
+                write!(outfile, "{}:", target_fn)?;
+                for dep in &deps {
+                    write!(outfile, " {}", dep)?;
+                }
+                writeln!(outfile)?;
+
+            // dep-info generation failed, so delete output file. This will
+            // usually cause the build system to always rerun the build
+            // rule, which is correct if inefficient.
+            } else if output_path.exists() {
+                paths::remove_file(output_path)?;
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs
new file mode 100644 (file)
index 0000000..f96c206
--- /dev/null
@@ -0,0 +1,466 @@
+use std::fmt;
+use std::rc::Rc;
+use std::str::FromStr;
+
+use semver::VersionReq;
+use semver::ReqParseError;
+use serde::ser;
+
+use core::{PackageId, SourceId, Summary};
+use core::interning::InternedString;
+use util::{Cfg, CfgExpr, Config};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+
+/// Information about a dependency requested by a Cargo manifest.
+/// Cheap to copy.
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub struct Dependency {
+    // All data lives behind an `Rc`, making `clone` cheap; mutators go
+    // through `Rc::make_mut` for copy-on-write semantics.
+    inner: Rc<Inner>,
+}
+
+/// The data underlying a Dependency.
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug)]
+struct Inner {
+    name: InternedString,
+    source_id: SourceId,
+    registry_id: Option<SourceId>,
+    req: VersionReq,
+    // Whether the manifest spelled out a version requirement, as opposed
+    // to defaulting to "any version".
+    specified_req: bool,
+    kind: Kind,
+    // When true, `matches_id` compares names only (see `new_override`).
+    only_match_name: bool,
+    // The manifest-side name, when the dependency was renamed via the
+    // `package` key; see `name_in_toml`/`package_name` below.
+    explicit_name_in_toml: Option<InternedString>,
+
+    optional: bool,
+    default_features: bool,
+    features: Vec<InternedString>,
+
+    // This dependency should be used only for this platform.
+    // `None` means *all platforms*.
+    platform: Option<Platform>,
+}
+
+/// A platform restriction on a dependency: either a literal platform
+/// name (e.g. a target triple) or a `cfg(...)` expression.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Platform {
+    Name(String),
+    Cfg(CfgExpr),
+}
+
+/// The externally-visible shape a `Dependency` serializes to.
+#[derive(Serialize)]
+struct SerializedDependency<'a> {
+    name: &'a str,
+    source: &'a SourceId,
+    req: String,
+    kind: Kind,
+    // The `package` rename, if any.
+    rename: Option<&'a str>,
+
+    optional: bool,
+    uses_default_features: bool,
+    features: &'a [InternedString],
+    target: Option<&'a Platform>,
+}
+
+impl ser::Serialize for Dependency {
+    // Serialization is delegated to `SerializedDependency` so the wire
+    // format is independent of the internal `Rc<Inner>` representation.
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        SerializedDependency {
+            name: &*self.package_name(),
+            source: self.source_id(),
+            req: self.version_req().to_string(),
+            kind: self.kind(),
+            optional: self.is_optional(),
+            uses_default_features: self.uses_default_features(),
+            features: self.features(),
+            target: self.platform(),
+            rename: self.explicit_name_in_toml().map(|s| s.as_str()),
+        }.serialize(s)
+    }
+}
+
+/// The kind of a dependency: a normal dependency, a dev-dependency, or a
+/// build-dependency.
+#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)]
+pub enum Kind {
+    Normal,
+    Development,
+    Build,
+}
+
+/// Parses a version requirement string, tolerating the deprecated forms
+/// older Cargos accepted.
+///
+/// When the requirement is deprecated and `extra` (the containing package
+/// and config) is supplied, a warning is printed and the corrected
+/// requirement is used; without `extra` the deprecation is a hard error.
+/// Other parse errors are wrapped with context naming `req` and `name`.
+fn parse_req_with_deprecated(
+    name: &str,
+    req: &str,
+    extra: Option<(&PackageId, &Config)>,
+) -> CargoResult<VersionReq> {
+    match VersionReq::parse(req) {
+        Err(ReqParseError::DeprecatedVersionRequirement(requirement)) => {
+            let (inside, config) = match extra {
+                Some(pair) => pair,
+                None => return Err(ReqParseError::DeprecatedVersionRequirement(requirement).into()),
+            };
+            let msg = format!(
+                "\
+parsed version requirement `{}` is no longer valid
+
+Previous versions of Cargo accepted this malformed requirement,
+but it is being deprecated. This was found when parsing the manifest
+of {} {}, and the correct version requirement is `{}`.
+
+This will soon become a hard error, so it's either recommended to
+update to a fixed version or contact the upstream maintainer about
+this warning.
+",
+                req,
+                inside.name(),
+                inside.version(),
+                requirement
+            );
+            config.shell().warn(&msg)?;
+
+            Ok(requirement)
+        },
+        Err(e) => {
+            // Route the error through `chain_err` to attach context; the
+            // `?` then propagates it (this arm never reaches `Ok`).
+            let err: CargoResult<VersionReq> = Err(e.into());
+            let v: VersionReq = err.chain_err(|| {
+                format!(
+                    "failed to parse the version requirement `{}` for dependency `{}`",
+                    req, name
+                )
+            })?;
+            Ok(v)
+        },
+        Ok(v) => Ok(v),
+    }
+}
+
+impl ser::Serialize for Kind {
+    // Serialized as `null` for normal dependencies, `"dev"`, or `"build"`.
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        match *self {
+            Kind::Normal => None,
+            Kind::Development => Some("dev"),
+            Kind::Build => Some("build"),
+        }.serialize(s)
+    }
+}
+
+impl Dependency {
+    /// Attempt to create a `Dependency` from an entry in the manifest.
+    ///
+    /// `inside` and `config` let the parser warn (rather than error) on
+    /// deprecated version requirements.
+    pub fn parse(
+        name: &str,
+        version: Option<&str>,
+        source_id: &SourceId,
+        inside: &PackageId,
+        config: &Config,
+    ) -> CargoResult<Dependency> {
+        let arg = Some((inside, config));
+        let (specified_req, version_req) = match version {
+            Some(v) => (true, parse_req_with_deprecated(name, v, arg)?),
+            None => (false, VersionReq::any()),
+        };
+
+        let mut ret = Dependency::new_override(name, source_id);
+        {
+            let ptr = Rc::make_mut(&mut ret.inner);
+            ptr.only_match_name = false;
+            ptr.req = version_req;
+            ptr.specified_req = specified_req;
+        }
+        Ok(ret)
+    }
+
+    /// Attempt to create a `Dependency` from an entry in the manifest.
+    /// Unlike `parse`, deprecated version requirements are a hard error.
+    pub fn parse_no_deprecated(
+        name: &str,
+        version: Option<&str>,
+        source_id: &SourceId,
+    ) -> CargoResult<Dependency> {
+        let (specified_req, version_req) = match version {
+            Some(v) => (true, parse_req_with_deprecated(name, v, None)?),
+            None => (false, VersionReq::any()),
+        };
+
+        let mut ret = Dependency::new_override(name, source_id);
+        {
+            let ptr = Rc::make_mut(&mut ret.inner);
+            ptr.only_match_name = false;
+            ptr.req = version_req;
+            ptr.specified_req = specified_req;
+        }
+        Ok(ret)
+    }
+
+    /// Creates an "override" dependency: one that matches any version of
+    /// `name` by name alone (see `matches_id`).
+    pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
+        assert!(!name.is_empty());
+        Dependency {
+            inner: Rc::new(Inner {
+                name: InternedString::new(name),
+                source_id: source_id.clone(),
+                registry_id: None,
+                req: VersionReq::any(),
+                kind: Kind::Normal,
+                only_match_name: true,
+                optional: false,
+                features: Vec::new(),
+                default_features: true,
+                specified_req: false,
+                platform: None,
+                explicit_name_in_toml: None,
+            }),
+        }
+    }
+
+    /// The version requirement this dependency places on the package.
+    pub fn version_req(&self) -> &VersionReq {
+        &self.inner.req
+    }
+
+    /// This is the name of this `Dependency` as listed in `Cargo.toml`.
+    ///
+    /// Or in other words, this is what shows up in the `[dependencies]` section
+    /// on the left hand side. This is **not** the name of the package that's
+    /// being depended on as the dependency can be renamed. For that use
+    /// `package_name` below.
+    ///
+    /// Both of the dependencies below return `foo` for `name_in_toml`:
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// foo = "0.1"
+    /// ```
+    ///
+    /// and ...
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// foo = { version = "0.1", package = 'bar' }
+    /// ```
+    pub fn name_in_toml(&self) -> InternedString {
+        self.explicit_name_in_toml().unwrap_or(self.inner.name)
+    }
+
+    /// The name of the package that this `Dependency` depends on.
+    ///
+    /// Usually this is what's written on the left hand side of a dependencies
+    /// section, but it can also be renamed via the `package` key.
+    ///
+    /// Both of the dependencies below return `foo` for `package_name`:
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// foo = "0.1"
+    /// ```
+    ///
+    /// and ...
+    ///
+    /// ```toml
+    /// [dependencies]
+    /// bar = { version = "0.1", package = 'foo' }
+    /// ```
+    pub fn package_name(&self) -> InternedString {
+        self.inner.name
+    }
+
+    /// The source this dependency must come from.
+    pub fn source_id(&self) -> &SourceId {
+        &self.inner.source_id
+    }
+
+    /// The registry source declared for this dependency, if any.
+    pub fn registry_id(&self) -> Option<&SourceId> {
+        self.inner.registry_id.as_ref()
+    }
+
+    /// Sets the registry source for this dependency.
+    pub fn set_registry_id(&mut self, registry_id: &SourceId) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).registry_id = Some(registry_id.clone());
+        self
+    }
+
+    /// Which kind of dependency this is (normal, dev, or build).
+    pub fn kind(&self) -> Kind {
+        self.inner.kind
+    }
+
+    /// Whether the manifest explicitly specified a version requirement.
+    pub fn specified_req(&self) -> bool {
+        self.inner.specified_req
+    }
+
+    /// If none, this dependencies must be built for all platforms.
+    /// If some, it must only be built for the specified platform.
+    pub fn platform(&self) -> Option<&Platform> {
+        self.inner.platform.as_ref()
+    }
+
+    /// The renamed name of this dependency, if any.
+    ///
+    /// If the `package` key is used in `Cargo.toml` then this returns the same
+    /// value as `name_in_toml`.
+    pub fn explicit_name_in_toml(&self) -> Option<InternedString> {
+        self.inner.explicit_name_in_toml
+    }
+
+    /// Sets the kind of the dependency.
+    pub fn set_kind(&mut self, kind: Kind) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).kind = kind;
+        self
+    }
+
+    /// Sets the list of features requested for the package.
+    pub fn set_features(&mut self, features: impl IntoIterator<Item=impl AsRef<str>>) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).features =
+            features.into_iter().map(|s| InternedString::new(s.as_ref())).collect();
+        self
+    }
+
+    /// Sets whether the dependency requests default features of the package.
+    pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).default_features = default_features;
+        self
+    }
+
+    /// Sets whether the dependency is optional.
+    pub fn set_optional(&mut self, optional: bool) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).optional = optional;
+        self
+    }
+
+    /// Set the source id for this dependency
+    pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).source_id = id;
+        self
+    }
+
+    /// Set the version requirement for this dependency
+    pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).req = req;
+        self
+    }
+
+    /// Sets (or clears) the platform restriction for this dependency.
+    pub fn set_platform(&mut self, platform: Option<Platform>) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).platform = platform;
+        self
+    }
+
+    /// Records the manifest-side name this dependency was declared under.
+    pub fn set_explicit_name_in_toml(&mut self, name: &str) -> &mut Dependency {
+        Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(InternedString::new(name));
+        self
+    }
+
+    /// Lock this dependency to depending on the specified package id
+    pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
+        // The id must already satisfy this dependency, or locking to it
+        // would change what the dependency can resolve to.
+        assert_eq!(self.inner.source_id, *id.source_id());
+        assert!(self.inner.req.matches(id.version()));
+        trace!(
+            "locking dep from `{}` with `{}` at {} to {}",
+            self.package_name(),
+            self.version_req(),
+            self.source_id(),
+            id
+        );
+        self.set_version_req(VersionReq::exact(id.version()))
+            .set_source_id(id.source_id().clone())
+    }
+
+    /// Returns whether this is a "locked" dependency, basically whether it has
+    /// an exact version req.
+    pub fn is_locked(&self) -> bool {
+        // Kind of a hack to figure this out, but it works!
+        self.inner.req.to_string().starts_with('=')
+    }
+
+    /// Returns false if the dependency is only used to build the local package.
+    pub fn is_transitive(&self) -> bool {
+        match self.inner.kind {
+            Kind::Normal | Kind::Build => true,
+            Kind::Development => false,
+        }
+    }
+
+    /// Returns true if this is a build-dependency.
+    pub fn is_build(&self) -> bool {
+        match self.inner.kind {
+            Kind::Build => true,
+            _ => false,
+        }
+    }
+
+    /// Returns true if the dependency is optional.
+    pub fn is_optional(&self) -> bool {
+        self.inner.optional
+    }
+
+    /// Returns true if the default features of the dependency are requested.
+    pub fn uses_default_features(&self) -> bool {
+        self.inner.default_features
+    }
+    /// Returns the list of features that are requested by the dependency.
+    pub fn features(&self) -> &[InternedString] {
+        &self.inner.features
+    }
+
+    /// Returns true if the package (`sum`) can fulfill this dependency request.
+    pub fn matches(&self, sum: &Summary) -> bool {
+        self.matches_id(sum.package_id())
+    }
+
+    /// Returns true if the package (`id`) can fulfill this dependency
+    /// request, ignoring where it comes from (only name and version are
+    /// checked).
+    pub fn matches_ignoring_source(&self, id: &PackageId) -> bool {
+        self.package_name() == id.name() && self.version_req().matches(id.version())
+    }
+
+    /// Returns true if the package (`id`) can fulfill this dependency request.
+    pub fn matches_id(&self, id: &PackageId) -> bool {
+        self.inner.name == id.name()
+            && (self.inner.only_match_name
+                || (self.inner.req.matches(id.version())
+                    && &self.inner.source_id == id.source_id()))
+    }
+
+    /// Rewrites this dependency's source from `to_replace` to
+    /// `replace_with`; dependencies on other sources pass through
+    /// unchanged.
+    pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency {
+        if self.source_id() != to_replace {
+            self
+        } else {
+            self.set_source_id(replace_with.clone());
+            self
+        }
+    }
+}
+
+impl Platform {
+    /// Whether this restriction applies to the platform `name` given the
+    /// available `cfg` values; a `Cfg` restriction never matches when no
+    /// cfg values are available.
+    pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool {
+        match *self {
+            Platform::Name(ref p) => p == name,
+            Platform::Cfg(ref p) => match cfg {
+                Some(cfg) => p.matches(cfg),
+                None => false,
+            },
+        }
+    }
+}
+
+impl ser::Serialize for Platform {
+    // Serialized via the `Display` form below, e.g. `cfg(unix)` or a
+    // plain platform name.
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        self.to_string().serialize(s)
+    }
+}
+
+impl FromStr for Platform {
+    type Err = CargoError;
+
+    /// `cfg(...)` strings are parsed as cfg expressions; anything else is
+    /// taken verbatim as a platform name.
+    fn from_str(s: &str) -> CargoResult<Platform> {
+        if s.starts_with("cfg(") && s.ends_with(')') {
+            // Parse only the expression between `cfg(` and `)`.
+            let s = &s[4..s.len() - 1];
+            let p = s.parse()
+                .map(Platform::Cfg)
+                .chain_err(|| format_err!("failed to parse `{}` as a cfg expression", s))?;
+            Ok(p)
+        } else {
+            Ok(Platform::Name(s.to_string()))
+        }
+    }
+}
+
+impl fmt::Display for Platform {
+    // The inverse of `FromStr` above: names print bare, cfg expressions
+    // print wrapped in `cfg(...)`.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            Platform::Name(ref n) => n.fmt(f),
+            Platform::Cfg(ref e) => write!(f, "cfg({})", e),
+        }
+    }
+}
diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs
new file mode 100644 (file)
index 0000000..7f20a49
--- /dev/null
@@ -0,0 +1,420 @@
+//! Support for nightly features in Cargo itself
+//!
+//! This file is the version of `feature_gate.rs` in upstream Rust for Cargo
+//! itself and is intended to be the avenue for which new features in Cargo are
+//! gated by default and then eventually stabilized. All known stable and
+//! unstable features are tracked in this file.
+//!
+//! If you're reading this then you're likely interested in adding a feature to
+//! Cargo, and the good news is that it shouldn't be too hard! To do this you'll
+//! want to follow these steps:
+//!
+//! 1. Add your feature. Do this by searching for "look here" in this file and
+//!    expanding the macro invocation that lists all features with your new
+//!    feature.
+//!
+//! 2. Find the appropriate place to place the feature gate in Cargo itself. If
+//!    you're extending the manifest format you'll likely just want to modify
+//!    the `Manifest::feature_gate` function, but otherwise you may wish to
+//!    place the feature gate elsewhere in Cargo.
+//!
+//! 3. To actually perform the feature gate, you'll want to have code that looks
+//!    like:
+//!
+//! ```rust,ignore
+//! use core::{Feature, Features};
+//!
+//! let feature = Feature::launch_into_space();
+//! package.manifest().features().require(feature).chain_err(|| {
+//!     "launching Cargo into space right now is unstable and may result in \
+//!      unintended damage to your codebase, use with caution"
+//! })?;
+//! ```
+//!
+//! Notably you'll notice the `require` function called with your `Feature`, and
+//! then you use `chain_err` to tack on more context for why the feature was
+//! required when the feature isn't activated.
+//!
+//! 4. Update the unstable documentation at
+//!    `src/doc/src/reference/unstable.md` to include a short description of
+//!    how to use your new feature.  When the feature is stabilized, be sure
+//!    that the Cargo Guide or Reference is updated to fully document the
+//!    feature and remove the entry from the Unstable section.
+//!
+//! And hopefully that's it! Bear with us though that this is, at the time of
+//! this writing, a very new feature in Cargo. If the process differs from this
+//! we'll be sure to update this documentation!
+
+use std::cell::Cell;
+use std::env;
+use std::fmt;
+use std::str::FromStr;
+
+use failure::Error;
+
+use util::errors::CargoResult;
+
+/// The edition of the compiler (RFC 2052)
+///
+/// Displayed and parsed as the bare year string, e.g. `"2018"`.
+#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)]
+pub enum Edition {
+    /// The 2015 edition
+    Edition2015,
+    /// The 2018 edition
+    Edition2018,
+}
+
+impl fmt::Display for Edition {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            Edition::Edition2015 => f.write_str("2015"),
+            Edition::Edition2018 => f.write_str("2018"),
+        }
+    }
+}
+impl FromStr for Edition {
+    type Err = Error;
+    /// Parses an edition from its year string, the inverse of `Display`.
+    fn from_str(s: &str) -> Result<Self, Error> {
+        match s {
+            "2015" => Ok(Edition::Edition2015),
+            "2018" => Ok(Edition::Edition2018),
+            // Anything else (including future editions) is rejected with a
+            // message listing the supported values.
+            s => {
+                bail!("supported edition values are `2015` or `2018`, but `{}` \
+                       is unknown", s)
+            }
+        }
+    }
+}
+
+/// Whether a Cargo feature is stable (always usable; listing it only
+/// warns) or unstable (requires a nightly Cargo to activate).
+#[derive(PartialEq)]
+enum Status {
+    Stable,
+    Unstable,
+}
+
+/// Generates the `Features` struct plus, for every declared feature, a
+/// `Feature::<name>()` accessor and the name-based lookup used by
+/// `Features::status`. Stable features report enabled unconditionally.
+macro_rules! features {
+    (
+        pub struct Features {
+            $([$stab:ident] $feature:ident: bool,)*
+        }
+    ) => (
+        #[derive(Default, Clone, Debug)]
+        pub struct Features {
+            $($feature: bool,)*
+            activated: Vec<String>,
+        }
+
+        impl Feature {
+            $(
+                pub fn $feature() -> &'static Feature {
+                    fn get(features: &Features) -> bool {
+                        stab!($stab) == Status::Stable || features.$feature
+                    }
+                    static FEAT: Feature = Feature {
+                        name: stringify!($feature),
+                        get,
+                    };
+                    &FEAT
+                }
+            )*
+
+            fn is_enabled(&self, features: &Features) -> bool {
+                (self.get)(features)
+            }
+        }
+
+        impl Features {
+            fn status(&mut self, feature: &str) -> Option<(&mut bool, Status)> {
+                // User-facing names use dashes; raw underscores are
+                // rejected before the dash-to-underscore translation.
+                if feature.contains("_") {
+                    return None
+                }
+                let feature = feature.replace("-", "_");
+                $(
+                    if feature == stringify!($feature) {
+                        return Some((&mut self.$feature, stab!($stab)))
+                    }
+                )*
+                None
+            }
+        }
+    )
+}
+
+/// Maps the `[stable]`/`[unstable]` markers used in `features!` to
+/// `Status` values.
+macro_rules! stab {
+    (stable) => {
+        Status::Stable
+    };
+    (unstable) => {
+        Status::Unstable
+    };
+}
+
+/// A listing of all features in Cargo
+///
+/// "look here"
+///
+/// This is the macro that lists all stable and unstable features in Cargo.
+/// You'll want to add to this macro whenever you add a feature to Cargo, also
+/// following the directions above.
+///
+/// Note that all feature names here are valid Rust identifiers, but the `_`
+/// character is translated to `-` when specified in the `cargo-features`
+/// manifest entry in `Cargo.toml`.
+features! {
+    pub struct Features {
+
+        // NB: declaration order doesn't matter here — features are looked
+        // up by their stringified names.
+
+        // A dummy feature that doesn't actually gate anything, but it's used in
+        // testing to ensure that we can enable stable features.
+        [stable] test_dummy_stable: bool,
+
+        // A dummy feature that gates the usage of the `im-a-teapot` manifest
+        // entry. This is basically just intended for tests.
+        [unstable] test_dummy_unstable: bool,
+
+        // Downloading packages from alternative registry indexes.
+        [unstable] alternative_registries: bool,
+
+        // Using editions
+        [stable] edition: bool,
+
+        // Renaming a package in the manifest via the `package` key
+        [stable] rename_dependency: bool,
+
+        // Whether a lock file is published with this crate
+        [unstable] publish_lockfile: bool,
+
+        // Overriding profiles for dependencies.
+        [unstable] profile_overrides: bool,
+
+        // Separating the namespaces for features and dependencies
+        [unstable] namespaced_features: bool,
+
+        // "default-run" manifest option,
+        [unstable] default_run: bool,
+
+        // Declarative build scripts.
+        [unstable] metabuild: bool,
+    }
+}
+
+/// A single named Cargo feature with a getter that reports whether it is
+/// enabled for a given `Features` set (stable features are always on).
+pub struct Feature {
+    name: &'static str,
+    get: fn(&Features) -> bool,
+}
+
+impl Features {
+    /// Builds a `Features` set from a manifest's `cargo-features` list,
+    /// pushing a warning for each already-stabilized entry.
+    pub fn new(features: &[String], warnings: &mut Vec<String>) -> CargoResult<Features> {
+        let mut ret = Features::default();
+        for feature in features {
+            ret.add(feature, warnings)?;
+            ret.activated.push(feature.to_string());
+        }
+        Ok(ret)
+    }
+
+    // Activates a single feature: unknown or duplicate names are errors,
+    // stable ones produce a warning, and unstable ones are rejected unless
+    // nightly features are allowed.
+    fn add(&mut self, feature: &str, warnings: &mut Vec<String>) -> CargoResult<()> {
+        let (slot, status) = match self.status(feature) {
+            Some(p) => p,
+            None => bail!("unknown cargo feature `{}`", feature),
+        };
+
+        if *slot {
+            bail!("the cargo feature `{}` has already been activated", feature);
+        }
+
+        match status {
+            Status::Stable => {
+                let warning = format!(
+                    "the cargo feature `{}` is now stable \
+                     and is no longer necessary to be listed \
+                     in the manifest",
+                    feature
+                );
+                warnings.push(warning);
+            }
+            Status::Unstable if !nightly_features_allowed() => bail!(
+                "the cargo feature `{}` requires a nightly version of \
+                 Cargo, but this is the `{}` channel",
+                feature,
+                channel()
+            ),
+            Status::Unstable => {}
+        }
+
+        *slot = true;
+
+        Ok(())
+    }
+
+    /// The raw `cargo-features` strings that were activated.
+    pub fn activated(&self) -> &[String] {
+        &self.activated
+    }
+
+    /// Errors unless `feature` is enabled, with a hint in the message on
+    /// how to enable it (which differs on nightly vs other channels).
+    pub fn require(&self, feature: &Feature) -> CargoResult<()> {
+        if feature.is_enabled(self) {
+            Ok(())
+        } else {
+            // Error messages use the dashed, user-facing spelling.
+            let feature = feature.name.replace("_", "-");
+            let mut msg = format!("feature `{}` is required", feature);
+
+            if nightly_features_allowed() {
+                let s = format!(
+                    "\n\nconsider adding `cargo-features = [\"{0}\"]` \
+                     to the manifest",
+                    feature
+                );
+                msg.push_str(&s);
+            } else {
+                let s = format!(
+                    "\n\n\
+                     this Cargo does not support nightly features, but if you\n\
+                     switch to nightly channel you can add\n\
+                     `cargo-features = [\"{}\"]` to enable this feature",
+                    feature
+                );
+                msg.push_str(&s);
+            }
+            bail!("{}", msg);
+        }
+    }
+
+    /// Whether `feature` is enabled in this set.
+    pub fn is_enabled(&self, feature: &Feature) -> bool {
+        feature.is_enabled(self)
+    }
+}
+
+/// A parsed representation of all unstable flags that Cargo accepts.
+///
+/// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for
+/// gating unstable functionality to Cargo. These flags are only available on
+/// the nightly channel of Cargo.
+///
+/// This struct doesn't have quite the same convenience macro that the features
+/// have above, but the procedure should still be relatively stable for adding a
+/// new unstable flag:
+///
+/// 1. First, add a field to this `CliUnstable` structure. All flags are allowed
+///    to have a value as the `-Z` flags are either of the form `-Z foo` or
+///    `-Z foo=bar`, and it's up to you how to parse `bar`.
+///
+/// 2. Add an arm to the match statement in `CliUnstable::add` below to match on
+///    your new flag. The key (`k`) is what you're matching on and the value is
+///    in `v`.
+///
+/// 3. (optional) Add a new parsing function to parse your datatype. As of now
+///    there's an example for `bool`, but more can be added!
+///
+/// 4. In Cargo use `config.cli_unstable()` to get a reference to this structure
+///    and then test for your flag or your value and act accordingly.
+///
+/// If you have any trouble with this, please let us know!
+#[derive(Default, Debug)]
+pub struct CliUnstable {
+    // One boolean per `-Z` flag; parsing lives in `CliUnstable::add`.
+    pub print_im_a_teapot: bool,
+    pub unstable_options: bool,
+    pub offline: bool,
+    pub no_index_update: bool,
+    pub avoid_dev_deps: bool,
+    pub minimal_versions: bool,
+    pub package_features: bool,
+    pub advanced_env: bool,
+    pub config_profile: bool,
+}
+
+impl CliUnstable {
+    /// Parses all `-Z` flags from the command line; any flag at all is an
+    /// error unless nightly features are allowed.
+    pub fn parse(&mut self, flags: &[String]) -> CargoResult<()> {
+        if !flags.is_empty() && !nightly_features_allowed() {
+            bail!("the `-Z` flag is only accepted on the nightly channel of Cargo")
+        }
+        for flag in flags {
+            self.add(flag)?;
+        }
+        Ok(())
+    }
+
+    /// Parses one flag of the form `name` or `name=value`.
+    fn add(&mut self, flag: &str) -> CargoResult<()> {
+        let mut parts = flag.splitn(2, '=');
+        let k = parts.next().unwrap();
+        let v = parts.next();
+
+        // A missing value (`-Z foo`) counts as `yes`.
+        fn parse_bool(value: Option<&str>) -> CargoResult<bool> {
+            match value {
+                None | Some("yes") => Ok(true),
+                Some("no") => Ok(false),
+                Some(s) => bail!("expected `no` or `yes`, found: {}", s),
+            }
+        }
+
+        match k {
+            "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(v)?,
+            "unstable-options" => self.unstable_options = true,
+            "offline" => self.offline = true,
+            "no-index-update" => self.no_index_update = true,
+            "avoid-dev-deps" => self.avoid_dev_deps = true,
+            "minimal-versions" => self.minimal_versions = true,
+            "package-features" => self.package_features = true,
+            "advanced-env" => self.advanced_env = true,
+            "config-profile" => self.config_profile = true,
+            _ => bail!("unknown `-Z` flag specified: {}", k),
+        }
+
+        Ok(())
+    }
+}
+
+/// Determines the release channel this Cargo belongs to: a test-only env
+/// override first, then `RUSTC_BOOTSTRAP=1` staging (treated as "dev"),
+/// then the channel compiled into the version info, defaulting to "dev".
+fn channel() -> String {
+    if let Ok(override_channel) = env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS") {
+        return override_channel;
+    }
+    if let Ok(staging) = env::var("RUSTC_BOOTSTRAP") {
+        if staging == "1" {
+            return "dev".to_string();
+        }
+    }
+    ::version()
+        .cfg_info
+        .map(|c| c.release_channel)
+        .unwrap_or_else(|| String::from("dev"))
+}
+
+thread_local!(
+    // Set by `maybe_allow_nightly_features`; only honored when the channel
+    // is nightly or dev (see `nightly_features_allowed`).
+    static NIGHTLY_FEATURES_ALLOWED: Cell<bool> = Cell::new(false);
+    // Set by `enable_nightly_features`; honored unconditionally (tests).
+    static ENABLE_NIGHTLY_FEATURES: Cell<bool> = Cell::new(false);
+);
+
+/// This is a little complicated.
+/// This should return false if:
+/// - this is an artifact of the rustc distribution process for "stable" or for "beta"
+/// - this is an `#[test]` that does not opt in with `enable_nightly_features`
+/// - this is a integration test that uses `ProcessBuilder`
+///      that does not opt in with `masquerade_as_nightly_cargo`
+/// This should return true if:
+/// - this is an artifact of the rustc distribution process for "nightly"
+/// - this is being used in the rustc distribution process internally
+/// - this is a cargo executable that was built from source
+/// - this is an `#[test]` that called `enable_nightly_features`
+/// - this is a integration test that uses `ProcessBuilder`
+///       that called `masquerade_as_nightly_cargo`
+pub fn nightly_features_allowed() -> bool {
+    // The test override short-circuits everything, including the channel
+    // check below.
+    if ENABLE_NIGHTLY_FEATURES.with(|c| c.get()) {
+        return true
+    }
+     match &channel()[..] {
+        "nightly" | "dev" => NIGHTLY_FEATURES_ALLOWED.with(|c| c.get()),
+        _ => false,
+    }
+}
+
+/// Allows nightly features to be enabled for this thread, but only if the
+/// development channel is nightly or dev.
+///
+/// Used by cargo main to ensure that a cargo build from source has nightly features
+pub fn maybe_allow_nightly_features() {
+    // The flag itself is set unconditionally; the channel check happens at
+    // read time in `nightly_features_allowed`.
+    NIGHTLY_FEATURES_ALLOWED.with(|c| c.set(true));
+}
+
+/// Forcibly enables nightly features for this thread.
+///
+/// Used by tests to allow the use of nightly features.
+pub fn enable_nightly_features() {
+    // Unlike `maybe_allow_nightly_features`, this bypasses the channel
+    // check entirely (see `nightly_features_allowed`).
+    ENABLE_NIGHTLY_FEATURES.with(|c| c.set(true));
+}
diff --git a/src/cargo/core/interning.rs b/src/cargo/core/interning.rs
new file mode 100644 (file)
index 0000000..c925034
--- /dev/null
@@ -0,0 +1,107 @@
+use serde::{Serialize, Serializer};
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::collections::HashSet;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::ops::Deref;
+use std::ptr;
+use std::str;
+use std::sync::Mutex;
+
+pub fn leak(s: String) -> &'static str {
+    Box::leak(s.into_boxed_str())
+}
+
+lazy_static! {
+    // Global interner. Every `InternedString` borrows from this set, and
+    // entries are leaked (never removed), so the `&'static str`s stay valid.
+    static ref STRING_CACHE: Mutex<HashSet<&'static str>> = Mutex::new(HashSet::new());
+}
+
+/// A string interned in the process-wide `STRING_CACHE`.
+///
+/// Each distinct string is stored exactly once, so equality can be checked
+/// by comparing pointers (see the `PartialEq` impl below).
+#[derive(Clone, Copy)]
+pub struct InternedString {
+    inner: &'static str,
+}
+
+impl PartialEq for InternedString {
+    fn eq(&self, other: &InternedString) -> bool {
+        // Interning guarantees one leaked allocation per distinct string,
+        // so pointer identity is equivalent to string equality here.
+        ptr::eq(self.as_str(), other.as_str())
+    }
+}
+
+impl Eq for InternedString {}
+
+impl InternedString {
+    pub fn new(str: &str) -> InternedString {
+        let mut cache = STRING_CACHE.lock().unwrap();
+        let s = cache.get(str).map(|&s| s).unwrap_or_else(|| {
+            let s = leak(str.to_string());
+            cache.insert(s);
+            s
+        });
+
+        InternedString { inner: s }
+    }
+
+    pub fn as_str(&self) -> &'static str {
+        self.inner
+    }
+}
+
+impl Deref for InternedString {
+    type Target = str;
+
+    // Note the `'static` lifetime: derefs hand out the leaked cache entry.
+    fn deref(&self) -> &'static str {
+        self.as_str()
+    }
+}
+
+impl Hash for InternedString {
+    // NB: we can't implement this as `identity(self).hash(state)`,
+    // because we use this for on-disk fingerprints and so need
+    // stability across Cargo invocations.
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        // Hash the string contents, not the pointer.
+        self.as_str().hash(state);
+    }
+}
+
+impl Borrow<str> for InternedString {
+    // Required so `&str` keys can look up `InternedString` entries in maps.
+    // If we implement Hash as `identity(self).hash(state)`,
+    // then this will need to be removed.
+    fn borrow(&self) -> &str {
+        self.as_str()
+    }
+}
+
+impl fmt::Debug for InternedString {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(self.as_str(), f)
+    }
+}
+
+impl fmt::Display for InternedString {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(self.as_str(), f)
+    }
+}
+
+impl Ord for InternedString {
+    fn cmp(&self, other: &InternedString) -> Ordering {
+        self.as_str().cmp(other.as_str())
+    }
+}
+
+impl PartialOrd for InternedString {
+    fn partial_cmp(&self, other: &InternedString) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Serialize for InternedString {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        serializer.serialize_str(self.inner)
+    }
+}
diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs
new file mode 100644 (file)
index 0000000..8bc6fcb
--- /dev/null
@@ -0,0 +1,899 @@
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+
+use semver::Version;
+use serde::ser;
+use toml;
+use url::Url;
+
+use core::interning::InternedString;
+use core::profiles::Profiles;
+use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
+use core::{Edition, Feature, Features, WorkspaceConfig};
+use util::errors::*;
+use util::toml::TomlManifest;
+use util::{Config, Filesystem, short_hash};
+
+/// A manifest loaded from disk: either a real package manifest, or a
+/// "virtual" one that carries only workspace-level data (no package
+/// summary or targets; see `VirtualManifest`).
+pub enum EitherManifest {
+    Real(Manifest),
+    Virtual(VirtualManifest),
+}
+
+/// Contains all the information about a package, as loaded from a Cargo.toml.
+#[derive(Clone, Debug)]
+pub struct Manifest {
+    summary: Summary,
+    targets: Vec<Target>,
+    links: Option<String>,
+    warnings: Warnings,
+    exclude: Vec<String>,
+    include: Vec<String>,
+    metadata: ManifestMetadata,
+    custom_metadata: Option<toml::Value>,
+    profiles: Profiles,
+    publish: Option<Vec<String>>,
+    publish_lockfile: bool,
+    replace: Vec<(PackageIdSpec, Dependency)>,
+    patch: HashMap<Url, Vec<Dependency>>,
+    workspace: WorkspaceConfig,
+    // Parsed TOML as written on disk; exposed via `original()`.
+    original: Rc<TomlManifest>,
+    features: Features,
+    edition: Edition,
+    // `-Z` test helper; see `print_teapot`.
+    im_a_teapot: Option<bool>,
+    default_run: Option<String>,
+    metabuild: Option<Vec<String>>,
+}
+
+/// When parsing `Cargo.toml`, some warnings should be silenced
+/// if the manifest comes from a dependency. `DelayedWarning`
+/// allows this delayed emission of warnings.
+#[derive(Clone, Debug)]
+pub struct DelayedWarning {
+    pub message: String,
+    pub is_critical: bool,
+}
+
+/// An ordered collection of `DelayedWarning`s gathered during parsing.
+#[derive(Clone, Debug)]
+pub struct Warnings(Vec<DelayedWarning>);
+
+/// A manifest that is not itself a package: it carries only
+/// workspace-level settings (no package `Summary` or targets).
+#[derive(Clone, Debug)]
+pub struct VirtualManifest {
+    replace: Vec<(PackageIdSpec, Dependency)>,
+    patch: HashMap<Url, Vec<Dependency>>,
+    workspace: WorkspaceConfig,
+    profiles: Profiles,
+    warnings: Warnings,
+}
+
+/// General metadata about a package which is just blindly uploaded to the
+/// registry.
+///
+/// Note that many of these fields can contain invalid values such as the
+/// homepage, repository, documentation, or license. These fields are not
+/// validated by cargo itself, but rather it is up to the registry when uploaded
+/// to validate these fields. Cargo will itself accept any valid TOML
+/// specification for these values.
+#[derive(PartialEq, Clone, Debug)]
+pub struct ManifestMetadata {
+    pub authors: Vec<String>,
+    pub keywords: Vec<String>,
+    pub categories: Vec<String>,
+    pub license: Option<String>,
+    pub license_file: Option<String>,
+    pub description: Option<String>,   // not markdown
+    pub readme: Option<String>,        // file, not contents
+    pub homepage: Option<String>,      // url
+    pub repository: Option<String>,    // url
+    pub documentation: Option<String>, // url
+    // badge name -> (attribute -> value), passed through to the registry
+    pub badges: BTreeMap<String, BTreeMap<String, String>>,
+    pub links: Option<String>,
+}
+
+/// The kind of a library target, mirroring the values accepted by rustc's
+/// `--crate-type` flag (see `crate_type` below).
+#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum LibKind {
+    Lib,
+    Rlib,
+    Dylib,
+    ProcMacro,
+    Other(String),
+}
+
+impl LibKind {
+    /// Returns the argument suitable for `--crate-type` to pass to rustc.
+    pub fn crate_type(&self) -> &str {
+        match *self {
+            LibKind::Lib => "lib",
+            LibKind::Rlib => "rlib",
+            LibKind::Dylib => "dylib",
+            LibKind::ProcMacro => "proc-macro",
+            LibKind::Other(ref s) => s,
+        }
+    }
+
+    pub fn linkable(&self) -> bool {
+        match *self {
+            LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true,
+            LibKind::Other(..) => false,
+        }
+    }
+}
+
+impl fmt::Debug for LibKind {
+    // Debug-formats as the `--crate-type` string (e.g. `"rlib"`).
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.crate_type().fmt(f)
+    }
+}
+
+impl<'a> From<&'a String> for LibKind {
+    fn from(string: &'a String) -> Self {
+        match string.as_ref() {
+            "lib" => LibKind::Lib,
+            "rlib" => LibKind::Rlib,
+            "dylib" => LibKind::Dylib,
+            "proc-macro" => LibKind::ProcMacro,
+            s => LibKind::Other(s.to_string()),
+        }
+    }
+}
+
+/// The kind of build artifact a `Target` produces.
+#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub enum TargetKind {
+    Lib(Vec<LibKind>),
+    Bin,
+    Test,
+    Bench,
+    ExampleLib(Vec<LibKind>),
+    ExampleBin,
+    CustomBuild,
+}
+
+impl ser::Serialize for TargetKind {
+    // Serialized as a list of strings (e.g. `["bin"]`) for historical
+    // reasons (see `SerializedTarget::kind`); `Lib` lists its crate types.
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        use self::TargetKind::*;
+        match *self {
+            Lib(ref kinds) => s.collect_seq(kinds.iter().map(LibKind::crate_type)),
+            Bin => ["bin"].serialize(s),
+            ExampleBin | ExampleLib(_) => ["example"].serialize(s),
+            Test => ["test"].serialize(s),
+            CustomBuild => ["custom-build"].serialize(s),
+            Bench => ["bench"].serialize(s),
+        }
+    }
+}
+
+impl fmt::Debug for TargetKind {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        use self::TargetKind::*;
+        match *self {
+            Lib(ref kinds) => kinds.fmt(f),
+            Bin => "bin".fmt(f),
+            ExampleBin | ExampleLib(_) => "example".fmt(f),
+            Test => "test".fmt(f),
+            CustomBuild => "custom-build".fmt(f),
+            Bench => "bench".fmt(f),
+        }
+    }
+}
+
+/// Information about a binary, a library, an example, etc. that is part of the
+/// package.
+#[derive(Clone, Hash, PartialEq, Eq)]
+pub struct Target {
+    kind: TargetKind,
+    name: String,
+    // Note that the `src_path` here is excluded from the `Hash` implementation
+    // as it's absolute currently and is otherwise a little too brittle for
+    // causing rebuilds. Instead the hash for the path that we send to the
+    // compiler is handled elsewhere.
+    src_path: TargetSourcePath,
+    required_features: Option<Vec<String>>,
+    tested: bool,
+    benched: bool,
+    doc: bool,
+    doctest: bool,
+    harness: bool, // whether to use the test harness (--test)
+    // set for build scripts (see `custom_build_target`); presumably means
+    // "compile for the host platform" — TODO confirm
+    for_host: bool,
+    edition: Edition,
+}
+
+/// Where a target's source comes from: an on-disk file, or a generated
+/// "metabuild" build script (see `Manifest::metabuild_path`).
+#[derive(Clone, PartialEq, Eq)]
+pub enum TargetSourcePath {
+    Path(PathBuf),
+    Metabuild,
+}
+
+impl TargetSourcePath {
+    pub fn path(&self) -> &Path {
+        match self {
+            TargetSourcePath::Path(path) => path.as_ref(),
+            TargetSourcePath::Metabuild => panic!("metabuild not expected"),
+        }
+    }
+
+    pub fn is_path(&self) -> bool {
+        match self {
+            TargetSourcePath::Path(_) => true,
+            _ => false,
+        }
+    }
+}
+
+impl Hash for TargetSourcePath {
+    // Intentionally a no-op: the source path is excluded from hashing
+    // (see the comment on `Target::src_path`).
+    fn hash<H: Hasher>(&self, _: &mut H) {
+        // ...
+    }
+}
+
+impl fmt::Debug for TargetSourcePath {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            TargetSourcePath::Path(path) => path.fmt(f),
+            TargetSourcePath::Metabuild => "metabuild".fmt(f),
+        }
+    }
+}
+
+impl From<PathBuf> for TargetSourcePath {
+    fn from(path: PathBuf) -> Self {
+        assert!(
+            path.is_absolute(),
+            "`{}` is not absolute",
+            path.display()
+        );
+        TargetSourcePath::Path(path)
+    }
+}
+
+/// Borrowed mirror of `Target` in a form where `Serialize` can be derived.
+#[derive(Serialize)]
+struct SerializedTarget<'a> {
+    /// Is this a `--bin bin`, `--lib`, `--example ex`?
+    /// Serialized as a list of strings for historical reasons.
+    kind: &'a TargetKind,
+    /// Corresponds to `--crate-type` compiler attribute.
+    /// See https://doc.rust-lang.org/reference/linkage.html
+    crate_types: Vec<&'a str>,
+    name: &'a str,
+    /// Absolute path to the target's entry-point source file.
+    src_path: &'a PathBuf,
+    edition: &'a str,
+    #[serde(rename = "required-features", skip_serializing_if = "Option::is_none")]
+    required_features: Option<Vec<&'a str>>,
+}
+
+impl ser::Serialize for Target {
+    fn serialize<S: ser::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+        // Build the borrowed mirror struct and serialize that. The owned
+        // `to_path_buf()`/`to_string()` temporaries live until the end of
+        // this statement, so borrowing them here is fine.
+        SerializedTarget {
+            kind: &self.kind,
+            crate_types: self.rustc_crate_types(),
+            name: &self.name,
+            src_path: &self.src_path.path().to_path_buf(),
+            edition: &self.edition.to_string(),
+            required_features: self
+                .required_features
+                .as_ref()
+                .map(|rf| rf.iter().map(|s| &**s).collect()),
+        }.serialize(s)
+    }
+}
+
+compact_debug! {
+    impl fmt::Debug for Target {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+            // NOTE(review): `compact_debug!` appears to compare `self`
+            // against the freshly-built `default` target and print
+            // `default_name` for fields that match — confirm against the
+            // macro's definition.
+            let (default, default_name) = {
+                match &self.kind {
+                    TargetKind::Lib(kinds) => {
+                        (
+                            Target::lib_target(
+                                &self.name,
+                                kinds.clone(),
+                                self.src_path().path().to_path_buf(),
+                                self.edition,
+                            ),
+                            format!("lib_target({:?}, {:?}, {:?}, {:?})",
+                                    self.name, kinds, self.src_path, self.edition),
+                        )
+                    }
+                    TargetKind::CustomBuild => {
+                        match self.src_path {
+                            TargetSourcePath::Path(ref path) => {
+                                (
+                                    Target::custom_build_target(
+                                        &self.name,
+                                        path.to_path_buf(),
+                                        self.edition,
+                                    ),
+                                    format!("custom_build_target({:?}, {:?}, {:?})",
+                                            self.name, path, self.edition),
+                                )
+                            }
+                            TargetSourcePath::Metabuild => {
+                                (
+                                    Target::metabuild_target(&self.name),
+                                    format!("metabuild_target({:?})", self.name),
+                                )
+                            }
+                        }
+                    }
+                    _ => (
+                        Target::new(self.src_path.clone(), self.edition),
+                        format!("with_path({:?}, {:?})", self.src_path, self.edition),
+                    ),
+                }
+            };
+            [debug_the_fields(
+                kind
+                name
+                src_path
+                required_features
+                tested
+                benched
+                doc
+                doctest
+                harness
+                for_host
+                edition
+            )]
+        }
+    }
+}
+
+impl Manifest {
+    /// Assembles a `Manifest` from already-parsed parts; the warning list
+    /// starts out empty.
+    pub fn new(
+        summary: Summary,
+        targets: Vec<Target>,
+        exclude: Vec<String>,
+        include: Vec<String>,
+        links: Option<String>,
+        metadata: ManifestMetadata,
+        custom_metadata: Option<toml::Value>,
+        profiles: Profiles,
+        publish: Option<Vec<String>>,
+        publish_lockfile: bool,
+        replace: Vec<(PackageIdSpec, Dependency)>,
+        patch: HashMap<Url, Vec<Dependency>>,
+        workspace: WorkspaceConfig,
+        features: Features,
+        edition: Edition,
+        im_a_teapot: Option<bool>,
+        default_run: Option<String>,
+        original: Rc<TomlManifest>,
+        metabuild: Option<Vec<String>>,
+    ) -> Manifest {
+        Manifest {
+            summary,
+            targets,
+            warnings: Warnings::new(),
+            exclude,
+            include,
+            links,
+            metadata,
+            custom_metadata,
+            profiles,
+            publish,
+            replace,
+            patch,
+            workspace,
+            features,
+            edition,
+            original,
+            im_a_teapot,
+            default_run,
+            publish_lockfile,
+            metabuild,
+        }
+    }
+
+    pub fn dependencies(&self) -> &[Dependency] {
+        self.summary.dependencies()
+    }
+    pub fn exclude(&self) -> &[String] {
+        &self.exclude
+    }
+    pub fn include(&self) -> &[String] {
+        &self.include
+    }
+    pub fn metadata(&self) -> &ManifestMetadata {
+        &self.metadata
+    }
+    pub fn name(&self) -> InternedString {
+        self.package_id().name()
+    }
+    pub fn package_id(&self) -> &PackageId {
+        self.summary.package_id()
+    }
+    pub fn summary(&self) -> &Summary {
+        &self.summary
+    }
+    pub fn targets(&self) -> &[Target] {
+        &self.targets
+    }
+    pub fn version(&self) -> &Version {
+        self.package_id().version()
+    }
+    pub fn warnings_mut(&mut self) -> &mut Warnings {
+        &mut self.warnings
+    }
+    pub fn warnings(&self) -> &Warnings {
+        &self.warnings
+    }
+    pub fn profiles(&self) -> &Profiles {
+        &self.profiles
+    }
+    pub fn publish(&self) -> &Option<Vec<String>> {
+        &self.publish
+    }
+    pub fn publish_lockfile(&self) -> bool {
+        self.publish_lockfile
+    }
+    pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+        &self.replace
+    }
+    pub fn original(&self) -> &TomlManifest {
+        &self.original
+    }
+    pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
+        &self.patch
+    }
+    pub fn links(&self) -> Option<&str> {
+        self.links.as_ref().map(|s| &s[..])
+    }
+
+    pub fn workspace_config(&self) -> &WorkspaceConfig {
+        &self.workspace
+    }
+
+    pub fn features(&self) -> &Features {
+        &self.features
+    }
+
+    pub fn set_summary(&mut self, summary: Summary) {
+        self.summary = summary;
+    }
+
+    /// Rewrites the summary's source; every other field moves over as-is.
+    pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest {
+        Manifest {
+            summary: self.summary.map_source(to_replace, replace_with),
+            ..self
+        }
+    }
+
+    /// Errors if the manifest uses unstable keys (`im-a-teapot`,
+    /// `default-run`) without the corresponding feature being enabled.
+    pub fn feature_gate(&self) -> CargoResult<()> {
+        if self.im_a_teapot.is_some() {
+            self.features
+                .require(Feature::test_dummy_unstable())
+                .chain_err(|| {
+                    format_err!(
+                        "the `im-a-teapot` manifest key is unstable and may \
+                         not work properly in England"
+                    )
+                })?;
+        }
+
+        if self.default_run.is_some() {
+            self.features
+                .require(Feature::default_run())
+                .chain_err(|| {
+                    format_err!(
+                        "the `default-run` manifest key is unstable"
+                    )
+                })?;
+        }
+
+        Ok(())
+    }
+
+    // Just a helper function to test out `-Z` flags on Cargo
+    pub fn print_teapot(&self, config: &Config) {
+        if let Some(teapot) = self.im_a_teapot {
+            if config.cli_unstable().print_im_a_teapot {
+                println!("im-a-teapot = {}", teapot);
+            }
+        }
+    }
+
+    pub fn edition(&self) -> Edition {
+        self.edition
+    }
+
+    pub fn custom_metadata(&self) -> Option<&toml::Value> {
+        self.custom_metadata.as_ref()
+    }
+
+    pub fn default_run(&self) -> Option<&str> {
+        self.default_run.as_ref().map(|s| &s[..])
+    }
+
+    pub fn metabuild(&self) -> Option<&Vec<String>> {
+        self.metabuild.as_ref()
+    }
+
+    /// Path of the generated metabuild script inside `target_dir`
+    /// (`.metabuild/metabuild-<name>-<hash>.rs`).
+    pub fn metabuild_path(&self, target_dir: Filesystem) -> PathBuf {
+        let hash = short_hash(self.package_id());
+        target_dir
+            .into_path_unlocked()
+            .join(".metabuild")
+            .join(format!("metabuild-{}-{}.rs", self.name(), hash))
+    }
+}
+
+impl VirtualManifest {
+    /// Assembles a virtual manifest; the warning list starts out empty.
+    pub fn new(
+        replace: Vec<(PackageIdSpec, Dependency)>,
+        patch: HashMap<Url, Vec<Dependency>>,
+        workspace: WorkspaceConfig,
+        profiles: Profiles,
+    ) -> VirtualManifest {
+        VirtualManifest {
+            replace,
+            patch,
+            workspace,
+            profiles,
+            warnings: Warnings::new(),
+        }
+    }
+
+    pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+        &self.replace
+    }
+
+    pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
+        &self.patch
+    }
+
+    pub fn workspace_config(&self) -> &WorkspaceConfig {
+        &self.workspace
+    }
+
+    pub fn profiles(&self) -> &Profiles {
+        &self.profiles
+    }
+
+    pub fn warnings_mut(&mut self) -> &mut Warnings {
+        &mut self.warnings
+    }
+
+    pub fn warnings(&self) -> &Warnings {
+        &self.warnings
+    }
+}
+
+impl Target {
+    // Private base constructor: a nameless bin target with default flags;
+    // used by all the `*_target` constructors below via struct update.
+    fn new(src_path: TargetSourcePath, edition: Edition) -> Target {
+        Target {
+            kind: TargetKind::Bin,
+            name: String::new(),
+            src_path,
+            required_features: None,
+            doc: false,
+            doctest: false,
+            harness: true,
+            for_host: false,
+            edition,
+            tested: true,
+            benched: true,
+        }
+    }
+
+    fn with_path(src_path: PathBuf, edition: Edition) -> Target {
+        Target::new(TargetSourcePath::from(src_path), edition)
+    }
+
+    pub fn lib_target(
+        name: &str,
+        crate_targets: Vec<LibKind>,
+        src_path: PathBuf,
+        edition: Edition,
+    ) -> Target {
+        Target {
+            kind: TargetKind::Lib(crate_targets),
+            name: name.to_string(),
+            doctest: true,
+            doc: true,
+            ..Target::with_path(src_path, edition)
+        }
+    }
+
+    pub fn bin_target(
+        name: &str,
+        src_path: PathBuf,
+        required_features: Option<Vec<String>>,
+        edition: Edition,
+    ) -> Target {
+        Target {
+            kind: TargetKind::Bin,
+            name: name.to_string(),
+            required_features,
+            doc: true,
+            ..Target::with_path(src_path, edition)
+        }
+    }
+
+    /// Builds a `Target` corresponding to the `build = "build.rs"` entry.
+    pub fn custom_build_target(
+        name: &str,
+        src_path: PathBuf,
+        edition: Edition,
+    ) -> Target {
+        Target {
+            kind: TargetKind::CustomBuild,
+            name: name.to_string(),
+            for_host: true,
+            benched: false,
+            tested: false,
+            ..Target::with_path(src_path, edition)
+        }
+    }
+
+    // Like `custom_build_target`, but for a generated (not on-disk) script.
+    pub fn metabuild_target(name: &str) -> Target {
+        Target {
+            kind: TargetKind::CustomBuild,
+            name: name.to_string(),
+            for_host: true,
+            benched: false,
+            tested: false,
+            ..Target::new(TargetSourcePath::Metabuild, Edition::Edition2015)
+        }
+    }
+
+    pub fn example_target(
+        name: &str,
+        crate_targets: Vec<LibKind>,
+        src_path: PathBuf,
+        required_features: Option<Vec<String>>,
+        edition: Edition,
+    ) -> Target {
+        // An example with explicit crate-types is a library example;
+        // otherwise it is a runnable binary example.
+        let kind = if crate_targets.is_empty() {
+            TargetKind::ExampleBin
+        } else {
+            TargetKind::ExampleLib(crate_targets)
+        };
+
+        Target {
+            kind,
+            name: name.to_string(),
+            required_features,
+            tested: false,
+            benched: false,
+            ..Target::with_path(src_path, edition)
+        }
+    }
+
+    pub fn test_target(
+        name: &str,
+        src_path: PathBuf,
+        required_features: Option<Vec<String>>,
+        edition: Edition,
+    ) -> Target {
+        Target {
+            kind: TargetKind::Test,
+            name: name.to_string(),
+            required_features,
+            benched: false,
+            ..Target::with_path(src_path, edition)
+        }
+    }
+
+    pub fn bench_target(
+        name: &str,
+        src_path: PathBuf,
+        required_features: Option<Vec<String>>,
+        edition: Edition,
+    ) -> Target {
+        Target {
+            kind: TargetKind::Bench,
+            name: name.to_string(),
+            required_features,
+            tested: false,
+            ..Target::with_path(src_path, edition)
+        }
+    }
+
+    pub fn name(&self) -> &str {
+        &self.name
+    }
+    // The name as rustc sees it: dashes become underscores.
+    pub fn crate_name(&self) -> String {
+        self.name.replace("-", "_")
+    }
+    pub fn src_path(&self) -> &TargetSourcePath {
+        &self.src_path
+    }
+    pub fn set_src_path(&mut self, src_path: TargetSourcePath) {
+        self.src_path = src_path;
+    }
+    pub fn required_features(&self) -> Option<&Vec<String>> {
+        self.required_features.as_ref()
+    }
+    pub fn kind(&self) -> &TargetKind {
+        &self.kind
+    }
+    pub fn tested(&self) -> bool {
+        self.tested
+    }
+    pub fn harness(&self) -> bool {
+        self.harness
+    }
+    pub fn documented(&self) -> bool {
+        self.doc
+    }
+    pub fn for_host(&self) -> bool {
+        self.for_host
+    }
+    pub fn edition(&self) -> Edition { self.edition }
+    pub fn benched(&self) -> bool {
+        self.benched
+    }
+    pub fn doctested(&self) -> bool {
+        self.doctest
+    }
+
+    // Only lib targets with an rlib/lib/proc-macro crate type are doc-tested.
+    pub fn doctestable(&self) -> bool {
+        match self.kind {
+            TargetKind::Lib(ref kinds) => kinds
+                .iter()
+                .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro),
+            _ => false,
+        }
+    }
+
+    pub fn allows_underscores(&self) -> bool {
+        self.is_bin() || self.is_example() || self.is_custom_build()
+    }
+
+    pub fn is_lib(&self) -> bool {
+        match self.kind {
+            TargetKind::Lib(_) => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_dylib(&self) -> bool {
+        match self.kind {
+            TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib),
+            _ => false,
+        }
+    }
+
+    pub fn is_cdylib(&self) -> bool {
+        let libs = match self.kind {
+            TargetKind::Lib(ref libs) => libs,
+            _ => return false,
+        };
+        // "cdylib" has no dedicated `LibKind` variant, so it shows up as `Other`.
+        libs.iter().any(|l| match *l {
+            LibKind::Other(ref s) => s == "cdylib",
+            _ => false,
+        })
+    }
+
+    pub fn linkable(&self) -> bool {
+        match self.kind {
+            TargetKind::Lib(ref kinds) => kinds.iter().any(|k| k.linkable()),
+            _ => false,
+        }
+    }
+
+    pub fn is_bin(&self) -> bool {
+        self.kind == TargetKind::Bin
+    }
+
+    pub fn is_example(&self) -> bool {
+        match self.kind {
+            TargetKind::ExampleBin | TargetKind::ExampleLib(..) => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_bin_example(&self) -> bool {
+        // Needed for --all-examples in contexts where only runnable examples make sense
+        match self.kind {
+            TargetKind::ExampleBin => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_test(&self) -> bool {
+        self.kind == TargetKind::Test
+    }
+    pub fn is_bench(&self) -> bool {
+        self.kind == TargetKind::Bench
+    }
+    pub fn is_custom_build(&self) -> bool {
+        self.kind == TargetKind::CustomBuild
+    }
+
+    /// Returns the arguments suitable for `--crate-type` to pass to rustc.
+    pub fn rustc_crate_types(&self) -> Vec<&str> {
+        match self.kind {
+            TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => {
+                kinds.iter().map(LibKind::crate_type).collect()
+            }
+            TargetKind::CustomBuild
+            | TargetKind::Bench
+            | TargetKind::Test
+            | TargetKind::ExampleBin
+            | TargetKind::Bin => vec!["bin"],
+        }
+    }
+
+    // NOTE(review): LTO is only allowed for libs whose crate types exclude
+    // rlib/dylib/lib (and for all non-lib targets) — confirm the rationale
+    // with the compile pipeline.
+    pub fn can_lto(&self) -> bool {
+        match self.kind {
+            TargetKind::Lib(ref v) => {
+                !v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib)
+                    && !v.contains(&LibKind::Lib)
+            }
+            _ => true,
+        }
+    }
+
+    pub fn set_tested(&mut self, tested: bool) -> &mut Target {
+        self.tested = tested;
+        self
+    }
+    pub fn set_benched(&mut self, benched: bool) -> &mut Target {
+        self.benched = benched;
+        self
+    }
+    pub fn set_doctest(&mut self, doctest: bool) -> &mut Target {
+        self.doctest = doctest;
+        self
+    }
+    pub fn set_for_host(&mut self, for_host: bool) -> &mut Target {
+        self.for_host = for_host;
+        self
+    }
+    pub fn set_edition(&mut self, edition: Edition) -> &mut Target {
+        self.edition = edition;
+        self
+    }
+    pub fn set_harness(&mut self, harness: bool) -> &mut Target {
+        self.harness = harness;
+        self
+    }
+    pub fn set_doc(&mut self, doc: bool) -> &mut Target {
+        self.doc = doc;
+        self
+    }
+}
+
+impl fmt::Display for Target {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self.kind {
+            TargetKind::Lib(..) => write!(f, "Target(lib)"),
+            TargetKind::Bin => write!(f, "Target(bin: {})", self.name),
+            TargetKind::Test => write!(f, "Target(test: {})", self.name),
+            TargetKind::Bench => write!(f, "Target(bench: {})", self.name),
+            TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {
+                write!(f, "Target(example: {})", self.name)
+            }
+            TargetKind::CustomBuild => write!(f, "Target(script)"),
+        }
+    }
+}
+
+impl Warnings {
+    fn new() -> Warnings {
+        Warnings(Vec::new())
+    }
+
+    pub fn add_warning(&mut self, s: String) {
+        self.0.push(DelayedWarning {
+            message: s,
+            is_critical: false,
+        })
+    }
+
+    pub fn add_critical_warning(&mut self, s: String) {
+        self.0.push(DelayedWarning {
+            message: s,
+            is_critical: true,
+        })
+    }
+
+    pub fn warnings(&self) -> &[DelayedWarning] {
+        &self.0
+    }
+}
diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs
new file mode 100644 (file)
index 0000000..3312ba3
--- /dev/null
@@ -0,0 +1,34 @@
+pub use self::dependency::Dependency;
+pub use self::features::{CliUnstable, Edition, Feature, Features};
+pub use self::features::{
+    maybe_allow_nightly_features,
+    enable_nightly_features,
+    nightly_features_allowed
+};
+pub use self::manifest::{EitherManifest, VirtualManifest};
+pub use self::manifest::{LibKind, Manifest, Target, TargetKind};
+pub use self::package::{Package, PackageSet};
+pub use self::package_id::PackageId;
+pub use self::package_id_spec::PackageIdSpec;
+pub use self::registry::Registry;
+pub use self::resolver::Resolve;
+pub use self::shell::{Shell, Verbosity};
+pub use self::source::{GitReference, Source, SourceId, SourceMap};
+pub use self::summary::{FeatureMap, FeatureValue, Summary};
+pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig};
+
+pub mod compiler;
+pub mod dependency;
+mod features;
+mod interning;
+pub mod manifest;
+pub mod package;
+pub mod package_id;
+mod package_id_spec;
+pub mod profiles;
+pub mod registry;
+pub mod resolver;
+pub mod shell;
+pub mod source;
+pub mod summary;
+mod workspace;
diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs
new file mode 100644 (file)
index 0000000..89908ad
--- /dev/null
@@ -0,0 +1,864 @@
+use std::cell::{Ref, RefCell, Cell};
+use std::collections::{HashMap, HashSet};
+use std::fmt;
+use std::hash;
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::time::{Instant, Duration};
+
+use bytesize::ByteSize;
+use curl;
+use curl_sys;
+use curl::easy::{Easy, HttpVersion};
+use curl::multi::{Multi, EasyHandle};
+use lazycell::LazyCell;
+use semver::Version;
+use serde::ser;
+use toml;
+
+use core::{Dependency, Manifest, PackageId, SourceId, Target};
+use core::{FeatureMap, SourceMap, Summary};
+use core::source::MaybePackage;
+use core::interning::InternedString;
+use ops;
+use util::{self, internal, lev_distance, Config, Progress, ProgressStyle};
+use util::errors::{CargoResult, CargoResultExt, HttpNot200};
+use util::network::Retry;
+
+/// Information about a package that is available somewhere in the file system.
+///
+/// A package is a `Cargo.toml` file plus all the files that are part of it.
+// TODO: Is manifest_path a relic?
+#[derive(Clone)]
+pub struct Package {
+    /// The package's manifest
+    manifest: Manifest,
+    /// Path to the manifest file (`Cargo.toml`); `root()` returns its parent directory
+    manifest_path: PathBuf,
+}
+
+/// A Package in a form where `Serialize` can be derived.
+///
+/// Every field borrows from the `Package`/`Manifest` being serialized, so
+/// building this view never clones package data.
+#[derive(Serialize)]
+struct SerializedPackage<'a> {
+    name: &'a str,
+    version: &'a str,
+    id: &'a PackageId,
+    license: Option<&'a str>,
+    license_file: Option<&'a str>,
+    description: Option<&'a str>,
+    source: &'a SourceId,
+    dependencies: &'a [Dependency],
+    targets: Vec<&'a Target>,
+    features: &'a FeatureMap,
+    manifest_path: &'a str,
+    metadata: Option<&'a toml::Value>,
+    authors: &'a [String],
+    categories: &'a [String],
+    keywords: &'a [String],
+    readme: Option<&'a str>,
+    repository: Option<&'a str>,
+    edition: &'a str,
+    // Omitted entirely (rather than emitted as `null`) when there's no metabuild.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    metabuild: Option<&'a Vec<String>>,
+}
+
+// Serializes a `Package` by flattening manifest/summary data into the
+// borrowed `SerializedPackage` view and serializing that.
+impl ser::Serialize for Package {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        let summary = self.manifest.summary();
+        let package_id = summary.package_id();
+        let manmeta = self.manifest.metadata();
+        let license = manmeta.license.as_ref().map(String::as_ref);
+        let license_file = manmeta.license_file.as_ref().map(String::as_ref);
+        let description = manmeta.description.as_ref().map(String::as_ref);
+        let authors = manmeta.authors.as_ref();
+        let categories = manmeta.categories.as_ref();
+        let keywords = manmeta.keywords.as_ref();
+        let readme = manmeta.readme.as_ref().map(String::as_ref);
+        let repository = manmeta.repository.as_ref().map(String::as_ref);
+        // Filter out metabuild targets. They are an internal implementation
+        // detail that is probably not relevant externally. There's also not a
+        // real path to show in `src_path`, and this avoids changing the format.
+        let targets: Vec<&Target> = self
+            .manifest
+            .targets()
+            .iter()
+            .filter(|t| t.src_path().is_path())
+            .collect();
+
+        SerializedPackage {
+            name: &*package_id.name(),
+            version: &package_id.version().to_string(),
+            id: package_id,
+            license,
+            license_file,
+            description,
+            source: summary.source_id(),
+            dependencies: summary.dependencies(),
+            targets,
+            features: summary.features(),
+            manifest_path: &self.manifest_path.display().to_string(),
+            metadata: self.manifest.custom_metadata(),
+            authors,
+            categories,
+            keywords,
+            readme,
+            repository,
+            edition: &self.manifest.edition().to_string(),
+            metabuild: self.manifest.metabuild(),
+        }.serialize(s)
+    }
+}
+
+impl Package {
+    /// Create a package from a manifest and its location
+    pub fn new(manifest: Manifest, manifest_path: &Path) -> Package {
+        Package {
+            manifest,
+            manifest_path: manifest_path.to_path_buf(),
+        }
+    }
+
+    /// Get the manifest dependencies
+    pub fn dependencies(&self) -> &[Dependency] {
+        self.manifest.dependencies()
+    }
+    /// Get the manifest
+    pub fn manifest(&self) -> &Manifest {
+        &self.manifest
+    }
+    /// Get the path to the manifest
+    pub fn manifest_path(&self) -> &Path {
+        &self.manifest_path
+    }
+    /// Get the name of the package
+    pub fn name(&self) -> InternedString {
+        self.package_id().name()
+    }
+    /// Get the PackageId object for the package (fully defines a package)
+    pub fn package_id(&self) -> &PackageId {
+        self.manifest.package_id()
+    }
+    /// Get the root folder of the package (the directory containing `Cargo.toml`)
+    pub fn root(&self) -> &Path {
+        self.manifest_path.parent().unwrap()
+    }
+    /// Get the summary for the package
+    pub fn summary(&self) -> &Summary {
+        self.manifest.summary()
+    }
+    /// Get the targets specified in the manifest
+    pub fn targets(&self) -> &[Target] {
+        self.manifest.targets()
+    }
+    /// Get the current package version
+    pub fn version(&self) -> &Version {
+        self.package_id().version()
+    }
+    /// Get the package authors
+    pub fn authors(&self) -> &Vec<String> {
+        &self.manifest.metadata().authors
+    }
+    /// Whether the package is set to publish
+    pub fn publish(&self) -> &Option<Vec<String>> {
+        self.manifest.publish()
+    }
+
+    /// Whether the package uses a custom build script for any target
+    pub fn has_custom_build(&self) -> bool {
+        self.targets().iter().any(|t| t.is_custom_build())
+    }
+
+    /// Finds the target whose name is closest to `target` (Levenshtein
+    /// distance strictly less than 4) among the targets accepted by
+    /// `is_expected_kind`. Used to suggest alternatives for typo'd
+    /// target names; returns `None` when nothing is close enough.
+    pub fn find_closest_target(
+        &self,
+        target: &str,
+        is_expected_kind: fn(&Target) -> bool,
+    ) -> Option<&Target> {
+        let targets = self.targets();
+
+        let matches = targets
+            .iter()
+            .filter(|t| is_expected_kind(t))
+            .map(|t| (lev_distance(target, t.name()), t))
+            .filter(|&(d, _)| d < 4);
+        matches.min_by_key(|t| t.0).map(|t| t.1)
+    }
+
+    /// Consumes the package and returns one whose manifest has occurrences
+    /// of the `to_replace` source id rewritten to `replace_with`
+    /// (delegates to `Manifest::map_source`).
+    pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package {
+        Package {
+            manifest: self.manifest.map_source(to_replace, replace_with),
+            manifest_path: self.manifest_path,
+        }
+    }
+
+    /// Renders the manifest, normalized for publishing, as the TOML that is
+    /// uploaded to a registry, prefixed with a comment header explaining
+    /// that the file is generated.
+    pub fn to_registry_toml(&self, config: &Config) -> CargoResult<String> {
+        let manifest = self.manifest().original().prepare_for_publish(config)?;
+        let toml = toml::to_string(&manifest)?;
+        Ok(format!(
+            "\
+             # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\
+             #\n\
+             # When uploading crates to the registry Cargo will automatically\n\
+             # \"normalize\" Cargo.toml files for maximal compatibility\n\
+             # with all versions of Cargo and also rewrite `path` dependencies\n\
+             # to registry (e.g. crates.io) dependencies\n\
+             #\n\
+             # If you believe there's an error in this file please file an\n\
+             # issue against the rust-lang/cargo repository. If you're\n\
+             # editing this file be aware that the upstream Cargo.toml\n\
+             # will likely look very different (and much more reasonable)\n\
+             \n\
+             {}\
+             ",
+            toml
+        ))
+    }
+}
+
+// Displays a package as its package id (name, version and source).
+impl fmt::Display for Package {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{}", self.summary().package_id())
+    }
+}
+
+// Compact `Debug`: shows only the package id, eliding the manifest contents.
+impl fmt::Debug for Package {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("Package")
+            .field("id", self.summary().package_id())
+            .field("..", &"..")
+            .finish()
+    }
+}
+
+// Equality and hashing both delegate to the package id, keeping the two
+// consistent with each other: packages with the same id compare equal and
+// hash alike regardless of any other in-memory state.
+impl PartialEq for Package {
+    fn eq(&self, other: &Package) -> bool {
+        self.package_id() == other.package_id()
+    }
+}
+
+impl Eq for Package {}
+
+impl hash::Hash for Package {
+    fn hash<H: hash::Hasher>(&self, into: &mut H) {
+        self.package_id().hash(into)
+    }
+}
+
+/// A set of packages, with each package lazily filled in as it is
+/// downloaded or otherwise becomes available.
+pub struct PackageSet<'cfg> {
+    packages: HashMap<PackageId, LazyCell<Package>>,
+    sources: RefCell<SourceMap<'cfg>>,
+    config: &'cfg Config,
+    /// Shared libcurl `Multi` handle driving all concurrent downloads.
+    multi: Multi,
+    /// Set while a `Downloads` session exists; guards against two
+    /// simultaneous sessions (see `enable_download` / `Downloads::drop`).
+    downloading: Cell<bool>,
+    /// Whether HTTP/2 multiplexing was enabled via `http.multiplexing`.
+    multiplexing: bool,
+}
+
+/// State for one batch of in-flight downloads on a `PackageSet`.
+pub struct Downloads<'a, 'cfg: 'a> {
+    set: &'a PackageSet<'cfg>,
+    /// In-flight transfers, keyed by the token stored in each `EasyHandle`.
+    pending: HashMap<usize, (Download<'cfg>, EasyHandle)>,
+    pending_ids: HashSet<PackageId>,
+    /// Finished-transfer results drained one at a time by `wait_for_curl`.
+    results: Vec<(usize, Result<(), curl::Error>)>,
+    /// Next token to hand out to a new download.
+    next: usize,
+    progress: RefCell<Option<Progress<'cfg>>>,
+    downloads_finished: usize,
+    downloaded_bytes: u64,
+    /// Size and crate name of the largest download, for the final summary.
+    largest: (u64, String),
+    start: Instant,
+    /// Set by `PackageSet::get_many` on success; when false, `Drop`
+    /// suppresses the summary so errors aren't cluttered.
+    success: bool,
+
+    /// Timeout management, both of timeout thresholds as well as whether or not
+    /// our connection has timed out (and accompanying message if it has).
+    ///
+    /// Note that timeout management is done manually here instead of in libcurl
+    /// because we want to apply timeouts to an entire batch of operations, not
+    /// any one particular single operation
+    timeout: ops::HttpTimeout, // timeout configuration
+    updated_at: Cell<Instant>, // last time we received bytes
+    next_speed_check: Cell<Instant>, // if threshold isn't 0 by this time, error
+    next_speed_check_bytes_threshold: Cell<u64>, // decremented when we receive bytes
+}
+
+/// Per-transfer bookkeeping for a single crate download.
+struct Download<'cfg> {
+    /// Token for this download, used as the key of the `Downloads::pending` map
+    /// and stored in `EasyHandle` as well.
+    token: usize,
+
+    /// Package that we're downloading
+    id: PackageId,
+
+    /// Actual downloaded data, updated throughout the lifetime of this download
+    data: RefCell<Vec<u8>>,
+
+    /// The URL that we're downloading from, cached here for error messages and
+    /// reenqueuing.
+    url: String,
+
+    /// A descriptive string to print when we've finished downloading this crate
+    descriptor: String,
+
+    /// Statistics updated from the progress callback in libcurl
+    total: Cell<u64>,
+    current: Cell<u64>,
+
+    /// The moment we started this transfer at
+    start: Instant,
+    /// Timeout message stashed by the progress callback when it aborts the
+    /// transfer; read back in `wait` to turn the abort into a timeout error.
+    timed_out: Cell<Option<String>>,
+
+    /// Logic used to track retrying this download if it's a spurious failure.
+    retry: Retry<'cfg>,
+}
+
+impl<'cfg> PackageSet<'cfg> {
+    /// Creates a set tracking `package_ids`, with a configured libcurl
+    /// `Multi` handle ready for downloading.
+    pub fn new(
+        package_ids: &[PackageId],
+        sources: SourceMap<'cfg>,
+        config: &'cfg Config,
+    ) -> CargoResult<PackageSet<'cfg>> {
+        // We've enabled the `http2` feature of `curl` in Cargo, so treat
+        // failures here as fatal as it would indicate a build-time problem.
+        //
+        // Note that the multiplexing support is pretty new so we're having it
+        // off-by-default temporarily.
+        //
+        // Also note that pipelining is disabled as curl authors have indicated
+        // that it's buggy, and we've empirically seen that it's buggy with HTTP
+        // proxies.
+        let mut multi = Multi::new();
+        let multiplexing = config.get::<Option<bool>>("http.multiplexing")?
+            .unwrap_or(false);
+        multi.pipelining(false, multiplexing)
+            .chain_err(|| "failed to enable multiplexing/pipelining in curl")?;
+
+        // let's not flood crates.io with connections
+        multi.set_max_host_connections(2)?;
+
+        Ok(PackageSet {
+            packages: package_ids
+                .iter()
+                .map(|id| (id.clone(), LazyCell::new()))
+                .collect(),
+            sources: RefCell::new(sources),
+            config,
+            multi,
+            downloading: Cell::new(false),
+            multiplexing,
+        })
+    }
+
+    /// Returns an iterator over every package id tracked by this set.
+    pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
+        Box::new(self.packages.keys())
+    }
+
+    /// Begins a new download session.
+    ///
+    /// Panics if a `Downloads` for this set is already live; the
+    /// `downloading` flag set here is cleared again in `Downloads::drop`.
+    pub fn enable_download<'a>(&'a self) -> CargoResult<Downloads<'a, 'cfg>> {
+        assert!(!self.downloading.replace(true));
+        let timeout = ops::HttpTimeout::new(self.config)?;
+        Ok(Downloads {
+            start: Instant::now(),
+            set: self,
+            next: 0,
+            pending: HashMap::new(),
+            pending_ids: HashSet::new(),
+            results: Vec::new(),
+            progress: RefCell::new(Some(Progress::with_style(
+                "Downloading",
+                ProgressStyle::Ratio,
+                self.config,
+            ))),
+            downloads_finished: 0,
+            downloaded_bytes: 0,
+            largest: (0, String::new()),
+            success: false,
+            updated_at: Cell::new(Instant::now()),
+            timeout,
+            next_speed_check: Cell::new(Instant::now()),
+            next_speed_check_bytes_threshold: Cell::new(0),
+        })
+    }
+
+    /// Fetches a single package, downloading it first if necessary.
+    pub fn get_one(&self, id: &PackageId) -> CargoResult<&Package> {
+        Ok(self.get_many(Some(id))?.remove(0))
+    }
+
+    /// Ensures every package in `ids` is available locally, downloading any
+    /// that aren't, and returns references to all of them.
+    pub fn get_many<'a>(&self, ids: impl IntoIterator<Item = &'a PackageId>)
+        -> CargoResult<Vec<&Package>>
+    {
+        let mut pkgs = Vec::new();
+        let mut downloads = self.enable_download()?;
+        for id in ids {
+            pkgs.extend(downloads.start(id)?);
+        }
+        while downloads.remaining() > 0 {
+            pkgs.push(downloads.wait()?);
+        }
+        // Mark the session successful so `Drop` prints the summary.
+        downloads.success = true;
+        Ok(pkgs)
+    }
+
+    /// Borrows the map of sources backing this set.
+    pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
+        self.sources.borrow()
+    }
+}
+
+impl<'a, 'cfg> Downloads<'a, 'cfg> {
+    /// Starts to download the package for the `id` specified.
+    ///
+    /// Returns `None` if the package is queued up for download and will
+    /// eventually be returned from `wait`. Returns `Some(pkg)` if
+    /// the package is ready and doesn't need to be downloaded.
+    pub fn start(&mut self, id: &PackageId) -> CargoResult<Option<&'a Package>> {
+        // First up see if we've already cached this package, in which case
+        // there's nothing to do.
+        let slot = self.set.packages
+            .get(id)
+            .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
+        if let Some(pkg) = slot.borrow() {
+            return Ok(Some(pkg));
+        }
+
+        // Ask the original source of this `PackageId` for the corresponding
+        // package. That may immediately come back and tell us that the package
+        // is ready, or it could tell us that it needs to be downloaded.
+        let mut sources = self.set.sources.borrow_mut();
+        let source = sources
+            .get_mut(id.source_id())
+            .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
+        let pkg = source
+            .download(id)
+            .chain_err(|| format_err!("unable to get packages from source"))?;
+        let (url, descriptor) = match pkg {
+            MaybePackage::Ready(pkg) => {
+                debug!("{} doesn't need a download", id);
+                assert!(slot.fill(pkg).is_ok());
+                return Ok(Some(slot.borrow().unwrap()))
+            }
+            MaybePackage::Download { url, descriptor } => (url, descriptor),
+        };
+
+        // Ok we're going to download this crate, so let's set up all our
+        // internal state and hand off an `Easy` handle to our libcurl `Multi`
+        // handle. This won't actually start the transfer, but later it'll
+        // happen during `wait`
+        let token = self.next;
+        self.next += 1;
+        debug!("downloading {} as {}", id, token);
+        assert!(self.pending_ids.insert(id.clone()));
+
+        let (mut handle, _timeout) = ops::http_handle_and_timeout(self.set.config)?;
+        handle.get(true)?;
+        handle.url(&url)?;
+        handle.follow_location(true)?; // follow redirects
+
+        // Enable HTTP/2 to be used as it'll allow true multiplexing which makes
+        // downloads much faster. Currently Cargo requests the `http2` feature
+        // of the `curl` crate which means it should always be built in, so
+        // treat it as a fatal error if http/2 support isn't found.
+        if self.set.multiplexing {
+            handle.http_version(HttpVersion::V2)
+                .chain_err(|| "failed to enable HTTP2, is curl not built right?")?;
+        }
+
+        // This is an option to `libcurl` which indicates that if there's a
+        // bunch of parallel requests to the same host they all wait until the
+        // pipelining status of the host is known. This means that we won't
+        // initiate dozens of connections to crates.io, but rather only one.
+        // Once the main one is opened we realized that pipelining is possible
+        // and multiplexing is possible with static.crates.io. All in all this
+        // reduces the number of connections done to a more manageable state.
+        handle.pipewait(true)?;
+
+        // The write callback can't capture `&self` (the handle outlives this
+        // call inside `self.pending`), so it reaches the live `Downloads`
+        // through the thread-local pointer managed by the `tls` module.
+        handle.write_function(move |buf| {
+            debug!("{} - {} bytes of data", token, buf.len());
+            tls::with(|downloads| {
+                if let Some(downloads) = downloads {
+                    downloads.pending[&token].0.data
+                        .borrow_mut()
+                        .extend_from_slice(buf);
+                }
+            });
+            Ok(buf.len())
+        })?;
+
+        handle.progress(true)?;
+        handle.progress_function(move |dl_total, dl_cur, _, _| {
+            tls::with(|downloads| {
+                match downloads {
+                    Some(d) => d.progress(token, dl_total as u64, dl_cur as u64),
+                    None => false,
+                }
+            })
+        })?;
+
+        // If the progress bar isn't enabled then it may be awhile before the
+        // first crate finishes downloading so we inform immediately that we're
+        // downloading crates here.
+        if self.downloads_finished == 0 &&
+            self.pending.len() == 0 &&
+            !self.progress.borrow().as_ref().unwrap().is_enabled()
+        {
+            self.set.config.shell().status("Downloading", "crates ...")?;
+        }
+
+        let dl = Download {
+            token,
+            data: RefCell::new(Vec::new()),
+            id: id.clone(),
+            url,
+            descriptor,
+            total: Cell::new(0),
+            current: Cell::new(0),
+            start: Instant::now(),
+            timed_out: Cell::new(None),
+            retry: Retry::new(self.set.config)?,
+        };
+        self.enqueue(dl, handle)?;
+        self.tick(WhyTick::DownloadStarted)?;
+
+        Ok(None)
+    }
+
+    /// Returns the number of crates that are still downloading
+    pub fn remaining(&self) -> usize {
+        self.pending.len()
+    }
+
+    /// Blocks the current thread waiting for a package to finish downloading.
+    ///
+    /// This method will wait for a previously enqueued package to finish
+    /// downloading and return a reference to it after it's done downloading.
+    ///
+    /// # Panics
+    ///
+    /// This function will panic if there are no remaining downloads.
+    pub fn wait(&mut self) -> CargoResult<&'a Package> {
+        let (dl, data) = loop {
+            assert_eq!(self.pending.len(), self.pending_ids.len());
+            let (token, result) = self.wait_for_curl()?;
+            debug!("{} finished with {:?}", token, result);
+
+            let (mut dl, handle) = self.pending.remove(&token)
+                .expect("got a token for a non-in-progress transfer");
+            let data = mem::replace(&mut *dl.data.borrow_mut(), Vec::new());
+            let mut handle = self.set.multi.remove(handle)?;
+            self.pending_ids.remove(&dl.id);
+
+            // Check if this was a spurious error. If it was a spurious error
+            // then we want to re-enqueue our request for another attempt and
+            // then we wait for another request to finish.
+            let ret = {
+                let timed_out = &dl.timed_out;
+                let url = &dl.url;
+                dl.retry.try(|| {
+                    if let Err(e) = result {
+                        // If this error is "aborted by callback" then that's
+                        // probably because our progress callback aborted due to
+                        // a timeout. We'll find out by looking at the
+                        // `timed_out` field, looking for a descriptive message.
+                        // If one is found we switch the error code (to ensure
+                        // it's flagged as spurious) and then attach our extra
+                        // information to the error.
+                        if !e.is_aborted_by_callback() {
+                            return Err(e.into())
+                        }
+
+                        return Err(match timed_out.replace(None) {
+                            Some(msg) => {
+                                let code = curl_sys::CURLE_OPERATION_TIMEDOUT;
+                                let mut err = curl::Error::new(code);
+                                err.set_extra(msg);
+                                err
+                            }
+                            None => e,
+                        }.into())
+                    }
+
+                    let code = handle.response_code()?;
+                    if code != 200 && code != 0 {
+                        let url = handle.effective_url()?.unwrap_or(url);
+                        return Err(HttpNot200 {
+                            code,
+                            url: url.to_string(),
+                        }.into())
+                    }
+                    Ok(())
+                }).chain_err(|| {
+                    format!("failed to download from `{}`", dl.url)
+                })?
+            };
+            match ret {
+                Some(()) => break (dl, data),
+                None => {
+                    // Spurious failure: put the id back and retry the transfer.
+                    self.pending_ids.insert(dl.id.clone());
+                    self.enqueue(dl, handle)?
+                }
+            }
+        };
+
+        // If the progress bar isn't enabled then we still want to provide some
+        // semblance of progress of how we're downloading crates, and if the
+        // progress bar is enabled this provides a good log of what's happening.
+        self.progress.borrow_mut().as_mut().unwrap().clear();
+        self.set.config.shell().status("Downloaded", &dl.descriptor)?;
+
+        self.downloads_finished += 1;
+        self.downloaded_bytes += dl.total.get();
+        if dl.total.get() > self.largest.0 {
+            self.largest = (dl.total.get(), dl.id.name().to_string());
+        }
+
+        // We're about to synchronously extract the crate below. While we're
+        // doing that our download progress won't actually be updated, nor do we
+        // have a great view into the progress of the extraction. Let's prepare
+        // the user for this CPU-heavy step if it looks like it'll take some
+        // time to do so.
+        if dl.total.get() < ByteSize::kb(400).0 {
+            self.tick(WhyTick::DownloadFinished)?;
+        } else {
+            self.tick(WhyTick::Extracting(&dl.id.name()))?;
+        }
+
+        // Inform the original source that the download is finished which
+        // should allow us to actually get the package and fill it in now.
+        let mut sources = self.set.sources.borrow_mut();
+        let source = sources
+            .get_mut(dl.id.source_id())
+            .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?;
+        let start = Instant::now();
+        let pkg = source.finish_download(&dl.id, data)?;
+
+        // Assume that no time has passed while we were calling
+        // `finish_download`, update all speed checks and timeout limits of all
+        // active downloads to make sure they don't fire because of a slowly
+        // extracted tarball.
+        let finish_dur = start.elapsed();
+        self.updated_at.set(self.updated_at.get() + finish_dur);
+        self.next_speed_check.set(self.next_speed_check.get() + finish_dur);
+
+        let slot = &self.set.packages[&dl.id];
+        assert!(slot.fill(pkg).is_ok());
+        Ok(slot.borrow().unwrap())
+    }
+
+    /// Hands `handle` to the libcurl `Multi` handle and resets this
+    /// transfer's timeout/speed-check bookkeeping before tracking it in
+    /// `pending`. Used both for fresh downloads and for retried ones.
+    fn enqueue(&mut self, dl: Download<'cfg>, handle: Easy) -> CargoResult<()> {
+        let mut handle = self.set.multi.add(handle)?;
+        let now = Instant::now();
+        handle.set_token(dl.token)?;
+        self.updated_at.set(now);
+        self.next_speed_check.set(now + self.timeout.dur);
+        self.next_speed_check_bytes_threshold.set(self.timeout.low_speed_limit as u64);
+        dl.timed_out.set(None);
+        dl.current.set(0);
+        dl.total.set(0);
+        self.pending.insert(dl.token, (dl, handle));
+        Ok(())
+    }
+
+    /// Drives libcurl until one transfer finishes, returning its token and
+    /// result.
+    fn wait_for_curl(&mut self) -> CargoResult<(usize, Result<(), curl::Error>)> {
+        // This is the main workhorse loop. We use libcurl's portable `wait`
+        // method to actually perform blocking. This isn't necessarily too
+        // efficient in terms of fd management, but we should only be juggling
+        // a few anyway.
+        //
+        // Here we start off by asking the `multi` handle to do some work via
+        // the `perform` method. This will actually do I/O work (nonblocking)
+        // and attempt to make progress. Afterwards we ask about the `messages`
+        // contained in the handle which will inform us if anything has finished
+        // transferring.
+        //
+        // If we've got a finished transfer after all that work we break out
+        // and process the finished transfer at the end. Otherwise we need to
+        // actually block waiting for I/O to happen, which we achieve with the
+        // `wait` method on `multi`.
+        loop {
+            let n = tls::set(self, || {
+                self.set.multi.perform()
+                    .chain_err(|| "failed to perform http requests")
+            })?;
+            debug!("handles remaining: {}", n);
+            let results = &mut self.results;
+            let pending = &self.pending;
+            self.set.multi.messages(|msg| {
+                let token = msg.token().expect("failed to read token");
+                let handle = &pending[&token].1;
+                if let Some(result) = msg.result_for(&handle) {
+                    results.push((token, result));
+                } else {
+                    debug!("message without a result (?)");
+                }
+            });
+
+            if let Some(pair) = results.pop() {
+                break Ok(pair)
+            }
+            assert!(self.pending.len() > 0);
+            let timeout = self.set.multi.get_timeout()?
+                .unwrap_or(Duration::new(5, 0));
+            self.set.multi.wait(&mut [], timeout)
+                .chain_err(|| "failed to wait on curl `Multi`")?;
+        }
+    }
+
+    /// Progress callback for the transfer identified by `token`.
+    ///
+    /// Returning `false` makes libcurl abort the transfer; `wait` then maps
+    /// the resulting "aborted by callback" error back into the timeout
+    /// message stashed in `timed_out` here.
+    fn progress(&self, token: usize, total: u64, cur: u64) -> bool {
+        let dl = &self.pending[&token].0;
+        dl.total.set(total);
+        let now = Instant::now();
+        if cur != dl.current.get() {
+            let delta = cur - dl.current.get();
+            let threshold = self.next_speed_check_bytes_threshold.get();
+
+            dl.current.set(cur);
+            self.updated_at.set(now);
+
+            if delta >= threshold {
+                self.next_speed_check.set(now + self.timeout.dur);
+                self.next_speed_check_bytes_threshold.set(
+                    self.timeout.low_speed_limit as u64,
+                );
+            } else {
+                self.next_speed_check_bytes_threshold.set(threshold - delta);
+            }
+        }
+        if !self.tick(WhyTick::DownloadUpdate).is_ok() {
+            return false
+        }
+
+        // If we've spent too long not actually receiving any data we time out.
+        if now - self.updated_at.get() > self.timeout.dur {
+            self.updated_at.set(now);
+            let msg = format!("failed to download any data for `{}` within {}s",
+                              dl.id,
+                              self.timeout.dur.as_secs());
+            dl.timed_out.set(Some(msg));
+            return false
+        }
+
+        // If we reached the point in time that we need to check our speed
+        // limit, see if we've transferred enough data during this threshold. If
+        // it fails this check then we fail because the download is going too
+        // slowly.
+        if now >= self.next_speed_check.get() {
+            self.next_speed_check.set(now + self.timeout.dur);
+            assert!(self.next_speed_check_bytes_threshold.get() > 0);
+            let msg = format!("download of `{}` failed to transfer more \
+                               than {} bytes in {}s",
+                              dl.id,
+                              self.timeout.low_speed_limit,
+                              self.timeout.dur.as_secs());
+            dl.timed_out.set(Some(msg));
+            return false
+        }
+
+        true
+    }
+
+    /// Redraws the progress line. `DownloadUpdate` ticks are rate-limited
+    /// through `update_allowed`; other reasons always print.
+    fn tick(&self, why: WhyTick) -> CargoResult<()> {
+        let mut progress = self.progress.borrow_mut();
+        let progress = progress.as_mut().unwrap();
+
+        if let WhyTick::DownloadUpdate = why {
+            if !progress.update_allowed() {
+                return Ok(())
+            }
+        }
+        let mut msg = format!("{} crates", self.pending.len());
+        match why {
+            WhyTick::Extracting(krate) => {
+                msg.push_str(&format!(", extracting {} ...", krate));
+            }
+            _ => {
+                let mut dur = Duration::new(0, 0);
+                let mut remaining = 0;
+                for (dl, _) in self.pending.values() {
+                    dur += dl.start.elapsed();
+                    // If the total/current look weird just throw out the data
+                    // point, sounds like curl has more to learn before we have
+                    // the true information.
+                    if dl.total.get() >= dl.current.get() {
+                        remaining += dl.total.get() - dl.current.get();
+                    }
+                }
+                if remaining > 0 && dur > Duration::from_millis(500) {
+                    msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining)));
+                }
+            }
+        }
+        progress.print_now(&msg)
+    }
+}
+
+/// Reason a progress-line refresh was requested; `Extracting` carries the
+/// name of the crate being unpacked.
+enum WhyTick<'a> {
+    DownloadStarted,
+    DownloadUpdate,
+    DownloadFinished,
+    Extracting(&'a str),
+}
+
+// Ends the download session: clears the owning set's `downloading` flag and,
+// when a progress bar was in use and the session succeeded, prints a final
+// summary (crate count, total bytes, elapsed time, largest crate).
+impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> {
+    fn drop(&mut self) {
+        self.set.downloading.set(false);
+        let progress = self.progress.get_mut().take().unwrap();
+        // Don't print a download summary if we're not using a progress bar,
+        // we've already printed lots of `Downloading...` items.
+        if !progress.is_enabled() {
+            return
+        }
+        // If we didn't download anything, no need for a summary
+        if self.downloads_finished == 0 {
+            return
+        }
+        // If an error happened, let's not clutter up the output
+        if !self.success {
+            return
+        }
+        let mut status = format!("{} crates ({}) in {}",
+                                 self.downloads_finished,
+                                 ByteSize(self.downloaded_bytes),
+                                 util::elapsed(self.start.elapsed()));
+        if self.largest.0 > ByteSize::mb(1).0 {
+            status.push_str(&format!(
+                " (largest was `{}` at {})",
+                self.largest.1,
+                ByteSize(self.largest.0),
+            ));
+        }
+        // Clear progress before displaying final summary.
+        drop(progress);
+        drop(self.set.config.shell().status("Downloaded", status));
+    }
+}
+
+// Thread-local stash of a raw pointer to the active `Downloads`, letting
+// libcurl callbacks (which can't borrow `self`) reach the session state
+// while `Multi::perform` is running.
+mod tls {
+    use std::cell::Cell;
+
+    use super::Downloads;
+
+    // 0 means "no active session"; otherwise a `*const Downloads` cast to usize.
+    thread_local!(static PTR: Cell<usize> = Cell::new(0));
+
+    pub(crate) fn with<R>(f: impl FnOnce(Option<&Downloads>) -> R) -> R {
+        let ptr = PTR.with(|p| p.get());
+        if ptr == 0 {
+            f(None)
+        } else {
+            // SAFETY: `PTR` is only non-zero while `set` is on the stack with
+            // a live `&Downloads` (its `Reset` guard restores the previous
+            // value on exit, even on panic), so the pointer is valid for the
+            // duration of `f`.
+            unsafe {
+                f(Some(&*(ptr as *const Downloads)))
+            }
+        }
+    }
+
+    pub(crate) fn set<R>(dl: &Downloads, f: impl FnOnce() -> R) -> R {
+        // Guard that restores the previous cell value when dropped.
+        struct Reset<'a, T: Copy + 'a>(&'a Cell<T>, T);
+
+        impl<'a, T: Copy> Drop for Reset<'a, T> {
+            fn drop(&mut self) {
+                self.0.set(self.1);
+            }
+        }
+
+        PTR.with(|p| {
+            let _reset = Reset(p, p.get());
+            p.set(dl as *const Downloads as usize);
+            f()
+        })
+    }
+}
diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs
new file mode 100644 (file)
index 0000000..568bef5
--- /dev/null
@@ -0,0 +1,198 @@
+use std::cmp::Ordering;
+use std::fmt::{self, Formatter};
+use std::hash::Hash;
+use std::hash;
+use std::path::Path;
+use std::sync::Arc;
+
+use semver;
+use serde::de;
+use serde::ser;
+
+use util::{CargoResult, ToSemver};
+use core::source::SourceId;
+use core::interning::InternedString;
+
/// Identifier for a specific version of a package in a specific source.
#[derive(Clone)]
pub struct PackageId {
    // Shared immutable payload; the `Arc` makes `PackageId` cheap to clone.
    inner: Arc<PackageIdInner>,
}

// Field order matters: the derived `PartialOrd`/`Ord` compare by name first,
// then version, then source id.
#[derive(PartialEq, PartialOrd, Eq, Ord)]
struct PackageIdInner {
    name: InternedString,
    version: semver::Version,
    source_id: SourceId,
}
+
+impl ser::Serialize for PackageId {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        s.collect_str(&format_args!(
+            "{} {} ({})",
+            self.inner.name,
+            self.inner.version,
+            self.inner.source_id.to_url()
+        ))
+    }
+}
+
+impl<'de> de::Deserialize<'de> for PackageId {
+    fn deserialize<D>(d: D) -> Result<PackageId, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        let string = String::deserialize(d)?;
+        let mut s = string.splitn(3, ' ');
+        let name = s.next().unwrap();
+        let version = match s.next() {
+            Some(s) => s,
+            None => return Err(de::Error::custom("invalid serialized PackageId")),
+        };
+        let version = semver::Version::parse(version).map_err(de::Error::custom)?;
+        let url = match s.next() {
+            Some(s) => s,
+            None => return Err(de::Error::custom("invalid serialized PackageId")),
+        };
+        let url = if url.starts_with('(') && url.ends_with(')') {
+            &url[1..url.len() - 1]
+        } else {
+            return Err(de::Error::custom("invalid serialized PackageId"));
+        };
+        let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;
+
+        Ok(PackageId {
+            inner: Arc::new(PackageIdInner {
+                name: InternedString::new(name),
+                version,
+                source_id,
+            }),
+        })
+    }
+}
+
+impl Hash for PackageId {
+    fn hash<S: hash::Hasher>(&self, state: &mut S) {
+        self.inner.name.hash(state);
+        self.inner.version.hash(state);
+        self.inner.source_id.hash(state);
+    }
+}
+
// All comparison traits delegate to `PackageIdInner` (which derives them),
// so two `PackageId`s compare by (name, version, source_id) even when they
// point at different `Arc` allocations.
impl PartialEq for PackageId {
    fn eq(&self, other: &PackageId) -> bool {
        (*self.inner).eq(&*other.inner)
    }
}
impl PartialOrd for PackageId {
    fn partial_cmp(&self, other: &PackageId) -> Option<Ordering> {
        (*self.inner).partial_cmp(&*other.inner)
    }
}
impl Eq for PackageId {}
impl Ord for PackageId {
    fn cmp(&self, other: &PackageId) -> Ordering {
        (*self.inner).cmp(&*other.inner)
    }
}
+
+impl PackageId {
+    pub fn new<T: ToSemver>(name: &str, version: T, sid: &SourceId) -> CargoResult<PackageId> {
+        let v = version.to_semver()?;
+        Ok(PackageId {
+            inner: Arc::new(PackageIdInner {
+                name: InternedString::new(name),
+                version: v,
+                source_id: sid.clone(),
+            }),
+        })
+    }
+
+    pub fn name(&self) -> InternedString {
+        self.inner.name
+    }
+    pub fn version(&self) -> &semver::Version {
+        &self.inner.version
+    }
+    pub fn source_id(&self) -> &SourceId {
+        &self.inner.source_id
+    }
+
+    pub fn with_precise(&self, precise: Option<String>) -> PackageId {
+        PackageId {
+            inner: Arc::new(PackageIdInner {
+                name: self.inner.name,
+                version: self.inner.version.clone(),
+                source_id: self.inner.source_id.with_precise(precise),
+            }),
+        }
+    }
+
+    pub fn with_source_id(&self, source: &SourceId) -> PackageId {
+        PackageId {
+            inner: Arc::new(PackageIdInner {
+                name: self.inner.name,
+                version: self.inner.version.clone(),
+                source_id: source.clone(),
+            }),
+        }
+    }
+
+    pub fn stable_hash<'a>(&'a self, workspace: &'a Path) -> PackageIdStableHash<'a> {
+        PackageIdStableHash(self, workspace)
+    }
+}
+
/// A `PackageId` paired with a workspace root path, so the source id can be
/// hashed relative to that root and the hash stays stable across checkouts.
pub struct PackageIdStableHash<'a>(&'a PackageId, &'a Path);

impl<'a> Hash for PackageIdStableHash<'a> {
    fn hash<S: hash::Hasher>(&self, state: &mut S) {
        self.0.inner.name.hash(state);
        self.0.inner.version.hash(state);
        // The source id's stable hash takes the workspace path so path-based
        // sources hash independently of the absolute checkout location.
        self.0.inner.source_id.stable_hash(self.1, state);
    }
}
+
impl fmt::Display for PackageId {
    // Renders as `name vX.Y.Z`, appending ` (<source>)` for any source
    // other than the default registry.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{} v{}", self.inner.name, self.inner.version)?;

        if !self.inner.source_id.is_default_registry() {
            write!(f, " ({})", self.inner.source_id)?;
        }

        Ok(())
    }
}

impl fmt::Debug for PackageId {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // Version and source are rendered via their `Display` form
        // (`to_string`) so each field stays on one readable line.
        f.debug_struct("PackageId")
            .field("name", &self.inner.name)
            .field("version", &self.inner.version.to_string())
            .field("source", &self.inner.source_id.to_string())
            .finish()
    }
}
+
#[cfg(test)]
mod tests {
    use super::PackageId;
    use core::source::SourceId;
    use sources::CRATES_IO_INDEX;
    use util::ToUrl;

    // `PackageId::new` must reject anything that isn't a complete semver
    // version: partial versions, arbitrary strings, and empty strings all
    // produce an error rather than a panic.
    #[test]
    fn invalid_version_handled_nicely() {
        let loc = CRATES_IO_INDEX.to_url().unwrap();
        let repo = SourceId::for_registry(&loc).unwrap();

        assert!(PackageId::new("foo", "1.0", &repo).is_err());
        assert!(PackageId::new("foo", "1", &repo).is_err());
        assert!(PackageId::new("foo", "bar", &repo).is_err());
        assert!(PackageId::new("foo", "", &repo).is_err());
    }
}
diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs
new file mode 100644 (file)
index 0000000..8acc654
--- /dev/null
@@ -0,0 +1,378 @@
+use std::collections::HashMap;
+use std::fmt;
+
+use semver::Version;
+use serde::{de, ser};
+use url::Url;
+
+use core::PackageId;
+use util::{ToSemver, ToUrl};
+use util::errors::{CargoResult, CargoResultExt};
+
+/// Some or all of the data required to identify a package:
+///
+///  1. the package name (a `String`, required)
+///  2. the package version (a `Version`, optional)
+///  3. the package source (a `Url`, optional)
+///
+/// If any of the optional fields are omitted, then the package id may be ambiguous, there may be
+/// more than one package/version/url combo that will match. However, often just the name is
+/// sufficient to uniquely define a package id.
#[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]
pub struct PackageIdSpec {
    // Package name; the only mandatory component of a spec.
    name: String,
    // Exact version to match, if the spec pinned one.
    version: Option<Version>,
    // Source URL to match, if the spec pinned one.
    url: Option<Url>,
}
+
+impl PackageIdSpec {
    /// Parses a spec string and returns a `PackageIdSpec` if the string was valid.
    ///
    /// # Examples
    /// Some examples of valid strings
    ///
    /// ```
    /// use cargo::core::PackageIdSpec;
    ///
    /// let specs = vec![
    ///     "http://crates.io/foo#1.2.3",
    ///     "http://crates.io/foo#bar:1.2.3",
    ///     "crates.io/foo",
    ///     "crates.io/foo#1.2.3",
    ///     "crates.io/foo#bar",
    ///     "crates.io/foo#bar:1.2.3",
    ///     "foo",
    ///     "foo:1.2.3",
    /// ];
    /// for spec in specs {
    ///     assert!(PackageIdSpec::parse(spec).is_ok());
    /// }
    /// ```
    pub fn parse(spec: &str) -> CargoResult<PackageIdSpec> {
        // URL-ish specs (anything containing a slash) are delegated to
        // `from_url`; bare `host/path` forms are retried with a synthetic
        // `cargo://` scheme so `Url::parse` accepts them.
        if spec.contains('/') {
            if let Ok(url) = spec.to_url() {
                return PackageIdSpec::from_url(url);
            }
            if !spec.contains("://") {
                if let Ok(url) = Url::parse(&format!("cargo://{}", spec)) {
                    return PackageIdSpec::from_url(url);
                }
            }
        }
        // Plain `name` or `name:version` form.
        let mut parts = spec.splitn(2, ':');
        let name = parts.next().unwrap();
        let version = match parts.next() {
            Some(version) => Some(Version::parse(version)?),
            None => None,
        };
        for ch in name.chars() {
            if !ch.is_alphanumeric() && ch != '_' && ch != '-' {
                bail!("invalid character in pkgid `{}`: `{}`", spec, ch)
            }
        }
        Ok(PackageIdSpec {
            name: name.to_string(),
            version,
            url: None,
        })
    }
+
+    /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)`
+    pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId>
+    where
+        I: IntoIterator<Item = &'a PackageId>,
+    {
+        let spec = PackageIdSpec::parse(spec)
+            .chain_err(|| format_err!("invalid package id specification: `{}`", spec))?;
+        spec.query(i)
+    }
+
    /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `Version` and `Url`
    /// fields filled in. Such a spec is fully qualified: it matches exactly
    /// that package.
    pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec {
        PackageIdSpec {
            name: package_id.name().to_string(),
            version: Some(package_id.version().clone()),
            url: Some(package_id.source_id().url().clone()),
        }
    }

    /// Tries to convert a valid `Url` to a `PackageIdSpec`.
    fn from_url(mut url: Url) -> CargoResult<PackageIdSpec> {
        if url.query().is_some() {
            bail!("cannot have a query string in a pkgid: {}", url)
        }
        // Detach the fragment; the remaining URL identifies the source.
        let frag = url.fragment().map(|s| s.to_owned());
        url.set_fragment(None);
        let (name, version) = {
            let mut path = url.path_segments()
                .ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?;
            // Default package name: the last path segment of the URL.
            let path_name = path.next_back().ok_or_else(|| {
                format_err!(
                    "pkgid urls must have at least one path \
                     component: {}",
                    url
                )
            })?;
            // The fragment may be `name`, `version`, or `name:version`.
            match frag {
                Some(fragment) => {
                    let mut parts = fragment.splitn(2, ':');
                    let name_or_version = parts.next().unwrap();
                    match parts.next() {
                        Some(part) => {
                            let version = part.to_semver()?;
                            (name_or_version.to_string(), Some(version))
                        }
                        None => {
                            // A lone fragment is disambiguated by its first
                            // character: alphabetic means a package name,
                            // anything else is parsed as a version.
                            if name_or_version.chars().next().unwrap().is_alphabetic() {
                                (name_or_version.to_string(), None)
                            } else {
                                let version = name_or_version.to_semver()?;
                                (path_name.to_string(), Some(version))
                            }
                        }
                    }
                }
                None => (path_name.to_string(), None),
            }
        };
        Ok(PackageIdSpec {
            name,
            version,
            url: Some(url),
        })
    }

    /// The package name this spec requires.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// The exact version this spec requires, if it pinned one.
    pub fn version(&self) -> Option<&Version> {
        self.version.as_ref()
    }

    /// The source URL this spec requires, if it pinned one.
    pub fn url(&self) -> Option<&Url> {
        self.url.as_ref()
    }

    pub fn set_url(&mut self, url: Url) {
        self.url = Some(url);
    }
+
+    /// Checks whether the given `PackageId` matches the `PackageIdSpec`.
+    pub fn matches(&self, package_id: &PackageId) -> bool {
+        if self.name() != &*package_id.name() {
+            return false;
+        }
+
+        if let Some(ref v) = self.version {
+            if v != package_id.version() {
+                return false;
+            }
+        }
+
+        match self.url {
+            Some(ref u) => u == package_id.source_id().url(),
+            None => true,
+        }
+    }
+
    /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or
    /// more are found, then this returns an error.
    pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId>
    where
        I: IntoIterator<Item = &'a PackageId>,
    {
        let mut ids = i.into_iter().filter(|p| self.matches(*p));
        // Zero matches is its own error; exactly one is the success case.
        let ret = match ids.next() {
            Some(id) => id,
            None => bail!(
                "package id specification `{}` \
                 matched no packages",
                self
            ),
        };
        return match ids.next() {
            Some(other) => {
                // Two or more matches: list every matching package so the
                // user can disambiguate with `-p <spec>`.
                let mut msg = format!(
                    "There are multiple `{}` packages in \
                     your project, and the specification \
                     `{}` is ambiguous.\n\
                     Please re-run this command \
                     with `-p <spec>` where `<spec>` is one \
                     of the following:",
                    self.name(),
                    self
                );
                let mut vec = vec![ret, other];
                vec.extend(ids);
                minimize(&mut msg, &vec, self);
                Err(format_err!("{}", msg))
            }
            None => Ok(ret),
        };

        // Renders each candidate as the shortest unambiguous suggestion:
        // `name:version` when that version is unique among the candidates,
        // otherwise the fully-qualified spec (including the URL).
        fn minimize(msg: &mut String, ids: &[&PackageId], spec: &PackageIdSpec) {
            let mut version_cnt = HashMap::new();
            for id in ids {
                *version_cnt.entry(id.version()).or_insert(0) += 1;
            }
            for id in ids {
                if version_cnt[id.version()] == 1 {
                    msg.push_str(&format!("\n  {}:{}", spec.name(), id.version()));
                } else {
                    msg.push_str(&format!("\n  {}", PackageIdSpec::from_package_id(*id)));
                }
            }
        }
    }
+}
+
impl fmt::Display for PackageIdSpec {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut printed_name = false;
        match self.url {
            Some(ref url) => {
                // `cargo://` is the synthetic scheme added by `parse`; strip
                // it back off for display.
                if url.scheme() == "cargo" {
                    write!(f, "{}{}", url.host().unwrap(), url.path())?;
                } else {
                    write!(f, "{}", url)?;
                }
                // Only print the name when it differs from the URL's last
                // path segment (e.g. `crates.io/foo#bar`).
                // NOTE(review): the `unwrap`s assume specs built via
                // `parse`/`from_url` always carry a host and a path segment —
                // confirm before constructing specs by other means.
                if url.path_segments().unwrap().next_back().unwrap() != self.name {
                    printed_name = true;
                    write!(f, "#{}", self.name)?;
                }
            }
            None => {
                printed_name = true;
                write!(f, "{}", self.name)?
            }
        }
        // Version separator: `:` after an explicit name, `#` after a URL.
        if let Some(ref v) = self.version {
            write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?;
        }
        Ok(())
    }
}

// Serialized as the `Display` string form, and parsed back with
// `PackageIdSpec::parse` (see `good_parsing` for the round-trip invariant).
impl ser::Serialize for PackageIdSpec {
    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        self.to_string().serialize(s)
    }
}

impl<'de> de::Deserialize<'de> for PackageIdSpec {
    fn deserialize<D>(d: D) -> Result<PackageIdSpec, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        let string = String::deserialize(d)?;
        PackageIdSpec::parse(&string).map_err(de::Error::custom)
    }
}
+
+#[cfg(test)]
+mod tests {
+    use core::{PackageId, SourceId};
+    use super::PackageIdSpec;
+    use url::Url;
+    use semver::Version;
+
    // Each case checks both directions: the string parses to the expected
    // spec, and the spec's `Display` form round-trips back to the string.
    #[test]
    fn good_parsing() {
        fn ok(spec: &str, expected: PackageIdSpec) {
            let parsed = PackageIdSpec::parse(spec).unwrap();
            assert_eq!(parsed, expected);
            assert_eq!(parsed.to_string(), spec);
        }

        ok(
            "http://crates.io/foo#1.2.3",
            PackageIdSpec {
                name: "foo".to_string(),
                version: Some(Version::parse("1.2.3").unwrap()),
                url: Some(Url::parse("http://crates.io/foo").unwrap()),
            },
        );
        ok(
            "http://crates.io/foo#bar:1.2.3",
            PackageIdSpec {
                name: "bar".to_string(),
                version: Some(Version::parse("1.2.3").unwrap()),
                url: Some(Url::parse("http://crates.io/foo").unwrap()),
            },
        );
        // Scheme-less specs get the synthetic `cargo://` scheme.
        ok(
            "crates.io/foo",
            PackageIdSpec {
                name: "foo".to_string(),
                version: None,
                url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
            },
        );
        ok(
            "crates.io/foo#1.2.3",
            PackageIdSpec {
                name: "foo".to_string(),
                version: Some(Version::parse("1.2.3").unwrap()),
                url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
            },
        );
        ok(
            "crates.io/foo#bar",
            PackageIdSpec {
                name: "bar".to_string(),
                version: None,
                url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
            },
        );
        ok(
            "crates.io/foo#bar:1.2.3",
            PackageIdSpec {
                name: "bar".to_string(),
                version: Some(Version::parse("1.2.3").unwrap()),
                url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
            },
        );
        // Bare name / name:version forms carry no URL.
        ok(
            "foo",
            PackageIdSpec {
                name: "foo".to_string(),
                version: None,
                url: None,
            },
        );
        ok(
            "foo:1.2.3",
            PackageIdSpec {
                name: "foo".to_string(),
                version: Some(Version::parse("1.2.3").unwrap()),
                url: None,
            },
        );
    }
+
+    #[test]
+    fn bad_parsing() {
+        assert!(PackageIdSpec::parse("baz:").is_err());
+        assert!(PackageIdSpec::parse("baz:*").is_err());
+        assert!(PackageIdSpec::parse("baz:1.0").is_err());
+        assert!(PackageIdSpec::parse("http://baz:1.0").is_err());
+        assert!(PackageIdSpec::parse("http://#baz:1.0").is_err());
+    }
+
    // `matches` must respect both the name and (when pinned) the version.
    #[test]
    fn matching() {
        let url = Url::parse("http://example.com").unwrap();
        let sid = SourceId::for_registry(&url).unwrap();
        let foo = PackageId::new("foo", "1.2.3", &sid).unwrap();
        let bar = PackageId::new("bar", "1.2.3", &sid).unwrap();

        assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo));
        assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar));
        assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
        assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo));
    }
+}
diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs
new file mode 100644 (file)
index 0000000..6120007
--- /dev/null
@@ -0,0 +1,637 @@
+use std::collections::HashSet;
+use std::{cmp, fmt, hash};
+
+use core::compiler::CompileMode;
+use core::interning::InternedString;
+use core::{Features, PackageId, PackageIdSpec, PackageSet, Shell};
+use util::errors::CargoResultExt;
+use util::lev_distance::lev_distance;
+use util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool};
+use util::{CargoResult, Config};
+
/// Collection of all user profiles.
#[derive(Clone, Debug)]
pub struct Profiles {
    // One maker per built-in profile; each layers manifest and config
    // settings over the hard-coded defaults.
    dev: ProfileMaker,
    release: ProfileMaker,
    test: ProfileMaker,
    bench: ProfileMaker,
    doc: ProfileMaker,
}
+
+impl Profiles {
    /// Builds the profile set from the (optional) `[profile.*]` tables in
    /// the manifest plus profiles from `.cargo/config`, validating both and
    /// collecting validation warnings into `warnings`.
    pub fn new(
        profiles: Option<&TomlProfiles>,
        config: &Config,
        features: &Features,
        warnings: &mut Vec<String>,
    ) -> CargoResult<Profiles> {
        if let Some(profiles) = profiles {
            profiles.validate(features, warnings)?;
        }

        let config_profiles = config.profiles()?;
        config_profiles.validate(features, warnings)?;

        // Only `dev` and `release` have `.cargo/config` counterparts here;
        // `test`, `bench`, and `doc` are manifest-only.
        Ok(Profiles {
            dev: ProfileMaker {
                default: Profile::default_dev(),
                toml: profiles.and_then(|p| p.dev.clone()),
                config: config_profiles.dev.clone(),
            },
            release: ProfileMaker {
                default: Profile::default_release(),
                toml: profiles.and_then(|p| p.release.clone()),
                config: config_profiles.release.clone(),
            },
            test: ProfileMaker {
                default: Profile::default_test(),
                toml: profiles.and_then(|p| p.test.clone()),
                config: None,
            },
            bench: ProfileMaker {
                default: Profile::default_bench(),
                toml: profiles.and_then(|p| p.bench.clone()),
                config: None,
            },
            doc: ProfileMaker {
                default: Profile::default_doc(),
                toml: profiles.and_then(|p| p.doc.clone()),
                config: None,
            },
        })
    }

    /// Retrieve the profile for a target.
    /// `is_member` is whether or not this package is a member of the
    /// workspace.
    pub fn get_profile(
        &self,
        pkg_id: &PackageId,
        is_member: bool,
        unit_for: UnitFor,
        mode: CompileMode,
        release: bool,
    ) -> Profile {
        // Pick the maker for the compile mode; `release` switches
        // test->bench and dev->release.
        let maker = match mode {
            CompileMode::Test => {
                if release {
                    &self.bench
                } else {
                    &self.test
                }
            }
            CompileMode::Build
            | CompileMode::Check { .. }
            | CompileMode::Doctest
            | CompileMode::RunCustomBuild => {
                // Note: RunCustomBuild doesn't normally use this code path.
                // `build_unit_profiles` normally ensures that it selects the
                // ancestor's profile.  However `cargo clean -p` can hit this
                // path.
                if release {
                    &self.release
                } else {
                    &self.dev
                }
            }
            CompileMode::Bench => &self.bench,
            CompileMode::Doc { .. } => &self.doc,
        };
        let mut profile = maker.get_profile(Some(pkg_id), is_member, unit_for);
        // `panic` should not be set for tests/benches, or any of their
        // dependencies.
        if !unit_for.is_panic_ok() || mode.is_any_test() {
            profile.panic = None;
        }
        profile
    }
+
+    /// The profile for *running* a `build.rs` script is only used for setting
+    /// a few environment variables.  To ensure proper de-duplication of the
+    /// running `Unit`, this uses a stripped-down profile (so that unrelated
+    /// profile flags don't cause `build.rs` to needlessly run multiple
+    /// times).
+    pub fn get_profile_run_custom_build(&self, for_unit_profile: &Profile) -> Profile {
+        let mut result = Profile::default();
+        result.debuginfo = for_unit_profile.debuginfo;
+        result.opt_level = for_unit_profile.opt_level;
+        result
+    }
+
+    /// This returns a generic base profile. This is currently used for the
+    /// `[Finished]` line.  It is not entirely accurate, since it doesn't
+    /// select for the package that was actually built.
+    pub fn base_profile(&self, release: bool) -> Profile {
+        if release {
+            self.release.get_profile(None, true, UnitFor::new_normal())
+        } else {
+            self.dev.get_profile(None, true, UnitFor::new_normal())
+        }
+    }
+
    /// Used to check for overrides for non-existing packages.
    /// Delegates to every built-in profile's maker; the first error stops
    /// validation.
    pub fn validate_packages(&self, shell: &mut Shell, packages: &PackageSet) -> CargoResult<()> {
        self.dev.validate_packages(shell, packages)?;
        self.release.validate_packages(shell, packages)?;
        self.test.validate_packages(shell, packages)?;
        self.bench.validate_packages(shell, packages)?;
        self.doc.validate_packages(shell, packages)?;
        Ok(())
    }
+}
+
/// An object used for handling the profile override hierarchy.
///
/// The precedence of profiles are (first one wins):
/// - Profiles in .cargo/config files (using same order as below).
/// - [profile.dev.overrides.name] - A named package.
/// - [profile.dev.overrides."*"] - This cannot apply to workspace members.
/// - [profile.dev.build-override] - This can only apply to `build.rs` scripts
///   and their dependencies.
/// - [profile.dev]
/// - Default (hard-coded) values.
#[derive(Debug, Clone)]
struct ProfileMaker {
    /// The starting, hard-coded defaults for the profile.
    default: Profile,
    /// The profile from the `Cargo.toml` manifest.
    toml: Option<TomlProfile>,
    /// Profile loaded from `.cargo/config` files.
    config: Option<TomlProfile>,
}
+
+impl ProfileMaker {
    /// Computes the final profile for a unit: start from the hard-coded
    /// default, layer the manifest's `[profile.*]` table on top, then the
    /// `.cargo/config` profile last (so config wins, matching the
    /// precedence documented on `ProfileMaker`).
    fn get_profile(
        &self,
        pkg_id: Option<&PackageId>,
        is_member: bool,
        unit_for: UnitFor,
    ) -> Profile {
        let mut profile = self.default;
        if let Some(ref toml) = self.toml {
            merge_toml(pkg_id, is_member, unit_for, &mut profile, toml);
        }
        if let Some(ref toml) = self.config {
            merge_toml(pkg_id, is_member, unit_for, &mut profile, toml);
        }
        profile
    }

    /// Validates override specs from both the manifest and config; only
    /// manifest specs warn when they match no package at all.
    fn validate_packages(&self, shell: &mut Shell, packages: &PackageSet) -> CargoResult<()> {
        self.validate_packages_toml(shell, packages, &self.toml, true)?;
        self.validate_packages_toml(shell, packages, &self.config, false)?;
        Ok(())
    }
+
    /// Checks one profile table's `overrides` against the resolved package
    /// set: errors when a package matches multiple override specs, and (when
    /// `warn_unmatched`) warns about specs that match nothing, with a
    /// did-you-mean suggestion where possible.
    fn validate_packages_toml(
        &self,
        shell: &mut Shell,
        packages: &PackageSet,
        toml: &Option<TomlProfile>,
        warn_unmatched: bool,
    ) -> CargoResult<()> {
        let toml = match *toml {
            Some(ref toml) => toml,
            None => return Ok(()),
        };
        let overrides = match toml.overrides {
            Some(ref overrides) => overrides,
            None => return Ok(()),
        };
        // Verify that a package doesn't match multiple spec overrides.
        let mut found = HashSet::new();
        for pkg_id in packages.package_ids() {
            // The `"*"` override is excluded here: it may coexist with a
            // named override without counting as ambiguous.
            let matches: Vec<&PackageIdSpec> = overrides
                .keys()
                .filter_map(|key| match *key {
                    ProfilePackageSpec::All => None,
                    ProfilePackageSpec::Spec(ref spec) => if spec.matches(pkg_id) {
                        Some(spec)
                    } else {
                        None
                    },
                })
                .collect();
            match matches.len() {
                0 => {}
                1 => {
                    found.insert(matches[0].clone());
                }
                _ => {
                    let specs = matches
                        .iter()
                        .map(|spec| spec.to_string())
                        .collect::<Vec<_>>()
                        .join(", ");
                    bail!(
                        "multiple profile overrides in profile `{}` match package `{}`\n\
                         found profile override specs: {}",
                        self.default.name,
                        pkg_id,
                        specs
                    );
                }
            }
        }

        if !warn_unmatched {
            return Ok(());
        }
        // Verify every override matches at least one package.
        let missing_specs = overrides.keys().filter_map(|key| {
            if let ProfilePackageSpec::Spec(ref spec) = *key {
                if !found.contains(spec) {
                    return Some(spec);
                }
            }
            None
        });
        for spec in missing_specs {
            // See if there is an exact name match.
            let name_matches: Vec<String> = packages
                .package_ids()
                .filter_map(|pkg_id| {
                    if pkg_id.name().as_str() == spec.name() {
                        Some(pkg_id.to_string())
                    } else {
                        None
                    }
                })
                .collect();
            if name_matches.is_empty() {
                // No package of that name at all: suggest the closest name
                // by edit distance (within a threshold of 4).
                let suggestion = packages
                    .package_ids()
                    .map(|p| (lev_distance(spec.name(), &p.name()), p.name()))
                    .filter(|&(d, _)| d < 4)
                    .min_by_key(|p| p.0)
                    .map(|p| p.1);
                match suggestion {
                    Some(p) => shell.warn(format!(
                        "profile override spec `{}` did not match any packages\n\n\
                         Did you mean `{}`?",
                        spec, p
                    ))?,
                    None => shell.warn(format!(
                        "profile override spec `{}` did not match any packages",
                        spec
                    ))?,
                }
            } else {
                // The name exists, so the version/URL part of the spec is
                // what failed to match.
                shell.warn(format!(
                    "version or URL in profile override spec `{}` does not \
                     match any of the packages: {}",
                    spec,
                    name_matches.join(", ")
                ))?;
            }
        }
        Ok(())
    }
+}
+
/// Layers one profile table onto `profile`, applying (in increasing
/// precedence) the base table, the `build-override` table for build-script
/// units, the `"*"` override for non-members, and finally any named
/// override matching `pkg_id`.
fn merge_toml(
    pkg_id: Option<&PackageId>,
    is_member: bool,
    unit_for: UnitFor,
    profile: &mut Profile,
    toml: &TomlProfile,
) {
    merge_profile(profile, toml);
    if unit_for.is_custom_build() {
        if let Some(ref build_override) = toml.build_override {
            merge_profile(profile, build_override);
        }
    }
    if let Some(ref overrides) = toml.overrides {
        // The `"*"` override never applies to workspace members.
        if !is_member {
            if let Some(all) = overrides.get(&ProfilePackageSpec::All) {
                merge_profile(profile, all);
            }
        }
        if let Some(pkg_id) = pkg_id {
            let mut matches = overrides
                .iter()
                .filter_map(|(key, spec_profile)| match *key {
                    ProfilePackageSpec::All => None,
                    ProfilePackageSpec::Spec(ref s) => if s.matches(pkg_id) {
                        Some(spec_profile)
                    } else {
                        None
                    },
                });
            if let Some(spec_profile) = matches.next() {
                merge_profile(profile, spec_profile);
                // `validate_packages` should ensure that there are
                // no additional matches.
                assert!(
                    matches.next().is_none(),
                    "package `{}` matched multiple profile overrides",
                    pkg_id
                );
            }
        }
    }
}
+
+fn merge_profile(profile: &mut Profile, toml: &TomlProfile) {
+    if let Some(ref opt_level) = toml.opt_level {
+        profile.opt_level = InternedString::new(&opt_level.0);
+    }
+    match toml.lto {
+        Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b),
+        Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(InternedString::new(n)),
+        None => {}
+    }
+    if toml.codegen_units.is_some() {
+        profile.codegen_units = toml.codegen_units;
+    }
+    match toml.debug {
+        Some(U32OrBool::U32(debug)) => profile.debuginfo = Some(debug),
+        Some(U32OrBool::Bool(true)) => profile.debuginfo = Some(2),
+        Some(U32OrBool::Bool(false)) => profile.debuginfo = None,
+        None => {}
+    }
+    if let Some(debug_assertions) = toml.debug_assertions {
+        profile.debug_assertions = debug_assertions;
+    }
+    if let Some(rpath) = toml.rpath {
+        profile.rpath = rpath;
+    }
+    if let Some(ref panic) = toml.panic {
+        profile.panic = Some(InternedString::new(panic));
+    }
+    if let Some(overflow_checks) = toml.overflow_checks {
+        profile.overflow_checks = overflow_checks;
+    }
+    if let Some(incremental) = toml.incremental {
+        profile.incremental = incremental;
+    }
+}
+
/// Profile settings used to determine which compiler flags to use for a
/// target.
///
/// Note: `name` is deliberately excluded from the `Hash`/`PartialEq`
/// impls (see `comparable`) so profiles that compile identically compare
/// equal.
#[derive(Clone, Copy, Eq)]
pub struct Profile {
    pub name: &'static str,
    // Optimization level as a string ("0", "3", "s", ...).
    pub opt_level: InternedString,
    pub lto: Lto,
    // None = use rustc default
    pub codegen_units: Option<u32>,
    // Debuginfo level; `None` means debuginfo is disabled.
    pub debuginfo: Option<u32>,
    pub debug_assertions: bool,
    pub overflow_checks: bool,
    pub rpath: bool,
    pub incremental: bool,
    // Panic strategy name, if overridden by the profile; `None` = not set.
    pub panic: Option<InternedString>,
}
+
+impl Default for Profile {
+    fn default() -> Profile {
+        Profile {
+            name: "",
+            opt_level: InternedString::new("0"),
+            lto: Lto::Bool(false),
+            codegen_units: None,
+            debuginfo: None,
+            debug_assertions: false,
+            overflow_checks: false,
+            rpath: false,
+            incremental: false,
+            panic: None,
+        }
+    }
+}
+
// NOTE(review): `compact_debug!` is a project-local macro; judging by its
// name and the per-name default computed below, it presumably emits a
// `Debug` impl that prints fields relative to the matching named default
// (`default_dev()` etc.) to keep output compact — confirm against the
// macro definition before relying on this.
compact_debug! {
    impl fmt::Debug for Profile {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            let (default, default_name) = match self.name {
                "dev" => (Profile::default_dev(), "default_dev()"),
                "release" => (Profile::default_release(), "default_release()"),
                "test" => (Profile::default_test(), "default_test()"),
                "bench" => (Profile::default_bench(), "default_bench()"),
                "doc" => (Profile::default_doc(), "default_doc()"),
                _ => (Profile::default(), "default()"),
            };
            [debug_the_fields(
                name
                opt_level
                lto
                codegen_units
                debuginfo
                debug_assertions
                overflow_checks
                rpath
                incremental
                panic
            )]
        }
    }
}
+
+impl fmt::Display for Profile {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "Profile({})", self.name)
+    }
+}
+
+impl hash::Hash for Profile {
+    fn hash<H>(&self, state: &mut H)
+    where
+        H: hash::Hasher,
+    {
+        self.comparable().hash(state);
+    }
+}
+
+impl cmp::PartialEq for Profile {
+    fn eq(&self, other: &Self) -> bool {
+        self.comparable() == other.comparable()
+    }
+}
+
+impl Profile {
+    fn default_dev() -> Profile {
+        Profile {
+            name: "dev",
+            debuginfo: Some(2),
+            debug_assertions: true,
+            overflow_checks: true,
+            incremental: true,
+            ..Profile::default()
+        }
+    }
+
+    fn default_release() -> Profile {
+        Profile {
+            name: "release",
+            opt_level: InternedString::new("3"),
+            ..Profile::default()
+        }
+    }
+
+    fn default_test() -> Profile {
+        Profile {
+            name: "test",
+            ..Profile::default_dev()
+        }
+    }
+
+    fn default_bench() -> Profile {
+        Profile {
+            name: "bench",
+            ..Profile::default_release()
+        }
+    }
+
+    fn default_doc() -> Profile {
+        Profile {
+            name: "doc",
+            ..Profile::default_dev()
+        }
+    }
+
+    /// Compare all fields except `name`, which doesn't affect compilation.
+    /// This is necessary for `Unit` deduplication for things like "test" and
+    /// "dev" which are essentially the same.
+    fn comparable(
+        &self,
+    ) -> (
+        &InternedString,
+        &Lto,
+        &Option<u32>,
+        &Option<u32>,
+        &bool,
+        &bool,
+        &bool,
+        &bool,
+        &Option<InternedString>,
+    ) {
+        (
+            &self.opt_level,
+            &self.lto,
+            &self.codegen_units,
+            &self.debuginfo,
+            &self.debug_assertions,
+            &self.overflow_checks,
+            &self.rpath,
+            &self.incremental,
+            &self.panic,
+        )
+    }
+}
+
/// The link-time-optimization setting.
///
/// Parsed from a profile's `lto` key, which may be a bool or a string
/// (see `merge_profile`).
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Lto {
    /// False = no LTO
    /// True = "Fat" LTO
    Bool(bool),
    /// Named LTO settings like "thin".
    Named(InternedString),
}
+
/// Flags used in creating `Unit`s to indicate the purpose for the target, and
/// to ensure the target's dependencies have the correct settings.
///
/// Only three combinations of these flags ever occur in practice; see
/// `all_values`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct UnitFor {
    /// A target for `build.rs` or any of its dependencies.  This enables
    /// `build-override` profiles for these targets.
    custom_build: bool,
    /// This is true if it is *allowed* to set the `panic` flag. Currently
    /// this is false for test/bench targets and all their dependencies, and
    /// "for_host" units such as proc-macro and custom build scripts and their
    /// dependencies.
    panic_ok: bool,
}
+
+impl UnitFor {
+    /// A unit for a normal target/dependency (i.e. not custom build,
+    /// proc-macro/plugin, or test/bench).
+    pub fn new_normal() -> UnitFor {
+        UnitFor {
+            custom_build: false,
+            panic_ok: true,
+        }
+    }
+
+    /// A unit for a custom build script or its dependencies.
+    pub fn new_build() -> UnitFor {
+        UnitFor {
+            custom_build: true,
+            panic_ok: false,
+        }
+    }
+
+    /// A unit for a proc-macro or compiler plugin or their dependencies.
+    pub fn new_compiler() -> UnitFor {
+        UnitFor {
+            custom_build: false,
+            panic_ok: false,
+        }
+    }
+
+    /// A unit for a test/bench target or their dependencies.
+    pub fn new_test() -> UnitFor {
+        UnitFor {
+            custom_build: false,
+            panic_ok: false,
+        }
+    }
+
+    /// Create a variant based on `for_host` setting.
+    ///
+    /// When `for_host` is true, this clears `panic_ok` in a sticky fashion so
+    /// that all its dependencies also have `panic_ok=false`.
+    pub fn with_for_host(self, for_host: bool) -> UnitFor {
+        UnitFor {
+            custom_build: self.custom_build,
+            panic_ok: self.panic_ok && !for_host
+        }
+    }
+
+    /// Returns true if this unit is for a custom build script or one of its
+    /// dependencies.
+    pub fn is_custom_build(&self) -> bool {
+        self.custom_build
+    }
+
+    /// Returns true if this unit is allowed to set the `panic` compiler flag.
+    pub fn is_panic_ok(&self) -> bool {
+        self.panic_ok
+    }
+
+    /// All possible values, used by `clean`.
+    pub fn all_values() -> &'static [UnitFor] {
+        static ALL: [UnitFor; 3] = [
+            UnitFor { custom_build: false, panic_ok: true },
+            UnitFor { custom_build: true, panic_ok: false },
+            UnitFor { custom_build: false, panic_ok: false },
+        ];
+        &ALL
+    }
+}
+
/// Profiles loaded from .cargo/config files.
///
/// Only the `dev` and `release` profiles may be set this way; each entry
/// is checked by `validate` before use.
#[derive(Clone, Debug, Deserialize, Default)]
pub struct ConfigProfiles {
    dev: Option<TomlProfile>,
    release: Option<TomlProfile>,
}
+
+impl ConfigProfiles {
+    pub fn validate(&self, features: &Features, warnings: &mut Vec<String>) -> CargoResult<()> {
+        if let Some(ref profile) = self.dev {
+            profile
+                .validate("dev", features, warnings)
+                .chain_err(|| format_err!("config profile `profile.dev` is not valid"))?;
+        }
+        if let Some(ref profile) = self.release {
+            profile
+                .validate("release", features, warnings)
+                .chain_err(|| format_err!("config profile `profile.release` is not valid"))?;
+        }
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs
new file mode 100644 (file)
index 0000000..e5c975e
--- /dev/null
@@ -0,0 +1,639 @@
+use std::collections::HashMap;
+
+use semver::VersionReq;
+use url::Url;
+
+use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary};
+use core::PackageSet;
+use util::{profile, Config};
+use util::errors::{CargoResult, CargoResultExt};
+use sources::config::SourceConfigMap;
+
+/// Source of information about a group of packages.
+///
+/// See also `core::Source`.
/// Source of information about a group of packages.
///
/// See also `core::Source`.
pub trait Registry {
    /// Attempt to find the packages that match a dependency request.
    ///
    /// Each matching `Summary` is passed to the callback `f`. The `fuzzy`
    /// flag requests relaxed matching; its exact semantics are up to the
    /// implementation.
    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary), fuzzy: bool) -> CargoResult<()>;

    /// Convenience wrapper around `query` that collects all matches into a
    /// `Vec` instead of driving a callback.
    fn query_vec(&mut self, dep: &Dependency, fuzzy: bool) -> CargoResult<Vec<Summary>> {
        let mut ret = Vec::new();
        self.query(dep, &mut |s| ret.push(s), fuzzy)?;
        Ok(ret)
    }

    /// Returns a human-readable description of the given source.
    fn describe_source(&self, source: &SourceId) -> String;

    /// Returns whether the given source has been replaced by another one.
    fn is_replaced(&self, source: &SourceId) -> bool;
}
+
/// This structure represents a registry of known packages. It internally
/// contains a number of `Box<Source>` instances which are used to load a
/// `Package` from.
///
/// The resolution phase of Cargo uses this to drive knowledge about new
/// packages as well as querying for lists of new packages. It is here that
/// sources are updated (e.g. network operations) and overrides are
/// handled.
///
/// The general idea behind this registry is that it is centered around the
/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to
/// a `Source`. Each `Source` in the map has been updated (using network
/// operations if necessary) and is ready to be queried for packages.
pub struct PackageRegistry<'cfg> {
    config: &'cfg Config,
    // All sources loaded so far, keyed by their `SourceId`.
    sources: SourceMap<'cfg>,

    // A list of sources which are considered "overrides" which take precedence
    // when querying for packages.
    overrides: Vec<SourceId>,

    // Note that each SourceId does not take into account its `precise` field
    // when hashing or testing for equality. When adding a new `SourceId`, we
    // want to avoid duplicates in the `SourceMap` (to prevent re-updating the
    // same git repo twice for example), but we also want to ensure that the
    // loaded source is always updated.
    //
    // Sources with a `precise` field normally don't need to be updated because
    // their contents are already on disk, but sources without a `precise` field
    // almost always need to be updated. If we have a cached `Source` for a
    // precise `SourceId`, then when we add a new `SourceId` that is not precise
    // we want to ensure that the underlying source is updated.
    //
    // This is basically a long-winded way of saying that we want to know
    // precisely what the keys of `sources` are, so this is a mapping of key to
    // what exactly the key is.
    source_ids: HashMap<SourceId, (SourceId, Kind)>,

    // Lock-file entries registered via `register_lock`, consulted by `lock`.
    locked: LockedMap,
    // Source configuration used by `load` to construct sources.
    source_config: SourceConfigMap<'cfg>,

    // `[patch]` summaries per patched source URL (filled in by `patch`);
    // `patches_locked` records whether `lock_patches` has run yet.
    patches: HashMap<Url, Vec<Summary>>,
    patches_locked: bool,
    // Package ids of the entries in `patches`, used when locking summaries.
    patches_available: HashMap<Url, Vec<PackageId>>,
}
+
// Map of source -> package name -> list of locked `(package id, its locked
// dependency ids)` pairs, as recorded by `register_lock`.
type LockedMap = HashMap<SourceId, HashMap<String, Vec<(PackageId, Vec<PackageId>)>>>;
+
// How a source was added to the registry; this controls whether
// `ensure_loaded` is allowed to update it again later.
#[derive(PartialEq, Eq, Clone, Copy)]
enum Kind {
    // A path-override source (takes precedence when querying).
    Override,
    // A locked source; never changed once loaded.
    Locked,
    // Any other source.
    Normal,
}
+
+impl<'cfg> PackageRegistry<'cfg> {
+    pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> {
+        let source_config = SourceConfigMap::new(config)?;
+        Ok(PackageRegistry {
+            config,
+            sources: SourceMap::new(),
+            source_ids: HashMap::new(),
+            overrides: Vec::new(),
+            source_config,
+            locked: HashMap::new(),
+            patches: HashMap::new(),
+            patches_locked: false,
+            patches_available: HashMap::new(),
+        })
+    }
+
+    pub fn get(self, package_ids: &[PackageId]) -> CargoResult<PackageSet<'cfg>> {
+        trace!("getting packages; sources={}", self.sources.len());
+        PackageSet::new(package_ids, self.sources, self.config)
+    }
+
+    fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> {
+        match self.source_ids.get(namespace) {
+            // We've previously loaded this source, and we've already locked it,
+            // so we're not allowed to change it even if `namespace` has a
+            // slightly different precise version listed.
+            Some(&(_, Kind::Locked)) => {
+                debug!("load/locked   {}", namespace);
+                return Ok(());
+            }
+
+            // If the previous source was not a precise source, then we can be
+            // sure that it's already been updated if we've already loaded it.
+            Some(&(ref previous, _)) if previous.precise().is_none() => {
+                debug!("load/precise  {}", namespace);
+                return Ok(());
+            }
+
+            // If the previous source has the same precise version as we do,
+            // then we're done, otherwise we need to need to move forward
+            // updating this source.
+            Some(&(ref previous, _)) => {
+                if previous.precise() == namespace.precise() {
+                    debug!("load/match    {}", namespace);
+                    return Ok(());
+                }
+                debug!("load/mismatch {}", namespace);
+            }
+            None => {
+                debug!("load/missing  {}", namespace);
+            }
+        }
+
+        self.load(namespace, kind)?;
+        Ok(())
+    }
+
+    pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
+        for id in ids.iter() {
+            self.ensure_loaded(id, Kind::Locked)?;
+        }
+        Ok(())
+    }
+
+    pub fn add_preloaded(&mut self, source: Box<Source + 'cfg>) {
+        self.add_source(source, Kind::Locked);
+    }
+
+    fn add_source(&mut self, source: Box<Source + 'cfg>, kind: Kind) {
+        let id = source.source_id().clone();
+        self.sources.insert(source);
+        self.source_ids.insert(id.clone(), (id, kind));
+    }
+
+    pub fn add_override(&mut self, source: Box<Source + 'cfg>) {
+        self.overrides.push(source.source_id().clone());
+        self.add_source(source, Kind::Override);
+    }
+
+    pub fn register_lock(&mut self, id: PackageId, deps: Vec<PackageId>) {
+        trace!("register_lock: {}", id);
+        for dep in deps.iter() {
+            trace!("\t-> {}", dep);
+        }
+        let sub_map = self.locked
+            .entry(id.source_id().clone())
+            .or_insert_with(HashMap::new);
+        let sub_vec = sub_map
+            .entry(id.name().to_string())
+            .or_insert_with(Vec::new);
+        sub_vec.push((id, deps));
+    }
+
+    /// Insert a `[patch]` section into this registry.
+    ///
+    /// This method will insert a `[patch]` section for the `url` specified,
+    /// with the given list of dependencies. The `url` specified is the URL of
+    /// the source to patch (for example this is `crates-io` in the manifest).
+    /// The `deps` is an array of all the entries in the `[patch]` section of
+    /// the manifest.
+    ///
+    /// Here the `deps` will be resolved to a precise version and stored
+    /// internally for future calls to `query` below. It's expected that `deps`
+    /// have had `lock_to` call already, if applicable. (e.g. if a lock file was
+    /// already present).
+    ///
+    /// Note that the patch list specified here *will not* be available to
+    /// `query` until `lock_patches` is called below, which should be called
+    /// once all patches have been added.
+    pub fn patch(&mut self, url: &Url, deps: &[Dependency]) -> CargoResult<()> {
+        // First up we need to actually resolve each `deps` specification to
+        // precisely one summary. We're not using the `query` method below as it
+        // internally uses maps we're building up as part of this method
+        // (`patches_available` and `patches). Instead we're going straight to
+        // the source to load information from it.
+        //
+        // Remember that each dependency listed in `[patch]` has to resolve to
+        // precisely one package, so that's why we're just creating a flat list
+        // of summaries which should be the same length as `deps` above.
+        let unlocked_summaries = deps.iter()
+            .map(|dep| {
+                debug!("registring a patch for `{}` with `{}`", url, dep.package_name());
+
+                // Go straight to the source for resolving `dep`. Load it as we
+                // normally would and then ask it directly for the list of summaries
+                // corresponding to this `dep`.
+                self.ensure_loaded(dep.source_id(), Kind::Normal)
+                    .chain_err(|| {
+                        format_err!(
+                            "failed to load source for a dependency \
+                             on `{}`",
+                            dep.package_name()
+                        )
+                    })?;
+
+                let mut summaries = self.sources
+                    .get_mut(dep.source_id())
+                    .expect("loaded source not present")
+                    .query_vec(dep)?
+                    .into_iter();
+
+                let summary = match summaries.next() {
+                    Some(summary) => summary,
+                    None => bail!(
+                        "patch for `{}` in `{}` did not resolve to any crates. If this is \
+                         unexpected, you may wish to consult: \
+                         https://github.com/rust-lang/cargo/issues/4678",
+                        dep.package_name(),
+                        url
+                    ),
+                };
+                if summaries.next().is_some() {
+                    bail!(
+                        "patch for `{}` in `{}` resolved to more than one candidate",
+                        dep.package_name(),
+                        url
+                    )
+                }
+                if summary.package_id().source_id().url() == url {
+                    bail!(
+                        "patch for `{}` in `{}` points to the same source, but \
+                         patches must point to different sources",
+                        dep.package_name(),
+                        url
+                    );
+                }
+                Ok(summary)
+            })
+            .collect::<CargoResult<Vec<_>>>()
+            .chain_err(|| format_err!("failed to resolve patches for `{}`", url))?;
+
+        // Note that we do not use `lock` here to lock summaries! That step
+        // happens later once `lock_patches` is invoked. In the meantime though
+        // we want to fill in the `patches_available` map (later used in the
+        // `lock` method) and otherwise store the unlocked summaries in
+        // `patches` to get locked in a future call to `lock_patches`.
+        let ids = unlocked_summaries
+            .iter()
+            .map(|s| s.package_id())
+            .cloned()
+            .collect();
+        self.patches_available.insert(url.clone(), ids);
+        self.patches.insert(url.clone(), unlocked_summaries);
+
+        Ok(())
+    }
+
+    /// Lock all patch summaries added via `patch`, making them available to
+    /// resolution via `query`.
+    ///
+    /// This function will internally `lock` each summary added via `patch`
+    /// above now that the full set of `patch` packages are known. This'll allow
+    /// us to correctly resolve overridden dependencies between patches
+    /// hopefully!
+    pub fn lock_patches(&mut self) {
+        assert!(!self.patches_locked);
+        for summaries in self.patches.values_mut() {
+            for summary in summaries {
+                *summary = lock(&self.locked, &self.patches_available, summary.clone());
+            }
+        }
+        self.patches_locked = true;
+    }
+
+    pub fn patches(&self) -> &HashMap<Url, Vec<Summary>> {
+        &self.patches
+    }
+
+    fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> {
+        (|| {
+            debug!("loading source {}", source_id);
+            let source = self.source_config.load(source_id)?;
+            assert_eq!(source.source_id(), source_id);
+
+            if kind == Kind::Override {
+                self.overrides.push(source_id.clone());
+            }
+            self.add_source(source, kind);
+
+            // Ensure the source has fetched all necessary remote data.
+            let _p = profile::start(format!("updating: {}", source_id));
+            self.sources.get_mut(source_id).unwrap().update()
+        })()
+            .chain_err(|| format_err!("Unable to update {}", source_id))?;
+        Ok(())
+    }
+
+    fn query_overrides(&mut self, dep: &Dependency) -> CargoResult<Option<Summary>> {
+        for s in self.overrides.iter() {
+            let src = self.sources.get_mut(s).unwrap();
+            let dep = Dependency::new_override(&*dep.package_name(), s);
+            let mut results = src.query_vec(&dep)?;
+            if !results.is_empty() {
+                return Ok(Some(results.remove(0)));
+            }
+        }
+        Ok(None)
+    }
+
+    /// This function is used to transform a summary to another locked summary
+    /// if possible. This is where the concept of a lockfile comes into play.
+    ///
+    /// If a summary points at a package id which was previously locked, then we
+    /// override the summary's id itself, as well as all dependencies, to be
+    /// rewritten to the locked versions. This will transform the summary's
+    /// source to a precise source (listed in the locked version) as well as
+    /// transforming all of the dependencies from range requirements on
+    /// imprecise sources to exact requirements on precise sources.
+    ///
+    /// If a summary does not point at a package id which was previously locked,
+    /// or if any dependencies were added and don't have a previously listed
+    /// version, we still want to avoid updating as many dependencies as
+    /// possible to keep the graph stable. In this case we map all of the
+    /// summary's dependencies to be rewritten to a locked version wherever
+    /// possible. If we're unable to map a dependency though, we just pass it on
+    /// through.
+    pub fn lock(&self, summary: Summary) -> Summary {
+        assert!(self.patches_locked);
+        lock(&self.locked, &self.patches_available, summary)
+    }
+
+    fn warn_bad_override(
+        &self,
+        override_summary: &Summary,
+        real_summary: &Summary,
+    ) -> CargoResult<()> {
+        let mut real_deps = real_summary.dependencies().iter().collect::<Vec<_>>();
+
+        let boilerplate = "\
+This is currently allowed but is known to produce buggy behavior with spurious
+recompiles and changes to the crate graph. Path overrides unfortunately were
+never intended to support this feature, so for now this message is just a
+warning. In the future, however, this message will become a hard error.
+
+To change the dependency graph via an override it's recommended to use the
+`[replace]` feature of Cargo instead of the path override feature. This is
+documented online at the url below for more information.
+
+https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies
+";
+
+        for dep in override_summary.dependencies() {
+            if let Some(i) = real_deps.iter().position(|d| dep == *d) {
+                real_deps.remove(i);
+                continue;
+            }
+            let msg = format!(
+                "\
+                 path override for crate `{}` has altered the original list of\n\
+                 dependencies; the dependency on `{}` was either added or\n\
+                 modified to not match the previously resolved version\n\n\
+                 {}",
+                override_summary.package_id().name(),
+                dep.package_name(),
+                boilerplate
+            );
+            self.source_config.config().shell().warn(&msg)?;
+            return Ok(());
+        }
+
+        if let Some(dep) = real_deps.get(0) {
+            let msg = format!(
+                "\
+                path override for crate `{}` has altered the original list of
+                dependencies; the dependency on `{}` was removed\n\n
+                {}",
+                override_summary.package_id().name(),
+                dep.package_name(),
+                boilerplate
+            );
+            self.source_config.config().shell().warn(&msg)?;
+            return Ok(());
+        }
+
+        Ok(())
+    }
+}
+
impl<'cfg> Registry for PackageRegistry<'cfg> {
    /// Query candidates for `dep`, consulting (in this order) path
    /// overrides, the `[patch]` table, and finally the real source.
    /// `lock_patches` must have been called before querying.
    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary), fuzzy: bool) -> CargoResult<()> {
        assert!(self.patches_locked);
        // When an override is in play we still query the real source so we
        // can warn about mismatches; these three values carry the override
        // summary, the number of real candidates, and one real candidate to
        // compare against.
        let (override_summary, n, to_warn) = {
            // Look for an override and get ready to query the real source.
            let override_summary = self.query_overrides(dep)?;

            // Next up on our list of candidates is to check the `[patch]`
            // section of the manifest. Here we look through all patches
            // relevant to the source that `dep` points to, and then we match
            // name/version. Note that we don't use `dep.matches(..)` because
            // the patches, by definition, come from a different source.
            // This means that `dep.matches(..)` will always return false, when
            // what we really care about is the name/version match.
            let mut patches = Vec::<Summary>::new();
            if let Some(extra) = self.patches.get(dep.source_id().url()) {
                patches.extend(
                    extra
                        .iter()
                        .filter(|s| dep.matches_ignoring_source(s.package_id()))
                        .cloned(),
                );
            }

            // A crucial feature of the `[patch]` feature is that we *don't*
            // query the actual registry if we have a "locked" dependency. A
            // locked dep basically just means a version constraint of `=a.b.c`,
            // and because patches take priority over the actual source then if
            // we have a candidate we're done.
            if patches.len() == 1 && dep.is_locked() {
                let patch = patches.remove(0);
                match override_summary {
                    Some(summary) => (summary, 1, Some(patch)),
                    None => {
                        f(patch);
                        return Ok(());
                    }
                }
            } else {
                if !patches.is_empty() {
                    debug!(
                        "found {} patches with an unlocked dep on `{}` at {} \
                         with `{}`, \
                         looking at sources",
                        patches.len(),
                        dep.package_name(),
                        dep.source_id(),
                        dep.version_req()
                    );
                }

                // Ensure the requested source_id is loaded
                self.ensure_loaded(dep.source_id(), Kind::Normal)
                    .chain_err(|| {
                        format_err!(
                            "failed to load source for a dependency \
                             on `{}`",
                            dep.package_name()
                        )
                    })?;

                let source = self.sources.get_mut(dep.source_id());
                match (override_summary, source) {
                    (Some(_), None) => bail!("override found but no real ones"),
                    (None, None) => return Ok(()),

                    // If we don't have an override then we just ship
                    // everything upstairs after locking the summary
                    (None, Some(source)) => {
                        for patch in patches.iter() {
                            f(patch.clone());
                        }

                        // Our sources shouldn't ever come back to us with two
                        // summaries that have the same version. We could,
                        // however, have an `[patch]` section which is in use
                        // to override a version in the registry. This means
                        // that if our `summary` in this loop has the same
                        // version as something in `patches` that we've
                        // already selected, then we skip this `summary`.
                        let locked = &self.locked;
                        let all_patches = &self.patches_available;
                        let callback = &mut |summary: Summary| {
                            for patch in patches.iter() {
                                let patch = patch.package_id().version();
                                if summary.package_id().version() == patch {
                                    return;
                                }
                            }
                            f(lock(locked, all_patches, summary))
                        };
                        return if fuzzy {
                            source.fuzzy_query(dep, callback)
                        } else {
                            source.query(dep, callback)
                        };
                    }

                    // If we have an override summary then we query the source
                    // to sanity check its results. We don't actually use any of
                    // the summaries it gives us though.
                    (Some(override_summary), Some(source)) => {
                        if !patches.is_empty() {
                            bail!("found patches and a path override")
                        }
                        let mut n = 0;
                        let mut to_warn = None;
                        {
                            let callback = &mut |summary| {
                                n += 1;
                                to_warn = Some(summary);
                            };
                            if fuzzy {
                                source.fuzzy_query(dep, callback)?;
                            } else {
                                source.query(dep, callback)?;
                            }
                        }
                        (override_summary, n, to_warn)
                    }
                }
            }
        };

        // Only reached when an override was found above: emit at most one
        // warning comparing the override with what the real source returned.
        if n > 1 {
            bail!("found an override with a non-locked list");
        } else if let Some(summary) = to_warn {
            self.warn_bad_override(&override_summary, &summary)?;
        }
        f(self.lock(override_summary));
        Ok(())
    }

    /// Describes `id` via its loaded source when available, falling back
    /// to the id's own string form.
    fn describe_source(&self, id: &SourceId) -> String {
        match self.sources.get(id) {
            Some(src) => src.describe(),
            None => id.to_string(),
        }
    }

    /// Whether the loaded source for `id` reports being replaced; `false`
    /// when the source was never loaded.
    fn is_replaced(&self, id: &SourceId) -> bool {
        match self.sources.get(id) {
            Some(src) => src.is_replaced(),
            None => false,
        }
    }
}
+
/// Locks `summary` (its own id, then each of its dependency requirements)
/// against the lock-file entries in `locked` and the registered `[patch]`
/// ids in `patches`. See `PackageRegistry::lock` for the high-level
/// contract; dependencies with no matching entry pass through unchanged.
fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Summary) -> Summary {
    // Lock-file entry for this exact package, if one exists:
    // (locked package id, its locked dependency ids).
    let pair = locked
        .get(summary.source_id())
        .and_then(|map| map.get(&*summary.name()))
        .and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id()));

    trace!("locking summary of {}", summary.package_id());

    // Lock the summary's id if possible
    let summary = match pair {
        Some(&(ref precise, _)) => summary.override_id(precise.clone()),
        None => summary,
    };
    summary.map_dependencies(|dep| {
        trace!("\t{}/{}/{}", dep.package_name(), dep.version_req(), dep.source_id());

        // If we've got a known set of overrides for this summary, then
        // one of a few cases can arise:
        //
        // 1. We have a lock entry for this dependency from the same
        //    source as it's listed as coming from. In this case we make
        //    sure to lock to precisely the given package id.
        //
        // 2. We have a lock entry for this dependency, but it's from a
        //    different source than what's listed, or the version
        //    requirement has changed. In this case we must discard the
        //    locked version because the dependency needs to be
        //    re-resolved.
        //
        // 3. We don't have a lock entry for this dependency, in which
        //    case it was likely an optional dependency which wasn't
        //    included previously so we just pass it through anyway.
        //
        // Cases 1/2 are handled by `matches_id` and case 3 is handled by
        // falling through to the logic below.
        if let Some(&(_, ref locked_deps)) = pair {
            let locked = locked_deps.iter().find(|id| dep.matches_id(id));
            if let Some(locked) = locked {
                trace!("\tfirst hit on {}", locked);
                let mut dep = dep.clone();
                dep.lock_to(locked);
                return dep;
            }
        }

        // If this dependency did not have a locked version, then we query
        // all known locked packages to see if they match this dependency.
        // If anything does then we lock it to that and move on.
        let v = locked
            .get(dep.source_id())
            .and_then(|map| map.get(&*dep.package_name()))
            .and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id)));
        if let Some(&(ref id, _)) = v {
            trace!("\tsecond hit on {}", id);
            let mut dep = dep.clone();
            dep.lock_to(id);
            return dep;
        }

        // Finally we check to see if any registered patches correspond to
        // this dependency.
        let v = patches.get(dep.source_id().url()).map(|vec| {
            let dep2 = dep.clone();
            let mut iter = vec.iter().filter(move |p| {
                dep2.matches_ignoring_source(p)
            });
            (iter.next(), iter)
        });
        if let Some((Some(patch_id), mut remaining)) = v {
            // At most one patch may match a given dependency.
            assert!(remaining.next().is_none());
            let patch_source = patch_id.source_id();
            // Only pin to the patch if the patch itself appears in the
            // lock file; otherwise leave the dependency unlocked.
            let patch_locked = locked
                .get(patch_source)
                .and_then(|m| m.get(&*patch_id.name()))
                .map(|list| list.iter().any(|&(ref id, _)| id == patch_id))
                .unwrap_or(false);

            if patch_locked {
                trace!("\tthird hit on {}", patch_id);
                let req = VersionReq::exact(patch_id.version());
                let mut dep = dep.clone();
                dep.set_version_req(req);
                return dep;
            }
        }

        trace!("\tnope, unlocked");
        dep
    })
}
diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs
new file mode 100644 (file)
index 0000000..b35813b
--- /dev/null
@@ -0,0 +1,103 @@
+use std::collections::{HashMap, HashSet};
+
+use super::types::ConflictReason;
+use core::resolver::Context;
+use core::{Dependency, PackageId};
+
+pub(super) struct ConflictCache {
+    // `con_from_dep` is a cache of the reasons for each time we
+    // backtrack. For example after several backtracks we may have:
+    //
+    //  con_from_dep[`foo = "^1.0.2"`] = vec![
+    //      map!{`foo=1.0.1`: Semver},
+    //      map!{`foo=1.0.0`: Semver},
+    //  ];
+    //
+    // This can be read as "we cannot find a candidate for dep `foo = "^1.0.2"`
+    // if either `foo=1.0.1` OR `foo=1.0.0` are activated".
+    //
+    // Another example after several backtracks we may have:
+    //
+    //  con_from_dep[`foo = ">=0.8.2, <=0.9.3"`] = vec![
+    //      map!{`foo=0.8.1`: Semver, `foo=0.9.4`: Semver},
+    //  ];
+    //
+    // This can be read as "we cannot find a candidate for dep `foo = ">=0.8.2,
+    // <=0.9.3"` if both `foo=0.8.1` AND `foo=0.9.4` are activated".
+    //
+    // This is used to make sure we don't queue work we know will fail. See the
+    // discussion in https://github.com/rust-lang/cargo/pull/5168 for why this
+    // is so important, and there can probably be a better data structure here
+    // but for now this works well enough!
+    //
+    // Also, as a final note, this map is *not* ever removed from. This remains
+    // as a global cache which we never delete from. Any entry in this map is
+    // unconditionally true regardless of our resolution history of how we got
+    // here.
+    con_from_dep: HashMap<Dependency, Vec<HashMap<PackageId, ConflictReason>>>,
+    // `dep_from_pid` is an inverse-index
+    // of `con_from_dep`.
+    // For every `PackageId` this lists the `Dependency`s that mention it in `con_from_dep`.
+    dep_from_pid: HashMap<PackageId, HashSet<Dependency>>,
+}
+
+impl ConflictCache {
+    pub fn new() -> ConflictCache {
+        ConflictCache {
+            con_from_dep: HashMap::new(),
+            dep_from_pid: HashMap::new(),
+        }
+    }
+    /// Finds any known set of conflicting activations which is
+    /// activated in `cx` and passes the specified `filter`.
+    pub fn find_conflicting<F>(
+        &self,
+        cx: &Context,
+        dep: &Dependency,
+        filter: F,
+    ) -> Option<&HashMap<PackageId, ConflictReason>>
+    where
+        for<'r> F: FnMut(&'r &HashMap<PackageId, ConflictReason>) -> bool,
+    {
+        self.con_from_dep
+            .get(dep)?
+            .iter()
+            .filter(filter)
+            .find(|conflicting| cx.is_conflicting(None, conflicting))
+    }
+    pub fn conflicting(
+        &self,
+        cx: &Context,
+        dep: &Dependency,
+    ) -> Option<&HashMap<PackageId, ConflictReason>> {
+        self.find_conflicting(cx, dep, |_| true)
+    }
+
+    /// Add to the cache a conflict of the form:
+    /// `dep` is known to be unresolvable if
+    /// all the `PackageId` entries are activated
+    pub fn insert(&mut self, dep: &Dependency, con: &HashMap<PackageId, ConflictReason>) {
+        let past = self
+            .con_from_dep
+            .entry(dep.clone())
+            .or_insert_with(Vec::new);
+        if !past.contains(con) {
+            trace!(
+                "{} = \"{}\" adding a skip {:?}",
+                dep.package_name(),
+                dep.version_req(),
+                con
+            );
+            past.push(con.clone());
+            for c in con.keys() {
+                self.dep_from_pid
+                    .entry(c.clone())
+                    .or_insert_with(HashSet::new)
+                    .insert(dep.clone());
+            }
+        }
+    }
+    pub fn dependencies_conflicting_with(&self, pid: &PackageId) -> Option<&HashSet<Dependency>> {
+        self.dep_from_pid.get(pid)
+    }
+}
diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs
new file mode 100644 (file)
index 0000000..b39bbd1
--- /dev/null
@@ -0,0 +1,417 @@
+use std::collections::{HashMap, HashSet};
+use std::rc::Rc;
+
+use core::interning::InternedString;
+use core::{Dependency, FeatureValue, PackageId, SourceId, Summary};
+use util::CargoResult;
+use util::Graph;
+
+use super::types::RegistryQueryer;
+use super::types::{ActivateResult, ConflictReason, DepInfo, GraphNode, Method, RcList};
+
+pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
+pub use super::encode::{Metadata, WorkspaceResolve};
+pub use super::resolve::Resolve;
+
+// A `Context` is basically a bunch of local resolution information which is
+// kept around for all `BacktrackFrame` instances. As a result, this runs the
+// risk of being cloned *a lot* so we want to make this as cheap to clone as
+// possible.
+#[derive(Clone)]
+pub struct Context {
+    // TODO: Both this and the two maps below are super expensive to clone. We should
+    //       switch to persistent hash maps if we can at some point or otherwise
+    //       make these much cheaper to clone in general.
+    pub activations: Activations,
+    pub resolve_features: HashMap<PackageId, Rc<HashSet<InternedString>>>,
+    pub links: HashMap<InternedString, PackageId>,
+
+    // These are two cheaply-cloneable lists (O(1) clone) which are effectively
+    // hash maps but are built up as "construction lists". We'll iterate these
+    // at the very end and actually construct the map that we're making.
+    pub resolve_graph: RcList<GraphNode>,
+    pub resolve_replacements: RcList<(PackageId, PackageId)>,
+
+    // These warnings are printed after resolution.
+    pub warnings: RcList<String>,
+}
+
+pub type Activations = HashMap<(InternedString, SourceId), Rc<Vec<Summary>>>;
+
+impl Context {
+    pub fn new() -> Context {
+        Context {
+            resolve_graph: RcList::new(),
+            resolve_features: HashMap::new(),
+            links: HashMap::new(),
+            resolve_replacements: RcList::new(),
+            activations: HashMap::new(),
+            warnings: RcList::new(),
+        }
+    }
+
+    /// Activate this summary by inserting it into our list of known activations.
+    ///
+    /// Returns true if this summary with the given method is already activated.
+    pub fn flag_activated(&mut self, summary: &Summary, method: &Method) -> CargoResult<bool> {
+        let id = summary.package_id();
+        let prev = self
+            .activations
+            .entry((id.name(), id.source_id().clone()))
+            .or_insert_with(|| Rc::new(Vec::new()));
+        if !prev.iter().any(|c| c == summary) {
+            self.resolve_graph.push(GraphNode::Add(id.clone()));
+            if let Some(link) = summary.links() {
+                ensure!(
+                    self.links.insert(link, id.clone()).is_none(),
+                    "Attempting to resolve a with more then one crate with the links={}. \n\
+                     This will not build as is. Consider rebuilding the .lock file.",
+                    &*link
+                );
+            }
+            Rc::make_mut(prev).push(summary.clone());
+            return Ok(false);
+        }
+        debug!("checking if {} is already activated", summary.package_id());
+        let (features, use_default) = match *method {
+            Method::Everything
+            | Method::Required {
+                all_features: true, ..
+            } => return Ok(false),
+            Method::Required {
+                features,
+                uses_default_features,
+                ..
+            } => (features, uses_default_features),
+        };
+
+        let has_default_feature = summary.features().contains_key("default");
+        Ok(match self.resolve_features.get(id) {
+            Some(prev) => {
+                features.iter().all(|f| prev.contains(f))
+                    && (!use_default || prev.contains("default") || !has_default_feature)
+            }
+            None => features.is_empty() && (!use_default || !has_default_feature),
+        })
+    }
+
+    pub fn build_deps(
+        &mut self,
+        registry: &mut RegistryQueryer,
+        parent: Option<&Summary>,
+        candidate: &Summary,
+        method: &Method,
+    ) -> ActivateResult<Vec<DepInfo>> {
+        // First, figure out our set of dependencies based on the requested set
+        // of features. This also calculates what features we're going to enable
+        // for our own dependencies.
+        let deps = self.resolve_features(parent, candidate, method)?;
+
+        // Next, transform all dependencies into a list of possible candidates
+        // which can satisfy that dependency.
+        let mut deps = deps
+            .into_iter()
+            .map(|(dep, features)| {
+                let candidates = registry.query(&dep)?;
+                Ok((dep, candidates, Rc::new(features)))
+            })
+            .collect::<CargoResult<Vec<DepInfo>>>()?;
+
+        // Attempt to resolve dependencies with fewer candidates before trying
+        // dependencies with more candidates.  This way if the dependency with
+        // only one candidate can't be resolved we don't have to do a bunch of
+        // work before we figure that out.
+        deps.sort_by_key(|&(_, ref a, _)| a.len());
+
+        Ok(deps)
+    }
+
+    pub fn prev_active(&self, dep: &Dependency) -> &[Summary] {
+        self.activations
+            .get(&(dep.package_name(), dep.source_id().clone()))
+            .map(|v| &v[..])
+            .unwrap_or(&[])
+    }
+
+    fn is_active(&self, id: &PackageId) -> bool {
+        self.activations
+            .get(&(id.name(), id.source_id().clone()))
+            .map(|v| v.iter().any(|s| s.package_id() == id))
+            .unwrap_or(false)
+    }
+
+    /// Checks whether all of `parent` and the keys of `conflicting_activations`
+    /// are still active.
+    pub fn is_conflicting(
+        &self,
+        parent: Option<&PackageId>,
+        conflicting_activations: &HashMap<PackageId, ConflictReason>,
+    ) -> bool {
+        conflicting_activations
+            .keys()
+            .chain(parent)
+            .all(|id| self.is_active(id))
+    }
+
+    /// Return all dependencies and the features we want from them.
+    fn resolve_features<'b>(
+        &mut self,
+        parent: Option<&Summary>,
+        s: &'b Summary,
+        method: &'b Method,
+    ) -> ActivateResult<Vec<(Dependency, Vec<InternedString>)>> {
+        let dev_deps = match *method {
+            Method::Everything => true,
+            Method::Required { dev_deps, .. } => dev_deps,
+        };
+
+        // First, filter by dev-dependencies
+        let deps = s.dependencies();
+        let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
+
+        let reqs = build_requirements(s, method)?;
+        let mut ret = Vec::new();
+        let mut used_features = HashSet::new();
+        let default_dep = (false, Vec::new());
+
+        // Next, collect all actually enabled dependencies and their features.
+        for dep in deps {
+            // Skip optional dependencies, but not those enabled through a
+            // feature
+            if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) {
+                continue;
+            }
+            // So we want this dependency. Move the features we want from
+            // `feature_deps` to `ret` and register ourselves as using this
+            // name.
+            let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep);
+            used_features.insert(dep.name_in_toml());
+            let always_required = !dep.is_optional()
+                && !s
+                    .dependencies()
+                    .iter()
+                    .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml());
+            if always_required && base.0 {
+                self.warnings.push(format!(
+                    "Package `{}` does not have feature `{}`. It has a required dependency \
+                     with that name, but only optional dependencies can be used as features. \
+                     This is currently a warning to ease the transition, but it will become an \
+                     error in the future.",
+                    s.package_id(),
+                    dep.name_in_toml()
+                ));
+            }
+            let mut base = base.1.clone();
+            base.extend(dep.features().iter());
+            for feature in base.iter() {
+                if feature.contains('/') {
+                    return Err(
+                        format_err!("feature names may not contain slashes: `{}`", feature).into(),
+                    );
+                }
+            }
+            ret.push((dep.clone(), base));
+        }
+
+        // Any entries in `reqs.deps` which weren't used are bugs in that the
+        // package does not actually have those dependencies. We classified
+        // them as dependencies in the first place because there is no such
+        // feature, either.
+        let remaining = reqs
+            .deps
+            .keys()
+            .cloned()
+            .filter(|s| !used_features.contains(s))
+            .collect::<Vec<_>>();
+        if !remaining.is_empty() {
+            let features = remaining.join(", ");
+            return Err(match parent {
+                None => format_err!(
+                    "Package `{}` does not have these features: `{}`",
+                    s.package_id(),
+                    features
+                ).into(),
+                Some(p) => (
+                    p.package_id().clone(),
+                    ConflictReason::MissingFeatures(features),
+                ).into(),
+            });
+        }
+
+        // Record what list of features is active for this package.
+        if !reqs.used.is_empty() {
+            let pkgid = s.package_id();
+
+            let set = Rc::make_mut(
+                self.resolve_features
+                    .entry(pkgid.clone())
+                    .or_insert_with(|| Rc::new(HashSet::new())),
+            );
+
+            for feature in reqs.used {
+                set.insert(feature);
+            }
+        }
+
+        Ok(ret)
+    }
+
+    pub fn resolve_replacements(&self) -> HashMap<PackageId, PackageId> {
+        let mut replacements = HashMap::new();
+        let mut cur = &self.resolve_replacements;
+        while let Some(ref node) = cur.head {
+            let (k, v) = node.0.clone();
+            replacements.insert(k, v);
+            cur = &node.1;
+        }
+        replacements
+    }
+
+    pub fn graph(&self) -> Graph<PackageId, Vec<Dependency>> {
+        let mut graph: Graph<PackageId, Vec<Dependency>> = Graph::new();
+        let mut cur = &self.resolve_graph;
+        while let Some(ref node) = cur.head {
+            match node.0 {
+                GraphNode::Add(ref p) => graph.add(p.clone()),
+                GraphNode::Link(ref a, ref b, ref dep) => {
+                    graph.link(a.clone(), b.clone()).push(dep.clone());
+                }
+            }
+            cur = &node.1;
+        }
+        graph
+    }
+}
+
+/// Takes requested features for a single package from the input Method and
+/// recurses to find all requested features, dependencies and requested
+/// dependency features in a Requirements object, returning it to the resolver.
+fn build_requirements<'a, 'b: 'a>(
+    s: &'a Summary,
+    method: &'b Method,
+) -> CargoResult<Requirements<'a>> {
+    let mut reqs = Requirements::new(s);
+
+    match *method {
+        Method::Everything
+        | Method::Required {
+            all_features: true, ..
+        } => {
+            for key in s.features().keys() {
+                reqs.require_feature(*key)?;
+            }
+            for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
+                reqs.require_dependency(dep.name_in_toml());
+            }
+        }
+        Method::Required {
+            all_features: false,
+            features: requested,
+            ..
+        } => {
+            for &f in requested.iter() {
+                reqs.require_value(&FeatureValue::new(f, s))?;
+            }
+        }
+    }
+    match *method {
+        Method::Everything
+        | Method::Required {
+            uses_default_features: true,
+            ..
+        } => {
+            if s.features().contains_key("default") {
+                reqs.require_feature(InternedString::new("default"))?;
+            }
+        }
+        Method::Required {
+            uses_default_features: false,
+            ..
+        } => {}
+    }
+    Ok(reqs)
+}
+
+struct Requirements<'a> {
+    summary: &'a Summary,
+    // The deps map is a mapping of package name to list of features enabled.
+    // Each package should be enabled, and each package should have the
+    // specified set of features enabled. The boolean indicates whether this
+    // package was specifically requested (rather than just requesting features
+    // *within* this package).
+    deps: HashMap<InternedString, (bool, Vec<InternedString>)>,
+    // The used features set is the set of features which this local package had
+    // enabled, which is later used when compiling to instruct the code what
+    // features were enabled.
+    used: HashSet<InternedString>,
+    visited: HashSet<InternedString>,
+}
+
+impl<'r> Requirements<'r> {
+    fn new(summary: &Summary) -> Requirements {
+        Requirements {
+            summary,
+            deps: HashMap::new(),
+            used: HashSet::new(),
+            visited: HashSet::new(),
+        }
+    }
+
+    fn require_crate_feature(&mut self, package: InternedString, feat: InternedString) {
+        self.used.insert(package);
+        self.deps
+            .entry(package)
+            .or_insert((false, Vec::new()))
+            .1
+            .push(feat);
+    }
+
+    fn seen(&mut self, feat: InternedString) -> bool {
+        if self.visited.insert(feat) {
+            self.used.insert(feat);
+            false
+        } else {
+            true
+        }
+    }
+
+    fn require_dependency(&mut self, pkg: InternedString) {
+        if self.seen(pkg) {
+            return;
+        }
+        self.deps.entry(pkg).or_insert((false, Vec::new())).0 = true;
+    }
+
+    fn require_feature(&mut self, feat: InternedString) -> CargoResult<()> {
+        if feat.is_empty() || self.seen(feat) {
+            return Ok(());
+        }
+        for fv in self
+            .summary
+            .features()
+            .get(feat.as_str())
+            .expect("must be a valid feature")
+        {
+            match *fv {
+                FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => bail!(
+                    "Cyclic feature dependency: feature `{}` depends on itself",
+                    feat
+                ),
+                _ => {}
+            }
+            self.require_value(fv)?;
+        }
+        Ok(())
+    }
+
+    fn require_value<'f>(&mut self, fv: &'f FeatureValue) -> CargoResult<()> {
+        match fv {
+            FeatureValue::Feature(feat) => self.require_feature(*feat)?,
+            FeatureValue::Crate(dep) => self.require_dependency(*dep),
+            FeatureValue::CrateFeature(dep, dep_feat) => {
+                self.require_crate_feature(*dep, *dep_feat)
+            }
+        };
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs
new file mode 100644 (file)
index 0000000..33704ba
--- /dev/null
@@ -0,0 +1,420 @@
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::fmt;
+use std::str::FromStr;
+
+use serde::de;
+use serde::ser;
+
+use core::{Dependency, Package, PackageId, SourceId, Workspace};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+use util::{internal, Graph};
+
+use super::Resolve;
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct EncodableResolve {
+    package: Option<Vec<EncodableDependency>>,
+    /// `root` is optional to allow backward compatibility.
+    root: Option<EncodableDependency>,
+    metadata: Option<Metadata>,
+
+    #[serde(default, skip_serializing_if = "Patch::is_empty")]
+    patch: Patch,
+}
+
+#[derive(Serialize, Deserialize, Debug, Default)]
+struct Patch {
+    unused: Vec<EncodableDependency>,
+}
+
+pub type Metadata = BTreeMap<String, String>;
+
+impl EncodableResolve {
+    pub fn into_resolve(self, ws: &Workspace) -> CargoResult<Resolve> {
+        let path_deps = build_path_deps(ws);
+
+        let packages = {
+            let mut packages = self.package.unwrap_or_default();
+            if let Some(root) = self.root {
+                packages.insert(0, root);
+            }
+            packages
+        };
+
+        // `PackageId`s in the lock file don't include the `source` part
+        // for workspace members, so we reconstruct proper ids.
+        let live_pkgs = {
+            let mut live_pkgs = HashMap::new();
+            let mut all_pkgs = HashSet::new();
+            for pkg in packages.iter() {
+                let enc_id = EncodablePackageId {
+                    name: pkg.name.clone(),
+                    version: pkg.version.clone(),
+                    source: pkg.source.clone(),
+                };
+
+                if !all_pkgs.insert(enc_id.clone()) {
+                    bail!("package `{}` is specified twice in the lockfile", pkg.name);
+                }
+                let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+                    // We failed to find a local package in the workspace.
+                    // It must have been removed and should be ignored.
+                    None => {
+                        debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
+                        continue;
+                    }
+                    Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?,
+                };
+
+                assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
+            }
+            live_pkgs
+        };
+
+        let lookup_id = |enc_id: &EncodablePackageId| -> Option<PackageId> {
+            live_pkgs.get(enc_id).map(|&(ref id, _)| id.clone())
+        };
+
+        let g = {
+            let mut g = Graph::new();
+
+            for &(ref id, _) in live_pkgs.values() {
+                g.add(id.clone());
+            }
+
+            for &(ref id, pkg) in live_pkgs.values() {
+                let deps = match pkg.dependencies {
+                    Some(ref deps) => deps,
+                    None => continue,
+                };
+
+                for edge in deps.iter() {
+                    if let Some(to_depend_on) = lookup_id(edge) {
+                        g.link(id.clone(), to_depend_on);
+                    }
+                }
+            }
+            g
+        };
+
+        let replacements = {
+            let mut replacements = HashMap::new();
+            for &(ref id, pkg) in live_pkgs.values() {
+                if let Some(ref replace) = pkg.replace {
+                    assert!(pkg.dependencies.is_none());
+                    if let Some(replace_id) = lookup_id(replace) {
+                        replacements.insert(id.clone(), replace_id);
+                    }
+                }
+            }
+            replacements
+        };
+
+        let mut metadata = self.metadata.unwrap_or_default();
+
+        // Parse out all package checksums. After we do this we can be in a few
+        // situations:
+        //
+        // * We parsed no checksums. In this situation we're dealing with an old
+        //   lock file and we're gonna fill them all in.
+        // * We parsed some checksums, but not one for all packages listed. It
+        //   could have been the case that some were listed, then an older Cargo
+        //   client added more dependencies, and now we're going to fill in the
+        //   missing ones.
+        // * There are too many checksums listed, indicative of an older Cargo
+        //   client removing a package but not updating the checksums listed.
+        //
+        // In all of these situations they're part of normal usage, so we don't
+        // really worry about it. We just try to slurp up as many checksums as
+        // possible.
+        let mut checksums = HashMap::new();
+        let prefix = "checksum ";
+        let mut to_remove = Vec::new();
+        for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
+            to_remove.push(k.to_string());
+            let k = &k[prefix.len()..];
+            let enc_id: EncodablePackageId = k
+                .parse()
+                .chain_err(|| internal("invalid encoding of checksum in lockfile"))?;
+            let id = match lookup_id(&enc_id) {
+                Some(id) => id,
+                _ => continue,
+            };
+
+            let v = if v == "<none>" {
+                None
+            } else {
+                Some(v.to_string())
+            };
+            checksums.insert(id, v);
+        }
+
+        for k in to_remove {
+            metadata.remove(&k);
+        }
+
+        let mut unused_patches = Vec::new();
+        for pkg in self.patch.unused {
+            let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+                Some(src) => PackageId::new(&pkg.name, &pkg.version, src)?,
+                None => continue,
+            };
+            unused_patches.push(id);
+        }
+
+        Ok(Resolve::new(
+            g,
+            replacements,
+            HashMap::new(),
+            checksums,
+            metadata,
+            unused_patches,
+        ))
+    }
+}
+
+fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
+    // If a crate is *not* a path source, then we're probably in a situation
+    // such as `cargo install` with a lock file from a remote dependency. In
+    // that case we don't need to fixup any path dependencies (as they're not
+    // actually path dependencies any more), so we ignore them.
+    let members = ws
+        .members()
+        .filter(|p| p.package_id().source_id().is_path())
+        .collect::<Vec<_>>();
+
+    let mut ret = HashMap::new();
+    let mut visited = HashSet::new();
+    for member in members.iter() {
+        ret.insert(
+            member.package_id().name().to_string(),
+            member.package_id().source_id().clone(),
+        );
+        visited.insert(member.package_id().source_id().clone());
+    }
+    for member in members.iter() {
+        build_pkg(member, ws, &mut ret, &mut visited);
+    }
+    for deps in ws.root_patch().values() {
+        for dep in deps {
+            build_dep(dep, ws, &mut ret, &mut visited);
+        }
+    }
+    for &(_, ref dep) in ws.root_replace() {
+        build_dep(dep, ws, &mut ret, &mut visited);
+    }
+
+    return ret;
+
+    fn build_pkg(
+        pkg: &Package,
+        ws: &Workspace,
+        ret: &mut HashMap<String, SourceId>,
+        visited: &mut HashSet<SourceId>,
+    ) {
+        for dep in pkg.dependencies() {
+            build_dep(dep, ws, ret, visited);
+        }
+    }
+
+    fn build_dep(
+        dep: &Dependency,
+        ws: &Workspace,
+        ret: &mut HashMap<String, SourceId>,
+        visited: &mut HashSet<SourceId>,
+    ) {
+        let id = dep.source_id();
+        if visited.contains(id) || !id.is_path() {
+            return;
+        }
+        let path = match id.url().to_file_path() {
+            Ok(p) => p.join("Cargo.toml"),
+            Err(_) => return,
+        };
+        let pkg = match ws.load(&path) {
+            Ok(p) => p,
+            Err(_) => return,
+        };
+        ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone());
+        visited.insert(pkg.package_id().source_id().clone());
+        build_pkg(&pkg, ws, ret, visited);
+    }
+}
+
+impl Patch {
+    fn is_empty(&self) -> bool {
+        self.unused.is_empty()
+    }
+}
+
+#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)]
+pub struct EncodableDependency {
+    name: String,
+    version: String,
+    source: Option<SourceId>,
+    dependencies: Option<Vec<EncodablePackageId>>,
+    replace: Option<EncodablePackageId>,
+}
+
+#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
+pub struct EncodablePackageId {
+    name: String,
+    version: String,
+    source: Option<SourceId>,
+}
+
+impl fmt::Display for EncodablePackageId {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{} {}", self.name, self.version)?;
+        if let Some(ref s) = self.source {
+            write!(f, " ({})", s.to_url())?;
+        }
+        Ok(())
+    }
+}
+
+impl FromStr for EncodablePackageId {
+    type Err = CargoError;
+
+    fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
+        let mut s = s.splitn(3, ' ');
+        let name = s.next().unwrap();
+        let version = s
+            .next()
+            .ok_or_else(|| internal("invalid serialized PackageId"))?;
+        let source_id = match s.next() {
+            Some(s) => {
+                if s.starts_with('(') && s.ends_with(')') {
+                    Some(SourceId::from_url(&s[1..s.len() - 1])?)
+                } else {
+                    bail!("invalid serialized PackageId")
+                }
+            }
+            None => None,
+        };
+
+        Ok(EncodablePackageId {
+            name: name.to_string(),
+            version: version.to_string(),
+            source: source_id,
+        })
+    }
+}
+
+impl ser::Serialize for EncodablePackageId {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        s.collect_str(self)
+    }
+}
+
+impl<'de> de::Deserialize<'de> for EncodablePackageId {
+    fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        String::deserialize(d).and_then(|string| {
+            string
+                .parse::<EncodablePackageId>()
+                .map_err(de::Error::custom)
+        })
+    }
+}
+
+pub struct WorkspaceResolve<'a, 'cfg: 'a> {
+    pub ws: &'a Workspace<'cfg>,
+    pub resolve: &'a Resolve,
+}
+
+impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        let mut ids: Vec<_> = self.resolve.iter().collect();
+        ids.sort();
+
+        let encodable = ids
+            .iter()
+            .filter_map(|&id| Some(encodable_resolve_node(id, self.resolve)))
+            .collect::<Vec<_>>();
+
+        let mut metadata = self.resolve.metadata().clone();
+
+        for id in ids.iter().filter(|id| !id.source_id().is_path()) {
+            let checksum = match self.resolve.checksums()[*id] {
+                Some(ref s) => &s[..],
+                None => "<none>",
+            };
+            let id = encodable_package_id(id);
+            metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
+        }
+
+        let metadata = if metadata.is_empty() {
+            None
+        } else {
+            Some(metadata)
+        };
+
+        let patch = Patch {
+            unused: self
+                .resolve
+                .unused_patches()
+                .iter()
+                .map(|id| EncodableDependency {
+                    name: id.name().to_string(),
+                    version: id.version().to_string(),
+                    source: encode_source(id.source_id()),
+                    dependencies: None,
+                    replace: None,
+                })
+                .collect(),
+        };
+        EncodableResolve {
+            package: Some(encodable),
+            root: None,
+            metadata,
+            patch,
+        }.serialize(s)
+    }
+}
+
+fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency {
+    let (replace, deps) = match resolve.replacement(id) {
+        Some(id) => (Some(encodable_package_id(id)), None),
+        None => {
+            let mut deps = resolve
+                .deps_not_replaced(id)
+                .map(encodable_package_id)
+                .collect::<Vec<_>>();
+            deps.sort();
+            (None, Some(deps))
+        }
+    };
+
+    EncodableDependency {
+        name: id.name().to_string(),
+        version: id.version().to_string(),
+        source: encode_source(id.source_id()),
+        dependencies: deps,
+        replace,
+    }
+}
+
+pub fn encodable_package_id(id: &PackageId) -> EncodablePackageId {
+    EncodablePackageId {
+        name: id.name().to_string(),
+        version: id.version().to_string(),
+        source: encode_source(id.source_id()).map(|s| s.with_precise(None)),
+    }
+}
+
+fn encode_source(id: &SourceId) -> Option<SourceId> {
+    if id.is_path() {
+        None
+    } else {
+        Some(id.clone())
+    }
+}
diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs
new file mode 100644 (file)
index 0000000..74979c6
--- /dev/null
@@ -0,0 +1,269 @@
+use std::collections::HashMap;
+use std::fmt;
+
+use core::{Dependency, PackageId, Registry, Summary};
+use failure::{Error, Fail};
+use semver;
+use util::config::Config;
+use util::lev_distance::lev_distance;
+
+use super::context::Context;
+use super::types::{Candidate, ConflictReason};
+
+/// Error during resolution providing a path of `PackageId`s.
+pub struct ResolveError {
+    // The underlying failure; `Fail`, `Debug` and `Display` all delegate to it.
+    cause: Error,
+    // Chain of packages from the failing requirement up to the root.
+    package_path: Vec<PackageId>,
+}
+
+impl ResolveError {
+    /// Wraps `cause` together with the dependency chain that led to it.
+    pub fn new<E: Into<Error>>(cause: E, package_path: Vec<PackageId>) -> Self {
+        Self {
+            cause: cause.into(),
+            package_path,
+        }
+    }
+
+    /// Returns a path of packages from the package whose requirements could not be resolved up to
+    /// the root.
+    pub fn package_path(&self) -> &[PackageId] {
+        &self.package_path
+    }
+}
+
+impl Fail for ResolveError {
+    // Delegate to the wrapped error's cause chain so this wrapper does not
+    // show up as an extra link when callers walk causes.
+    fn cause(&self) -> Option<&Fail> {
+        self.cause.as_fail().cause()
+    }
+}
+
+// Forward `Debug` to the wrapped error so diagnostics are unchanged by the wrapper.
+impl fmt::Debug for ResolveError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.cause.fmt(f)
+    }
+}
+
+// Forward `Display` to the wrapped error so user-facing messages are unchanged.
+impl fmt::Display for ResolveError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.cause.fmt(f)
+    }
+}
+
+pub(super) fn activation_error(
+    cx: &Context,
+    registry: &mut Registry,
+    parent: &Summary,
+    dep: &Dependency,
+    conflicting_activations: &HashMap<PackageId, ConflictReason>,
+    candidates: &[Candidate],
+    config: Option<&Config>,
+) -> ResolveError {
+    let graph = cx.graph();
+    let to_resolve_err = |err| {
+        ResolveError::new(
+            err,
+            graph
+                .path_to_top(parent.package_id())
+                .into_iter()
+                .cloned()
+                .collect(),
+        )
+    };
+
+    if !candidates.is_empty() {
+        let mut msg = format!("failed to select a version for `{}`.", dep.package_name());
+        msg.push_str("\n    ... required by ");
+        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));
+
+        msg.push_str("\nversions that meet the requirements `");
+        msg.push_str(&dep.version_req().to_string());
+        msg.push_str("` are: ");
+        msg.push_str(
+            &candidates
+                .iter()
+                .map(|v| v.summary.version())
+                .map(|v| v.to_string())
+                .collect::<Vec<_>>()
+                .join(", "),
+        );
+
+        let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect();
+        conflicting_activations.sort_unstable();
+        let (links_errors, mut other_errors): (Vec<_>, Vec<_>) = conflicting_activations
+            .drain(..)
+            .rev()
+            .partition(|&(_, r)| r.is_links());
+
+        for &(p, r) in links_errors.iter() {
+            if let ConflictReason::Links(ref link) = *r {
+                msg.push_str("\n\nthe package `");
+                msg.push_str(&*dep.package_name());
+                msg.push_str("` links to the native library `");
+                msg.push_str(link);
+                msg.push_str("`, but it conflicts with a previous package which links to `");
+                msg.push_str(link);
+                msg.push_str("` as well:\n");
+            }
+            msg.push_str(&describe_path(&graph.path_to_top(p)));
+        }
+
+        let (features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors
+            .drain(..)
+            .partition(|&(_, r)| r.is_missing_features());
+
+        for &(p, r) in features_errors.iter() {
+            if let ConflictReason::MissingFeatures(ref features) = *r {
+                msg.push_str("\n\nthe package `");
+                msg.push_str(&*p.name());
+                msg.push_str("` depends on `");
+                msg.push_str(&*dep.package_name());
+                msg.push_str("`, with features: `");
+                msg.push_str(features);
+                msg.push_str("` but `");
+                msg.push_str(&*dep.package_name());
+                msg.push_str("` does not have these features.\n");
+            }
+            // p == parent so the full path is redundant.
+        }
+
+        if !other_errors.is_empty() {
+            msg.push_str(
+                "\n\nall possible versions conflict with \
+                 previously selected packages.",
+            );
+        }
+
+        for &(p, _) in other_errors.iter() {
+            msg.push_str("\n\n  previously selected ");
+            msg.push_str(&describe_path(&graph.path_to_top(p)));
+        }
+
+        msg.push_str("\n\nfailed to select a version for `");
+        msg.push_str(&*dep.package_name());
+        msg.push_str("` which could resolve this conflict");
+
+        return to_resolve_err(format_err!("{}", msg));
+    }
+
+    // We didn't actually find any candidates, so we need to
+    // give an error message that nothing was found.
+    //
+    // Maybe the user mistyped the ver_req? Like `dep="2"` when `dep="0.2"`
+    // was meant. So we re-query the registry with `deb="*"` so we can
+    // list a few versions that were actually found.
+    let all_req = semver::VersionReq::parse("*").unwrap();
+    let mut new_dep = dep.clone();
+    new_dep.set_version_req(all_req);
+    let mut candidates = match registry.query_vec(&new_dep, false) {
+        Ok(candidates) => candidates,
+        Err(e) => return to_resolve_err(e),
+    };
+    candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
+
+    let mut msg = if !candidates.is_empty() {
+        let versions = {
+            let mut versions = candidates
+                .iter()
+                .take(3)
+                .map(|cand| cand.version().to_string())
+                .collect::<Vec<_>>();
+
+            if candidates.len() > 3 {
+                versions.push("...".into());
+            }
+
+            versions.join(", ")
+        };
+
+        let mut msg = format!(
+            "failed to select a version for the requirement `{} = \"{}\"`\n  \
+             candidate versions found which didn't match: {}\n  \
+             location searched: {}\n",
+            dep.package_name(),
+            dep.version_req(),
+            versions,
+            registry.describe_source(dep.source_id()),
+        );
+        msg.push_str("required by ");
+        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));
+
+        // If we have a path dependency with a locked version, then this may
+        // indicate that we updated a sub-package and forgot to run `cargo
+        // update`. In this case try to print a helpful error!
+        if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') {
+            msg.push_str(
+                "\nconsider running `cargo update` to update \
+                 a path dependency's locked version",
+            );
+        }
+
+        if registry.is_replaced(dep.source_id()) {
+            msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?");
+        }
+
+        msg
+    } else {
+        // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
+        // was meant. So we try asking the registry for a `fuzzy` search for suggestions.
+        let mut candidates = Vec::new();
+        if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.name()), true) {
+            return to_resolve_err(e);
+        };
+        candidates.sort_unstable();
+        candidates.dedup();
+        let mut candidates: Vec<_> = candidates
+            .iter()
+            .map(|n| (lev_distance(&*new_dep.package_name(), &*n), n))
+            .filter(|&(d, _)| d < 4)
+            .collect();
+        candidates.sort_by_key(|o| o.0);
+        let mut msg = format!(
+            "no matching package named `{}` found\n\
+             location searched: {}\n",
+            dep.package_name(),
+            dep.source_id()
+        );
+        if !candidates.is_empty() {
+            let mut names = candidates
+                .iter()
+                .take(3)
+                .map(|c| c.1.as_str())
+                .collect::<Vec<_>>();
+
+            if candidates.len() > 3 {
+                names.push("...");
+            }
+
+            msg.push_str("did you mean: ");
+            msg.push_str(&names.join(", "));
+            msg.push_str("\n");
+        }
+        msg.push_str("required by ");
+        msg.push_str(&describe_path(&graph.path_to_top(parent.package_id())));
+
+        msg
+    };
+
+    if let Some(config) = config {
+        if config.cli_unstable().offline {
+            msg.push_str(
+                "\nAs a reminder, you're using offline mode (-Z offline) \
+                 which can sometimes cause surprising resolution failures, \
+                 if this error is too confusing you may with to retry \
+                 without the offline flag.",
+            );
+        }
+    }
+
+    to_resolve_err(format_err!("{}", msg))
+}
+
+/// Returns String representation of dependency chain for a particular `pkgid`.
+pub(super) fn describe_path(path: &[&PackageId]) -> String {
+    use std::fmt::Write;
+    let mut dep_path_desc = format!("package `{}`", path[0]);
+    for dep in path[1..].iter() {
+        write!(dep_path_desc, "\n    ... which is depended on by `{}`", dep).unwrap();
+    }
+    dep_path_desc
+}
diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs
new file mode 100644 (file)
index 0000000..196c250
--- /dev/null
@@ -0,0 +1,908 @@
+//! Resolution of the entire dependency graph for a crate
+//!
+//! This module implements the core logic in taking the world of crates and
+//! constraints and creating a resolved graph with locked versions for all
+//! crates and their dependencies. This is separate from the registry module
+//! which is more worried about discovering crates from various sources, this
+//! module just uses the Registry trait as a source to learn about crates from.
+//!
+//! Actually solving a constraint graph is an NP-hard problem. This algorithm
+//! is basically a nice heuristic to make sure we get roughly the best answer
+//! most of the time. The constraints that we're working with are:
+//!
+//! 1. Each crate can have any number of dependencies. Each dependency can
+//!    declare a version range that it is compatible with.
+//! 2. Crates can be activated with multiple version (e.g. show up in the
+//!    dependency graph twice) so long as each pairwise instance have
+//!    semver-incompatible versions.
+//!
+//! The algorithm employed here is fairly simple, we simply do a DFS, activating
+//! the "newest crate" (highest version) first and then going to the next
+//! option. The heuristics we employ are:
+//!
+//! * Never try to activate a crate version which is incompatible. This means we
+//!   only try crates which will actually satisfy a dependency and we won't ever
+//!   try to activate a crate that's semver compatible with something else
+//!   activated (as we're only allowed to have one) nor try to activate a crate
+//!   that has the same links attribute as something else
+//!   activated.
+//! * Always try to activate the highest version crate first. The default
+//!   dependency in Cargo (e.g. when you write `foo = "0.1.2"`) is
+//!   semver-compatible, so selecting the highest version possible will allow us
+//!   to hopefully satisfy as many dependencies at once.
+//!
+//! Beyond that, what's implemented below is just a naive backtracking version
+//! which should in theory try all possible combinations of dependencies and
+//! versions to see if one works. The first resolution that works causes
+//! everything to bail out immediately and return success, and only if *nothing*
+//! works do we actually return an error up the stack.
+//!
+//! ## Performance
+//!
+//! Note that this is a relatively performance-critical portion of Cargo. The
+//! data that we're processing is proportional to the size of the dependency
+//! graph, which can often be quite large (e.g. take a look at Servo). To make
+//! matters worse the DFS algorithm we're implemented is inherently quite
+//! inefficient. When we add the requirement of backtracking on top it means
+//! that we're implementing something that probably shouldn't be allocating all
+//! over the place.
+
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::mem;
+use std::rc::Rc;
+use std::time::{Duration, Instant};
+
+use semver;
+
+use core::interning::InternedString;
+use core::PackageIdSpec;
+use core::{Dependency, PackageId, Registry, Summary};
+use util::config::Config;
+use util::errors::CargoResult;
+use util::profile;
+
+use self::context::{Activations, Context};
+use self::types::{ActivateError, ActivateResult, Candidate, ConflictReason, DepsFrame, GraphNode};
+use self::types::{RcVecIter, RegistryQueryer, RemainingDeps, ResolverProgress};
+
+pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
+pub use self::encode::{Metadata, WorkspaceResolve};
+pub use self::resolve::Resolve;
+pub use self::types::Method;
+pub use self::errors::ResolveError;
+
+mod conflict_cache;
+mod context;
+mod encode;
+mod resolve;
+mod types;
+mod errors;
+
+/// Builds the list of all packages required to build the first argument.
+///
+/// * `summaries` - the list of package summaries along with how to resolve
+///   their features. This is a list of all top-level packages that are intended
+///   to be part of the lock file (resolve output). These typically are a list
+///   of all workspace members.
+///
+/// * `replacements` - this is a list of `[replace]` directives found in the
+///   root of the workspace. The list here is a `PackageIdSpec` of what to
+///   replace and a `Dependency` to replace that with. In general it's not
+///   recommended to use `[replace]` any more and use `[patch]` instead, which
+///   is supported elsewhere.
+///
+/// * `registry` - this is the source from which all package summaries are
+///   loaded. It's expected that this is extensively configured ahead of time
+///   and is idempotent with our requests to it (aka returns the same results
+///   for the same query every time). Typically this is an instance of a
+///   `PackageRegistry`.
+///
+/// * `try_to_use` - this is a list of package ids which were previously found
+///   in the lock file. We heuristically prefer the ids listed in `try_to_use`
+///   when sorting candidates to activate, but otherwise this isn't used
+///   anywhere else.
+///
+/// * `config` - a location to print warnings and such, or `None` if no warnings
+///   should be printed
+///
+/// * `print_warnings` - whether or not to print backwards-compatibility
+///   warnings and such
+pub fn resolve(
+    summaries: &[(Summary, Method)],
+    replacements: &[(PackageIdSpec, Dependency)],
+    registry: &mut Registry,
+    try_to_use: &HashSet<&PackageId>,
+    config: Option<&Config>,
+    print_warnings: bool,
+) -> CargoResult<Resolve> {
+    let cx = Context::new();
+    let _p = profile::start("resolving");
+    // `-Z minimal-versions` changes candidate preference in the queryer;
+    // absent a config we default it off.
+    let minimal_versions = match config {
+        Some(config) => config.cli_unstable().minimal_versions,
+        None => false,
+    };
+    let mut registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions);
+    // Run the backtracking DFS; on success `cx` holds all activated packages.
+    let cx = activate_deps_loop(cx, &mut registry, summaries, config)?;
+
+    // Record each activated package's checksum (if any) for the lock file.
+    let mut cksums = HashMap::new();
+    for summary in cx.activations.values().flat_map(|v| v.iter()) {
+        let cksum = summary.checksum().map(|s| s.to_string());
+        cksums.insert(summary.package_id().clone(), cksum);
+    }
+    let resolve = Resolve::new(
+        cx.graph(),
+        cx.resolve_replacements(),
+        // Flatten the activated feature sets into plain string lists.
+        cx.resolve_features
+            .iter()
+            .map(|(k, v)| (k.clone(), v.iter().map(|x| x.to_string()).collect()))
+            .collect(),
+        cksums,
+        BTreeMap::new(),
+        Vec::new(),
+    );
+
+    // Post-resolution sanity checks before handing the graph back.
+    check_cycles(&resolve, &cx.activations)?;
+    check_duplicate_pkgs_in_lockfile(&resolve)?;
+    trace!("resolved: {:?}", resolve);
+
+    // If we have a shell, emit warnings about required deps used as feature.
+    if let Some(config) = config {
+        if print_warnings {
+            let mut shell = config.shell();
+            // `cx.warnings` is a cons-style list; walk it head to tail.
+            let mut warnings = &cx.warnings;
+            while let Some(ref head) = warnings.head {
+                shell.warn(&head.0)?;
+                warnings = &head.1;
+            }
+        }
+    }
+
+    Ok(resolve)
+}
+
+/// Recursively activates the dependencies for `top`, in depth-first order,
+/// backtracking across possible candidates for each dependency as necessary.
+///
+/// If all dependencies can be activated and resolved to a version in the
+/// dependency graph, cx.resolve is returned.
+fn activate_deps_loop(
+    mut cx: Context,
+    registry: &mut RegistryQueryer,
+    summaries: &[(Summary, Method)],
+    config: Option<&Config>,
+) -> CargoResult<Context> {
+    let mut backtrack_stack = Vec::new();
+    let mut remaining_deps = RemainingDeps::new();
+
+    // `past_conflicting_activations` is a cache of the reasons for each time we
+    // backtrack.
+    let mut past_conflicting_activations = conflict_cache::ConflictCache::new();
+
+    // Activate all the initial summaries to kick off some work.
+    for &(ref summary, ref method) in summaries {
+        debug!("initial activation: {}", summary.package_id());
+        let candidate = Candidate {
+            summary: summary.clone(),
+            replace: None,
+        };
+        let res = activate(&mut cx, registry, None, candidate, method);
+        match res {
+            Ok(Some((frame, _))) => remaining_deps.push(frame),
+            Ok(None) => (),
+            Err(ActivateError::Fatal(e)) => return Err(e),
+            Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"),
+        }
+    }
+
+    let mut printed = ResolverProgress::new();
+
+    // Main resolution loop, this is the workhorse of the resolution algorithm.
+    //
+    // You'll note that a few stacks are maintained on the side, which might
+    // seem odd when this algorithm looks like it could be implemented
+    // recursively. While correct, this is implemented iteratively to avoid
+    // blowing the stack (the recursion depth is proportional to the size of the
+    // input).
+    //
+    // The general sketch of this loop is to run until there are no dependencies
+    // left to activate, and for each dependency to attempt to activate all of
+    // its own dependencies in turn. The `backtrack_stack` is a side table of
+    // backtracking states where if we hit an error we can return to in order to
+    // attempt to continue resolving.
+    while let Some((just_here_for_the_error_messages, frame)) =
+        remaining_deps.pop_most_constrained()
+    {
+        let (mut parent, (mut cur, (mut dep, candidates, mut features))) = frame;
+
+        // If we spend a lot of time here (we shouldn't in most cases) then give
+        // a bit of a visual indicator as to what we're doing.
+        printed.shell_status(config)?;
+
+        trace!(
+            "{}[{}]>{} {} candidates",
+            parent.name(),
+            cur,
+            dep.package_name(),
+            candidates.len()
+        );
+        trace!(
+            "{}[{}]>{} {} prev activations",
+            parent.name(),
+            cur,
+            dep.package_name(),
+            cx.prev_active(&dep).len()
+        );
+
+        let just_here_for_the_error_messages = just_here_for_the_error_messages
+            && past_conflicting_activations
+                .conflicting(&cx, &dep)
+                .is_some();
+
+        let mut remaining_candidates = RemainingCandidates::new(&candidates);
+
+        // `conflicting_activations` stores all the reasons we were unable to
+        // activate candidates. One of these reasons will have to go away for
+        // backtracking to find a place to restart. It is also the list of
+        // things to explain in the error message if we fail to resolve.
+        //
+        // This is a map of package id to a reason why that packaged caused a
+        // conflict for us.
+        let mut conflicting_activations = HashMap::new();
+
+        // When backtracking we don't fully update `conflicting_activations`
+        // especially for the cases that we didn't make a backtrack frame in the
+        // first place.  This `backtracked` var stores whether we are continuing
+        // from a restored backtrack frame so that we can skip caching
+        // `conflicting_activations` in `past_conflicting_activations`
+        let mut backtracked = false;
+
+        loop {
+            let next = remaining_candidates.next(&mut conflicting_activations, &cx, &dep);
+
+            let (candidate, has_another) = next.ok_or(()).or_else(|_| {
+                // If we get here then our `remaining_candidates` was just
+                // exhausted, so `dep` failed to activate.
+                //
+                // It's our job here to backtrack, if possible, and find a
+                // different candidate to activate. If we can't find any
+                // candidates whatsoever then it's time to bail entirely.
+                trace!(
+                    "{}[{}]>{} -- no candidates",
+                    parent.name(),
+                    cur,
+                    dep.package_name()
+                );
+
+                // Use our list of `conflicting_activations` to add to our
+                // global list of past conflicting activations, effectively
+                // globally poisoning `dep` if `conflicting_activations` ever
+                // shows up again. We'll use the `past_conflicting_activations`
+                // below to determine if a dependency is poisoned and skip as
+                // much work as possible.
+                //
+                // If we're only here for the error messages then there's no
+                // need to try this as this dependency is already known to be
+                // bad.
+                //
+                // As we mentioned above with the `backtracked` variable if this
+                // local is set to `true` then our `conflicting_activations` may
+                // not be right, so we can't push into our global cache.
+                if !just_here_for_the_error_messages && !backtracked {
+                    past_conflicting_activations.insert(&dep, &conflicting_activations);
+                }
+
+                match find_candidate(
+                    &mut backtrack_stack,
+                    &parent,
+                    backtracked,
+                    &conflicting_activations,
+                ) {
+                    Some((candidate, has_another, frame)) => {
+                        // Reset all of our local variables used with the
+                        // contents of `frame` to complete our backtrack.
+                        cur = frame.cur;
+                        cx = frame.context;
+                        remaining_deps = frame.remaining_deps;
+                        remaining_candidates = frame.remaining_candidates;
+                        parent = frame.parent;
+                        dep = frame.dep;
+                        features = frame.features;
+                        conflicting_activations = frame.conflicting_activations;
+                        backtracked = true;
+                        Ok((candidate, has_another))
+                    }
+                    None => {
+                        debug!("no candidates found");
+                        Err(errors::activation_error(
+                            &cx,
+                            registry.registry,
+                            &parent,
+                            &dep,
+                            &conflicting_activations,
+                            &candidates,
+                            config,
+                        ))
+                    }
+                }
+            })?;
+
+            // If we're only here for the error messages then we know that this
+            // activation will fail one way or another. To that end if we've got
+            // more candidates we want to fast-forward to the last one as
+            // otherwise we'll just backtrack here anyway (helping us to skip
+            // some work).
+            if just_here_for_the_error_messages && !backtracked && has_another {
+                continue;
+            }
+
+            // We have a `candidate`. Create a `BacktrackFrame` so we can add it
+            // to the `backtrack_stack` later if activation succeeds.
+            //
+            // Note that if we don't actually have another candidate then there
+            // will be nothing to backtrack to so we skip construction of the
+            // frame. This is a relatively important optimization as a number of
+            // the `clone` calls below can be quite expensive, so we avoid them
+            // if we can.
+            let backtrack = if has_another {
+                Some(BacktrackFrame {
+                    cur,
+                    context: Context::clone(&cx),
+                    remaining_deps: remaining_deps.clone(),
+                    remaining_candidates: remaining_candidates.clone(),
+                    parent: Summary::clone(&parent),
+                    dep: Dependency::clone(&dep),
+                    features: Rc::clone(&features),
+                    conflicting_activations: conflicting_activations.clone(),
+                })
+            } else {
+                None
+            };
+
+            let pid = candidate.summary.package_id().clone();
+            let method = Method::Required {
+                dev_deps: false,
+                features: &features,
+                all_features: false,
+                uses_default_features: dep.uses_default_features(),
+            };
+            trace!(
+                "{}[{}]>{} trying {}",
+                parent.name(),
+                cur,
+                dep.package_name(),
+                candidate.summary.version()
+            );
+            let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, &method);
+
+            let successfully_activated = match res {
+                // Success! We've now activated our `candidate` in our context
+                // and we're almost ready to move on. We may want to scrap this
+                // frame in the end if it looks like it's not going to end well,
+                // so figure that out here.
+                Ok(Some((mut frame, dur))) => {
+                    printed.elapsed(dur);
+
+                    // Our `frame` here is a new package with its own list of
+                    // dependencies. Do a sanity check here of all those
+                    // dependencies by cross-referencing our global
+                    // `past_conflicting_activations`. Recall that map is a
+                    // global cache which lists sets of packages where, when
+                    // activated, the dependency is unresolvable.
+                    //
+                    // If any our our frame's dependencies fit in that bucket,
+                    // aka known unresolvable, then we extend our own set of
+                    // conflicting activations with theirs. We can do this
+                    // because the set of conflicts we found implies the
+                    // dependency can't be activated which implies that we
+                    // ourselves can't be activated, so we know that they
+                    // conflict with us.
+                    let mut has_past_conflicting_dep = just_here_for_the_error_messages;
+                    if !has_past_conflicting_dep {
+                        if let Some(conflicting) = frame
+                            .remaining_siblings
+                            .clone()
+                            .filter_map(|(_, (ref new_dep, _, _))| {
+                                past_conflicting_activations.conflicting(&cx, new_dep)
+                            }).next()
+                        {
+                            // If one of our deps is known unresolvable
+                            // then we will not succeed.
+                            // How ever if we are part of the reason that
+                            // one of our deps conflicts then
+                            // we can make a stronger statement
+                            // because we will definitely be activated when
+                            // we try our dep.
+                            conflicting_activations.extend(
+                                conflicting
+                                    .iter()
+                                    .filter(|&(p, _)| p != &pid)
+                                    .map(|(p, r)| (p.clone(), r.clone())),
+                            );
+
+                            has_past_conflicting_dep = true;
+                        }
+                    }
+                    // If any of `remaining_deps` are known unresolvable with
+                    // us activated, then we extend our own set of
+                    // conflicting activations with theirs and its parent. We can do this
+                    // because the set of conflicts we found implies the
+                    // dependency can't be activated which implies that we
+                    // ourselves are incompatible with that dep, so we know that deps
+                    // parent conflict with us.
+                    if !has_past_conflicting_dep {
+                        if let Some(known_related_bad_deps) =
+                            past_conflicting_activations.dependencies_conflicting_with(&pid)
+                        {
+                            if let Some((other_parent, conflict)) = remaining_deps
+                                .iter()
+                                // for deps related to us
+                                .filter(|&(_, ref other_dep)| {
+                                    known_related_bad_deps.contains(other_dep)
+                                }).filter_map(|(other_parent, other_dep)| {
+                                    past_conflicting_activations
+                                        .find_conflicting(&cx, &other_dep, |con| {
+                                            con.contains_key(&pid)
+                                        }).map(|con| (other_parent, con))
+                                }).next()
+                            {
+                                let rel = conflict.get(&pid).unwrap().clone();
+
+                                // The conflict we found is
+                                // "other dep will not succeed if we are activated."
+                                // We want to add
+                                // "our dep will not succeed if other dep is in remaining_deps"
+                                // but that is not how the cache is set up.
+                                // So we add the less general but much faster,
+                                // "our dep will not succeed if other dep's parent is activated".
+                                conflicting_activations.extend(
+                                    conflict
+                                        .iter()
+                                        .filter(|&(p, _)| p != &pid)
+                                        .map(|(p, r)| (p.clone(), r.clone())),
+                                );
+                                conflicting_activations.insert(other_parent.clone(), rel);
+                                has_past_conflicting_dep = true;
+                            }
+                        }
+                    }
+
+                    // Ok if we're in a "known failure" state for this frame we
+                    // may want to skip it altogether though. We don't want to
+                    // skip it though in the case that we're displaying error
+                    // messages to the user!
+                    //
+                    // Here we need to figure out if the user will see if we
+                    // skipped this candidate (if it's known to fail, aka has a
+                    // conflicting dep and we're the last candidate). If we're
+                    // here for the error messages, we can't skip it (but we can
+                    // prune extra work). If we don't have any candidates in our
+                    // backtrack stack then we're the last line of defense, so
+                    // we'll want to present an error message for sure.
+                    let activate_for_error_message = has_past_conflicting_dep && !has_another && {
+                        just_here_for_the_error_messages || {
+                            find_candidate(
+                                &mut backtrack_stack.clone(),
+                                &parent,
+                                backtracked,
+                                &conflicting_activations,
+                            ).is_none()
+                        }
+                    };
+
+                    // If we're only here for the error messages then we know
+                    // one of our candidate deps will fail, meaning we will
+                    // fail and that none of the backtrack frames will find a
+                    // candidate that will help. Consequently let's clean up the
+                    // no longer needed backtrack frames.
+                    if activate_for_error_message {
+                        backtrack_stack.clear();
+                    }
+
+                    // If we don't know for a fact that we'll fail or if we're
+                    // just here for the error message then we push this frame
+                    // onto our list of to-be-resolve, which will generate more
+                    // work for us later on.
+                    //
+                    // Otherwise we're guaranteed to fail and were not here for
+                    // error messages, so we skip work and don't push anything
+                    // onto our stack.
+                    frame.just_for_error_messages = has_past_conflicting_dep;
+                    if !has_past_conflicting_dep || activate_for_error_message {
+                        remaining_deps.push(frame);
+                        true
+                    } else {
+                        trace!(
+                            "{}[{}]>{} skipping {} ",
+                            parent.name(),
+                            cur,
+                            dep.package_name(),
+                            pid.version()
+                        );
+                        false
+                    }
+                }
+
+                // This candidate's already activated, so there's no extra work
+                // for us to do. Let's keep going.
+                Ok(None) => true,
+
+                // We failed with a super fatal error (like a network error), so
+                // bail out as quickly as possible as we can't reliably
+                // backtrack from errors like these
+                Err(ActivateError::Fatal(e)) => return Err(e),
+
+                // We failed due to a bland conflict, bah! Record this in our
+                // frame's list of conflicting activations as to why this
+                // candidate failed, and then move on.
+                Err(ActivateError::Conflict(id, reason)) => {
+                    conflicting_activations.insert(id, reason);
+                    false
+                }
+            };
+
+            // If we've successfully activated then save off the backtrack frame
+            // if one was created, and otherwise break out of the inner
+            // activation loop as we're ready to move to the next dependency
+            if successfully_activated {
+                backtrack_stack.extend(backtrack);
+                break;
+            }
+
+            // We've failed to activate this dependency, oh dear! Our call to
+            // `activate` above may have altered our `cx` local variable, so
+            // restore it back if we've got a backtrack frame.
+            //
+            // If we don't have a backtrack frame then we're just using the `cx`
+            // for error messages anyway so we can live with a little
+            // imprecision.
+            if let Some(b) = backtrack {
+                cx = b.context;
+            }
+        }
+
+        // Ok phew, that loop was a big one! If we've broken out then we've
+        // successfully activated a candidate. Our stacks are all in place that
+        // we're ready to move on to the next dependency that needs activation,
+        // so loop back to the top of the function here.
+    }
+
+    Ok(cx)
+}
+
+/// Attempts to activate the summary `candidate` in the context `cx`.
+///
+/// This function will pull dependency summaries from the registry provided, and
+/// the dependencies of the package will be determined by the `method` provided.
+/// If `candidate` was activated, this function returns the dependency frame to
+/// iterate through next.
+///
+/// Returns `Ok(None)` when the candidate (or its replacement) was already
+/// activated and no further work is needed. Otherwise returns the new
+/// `DepsFrame` together with the time spent querying its dependencies.
+fn activate(
+    cx: &mut Context,
+    registry: &mut RegistryQueryer,
+    parent: Option<(&Summary, &Dependency)>,
+    candidate: Candidate,
+    method: &Method,
+) -> ActivateResult<Option<(DepsFrame, Duration)>> {
+    // Record the parent -> candidate edge in the resolve graph, if this
+    // activation has a parent (the root does not).
+    if let Some((parent, dep)) = parent {
+        cx.resolve_graph.push(GraphNode::Link(
+            parent.package_id().clone(),
+            candidate.summary.package_id().clone(),
+            dep.clone(),
+        ));
+    }
+
+    // `flag_activated` reports whether this summary was *already* activated
+    // (and records the activation as a side effect).
+    let activated = cx.flag_activated(&candidate.summary, method)?;
+
+    let candidate = match candidate.replace {
+        Some(replace) => {
+            cx.resolve_replacements.push((
+                candidate.summary.package_id().clone(),
+                replace.package_id().clone(),
+            ));
+            // Note the order here: `flag_activated` is evaluated first so the
+            // replacement gets flagged even when `activated` is already true.
+            if cx.flag_activated(&replace, method)? && activated {
+                return Ok(None);
+            }
+            trace!(
+                "activating {} (replacing {})",
+                replace.package_id(),
+                candidate.summary.package_id()
+            );
+            replace
+        }
+        None => {
+            // No replacement and already activated: nothing more to do.
+            if activated {
+                return Ok(None);
+            }
+            trace!("activating {}", candidate.summary.package_id());
+            candidate.summary
+        }
+    };
+
+    // Time the dependency query; the elapsed duration is handed back to the
+    // caller (used for resolver progress accounting).
+    let now = Instant::now();
+    let deps = cx.build_deps(registry, parent.map(|p| p.0), &candidate, method)?;
+    let frame = DepsFrame {
+        parent: candidate,
+        just_for_error_messages: false,
+        remaining_siblings: RcVecIter::new(Rc::new(deps)),
+    };
+    Ok(Some((frame, now.elapsed())))
+}
+
+/// A saved snapshot of resolver state, taken before trying a candidate so
+/// that on failure the resolver can roll back and try the next candidate.
+#[derive(Clone)]
+struct BacktrackFrame {
+    // Index of the sibling dependency being processed within its deps frame.
+    cur: usize,
+    // Full resolution context captured at the time this frame was created.
+    context: Context,
+    // Dependencies still awaiting activation at snapshot time.
+    remaining_deps: RemainingDeps,
+    // Candidates not yet tried for the dependency this frame belongs to.
+    remaining_candidates: RemainingCandidates,
+    // The package whose dependency we were activating.
+    parent: Summary,
+    // The dependency listing being activated.
+    dep: Dependency,
+    // Features requested for this dependency.
+    features: Rc<Vec<InternedString>>,
+    // Accumulated reasons why candidates for this dependency have failed.
+    conflicting_activations: HashMap<PackageId, ConflictReason>,
+}
+
+/// A helper "iterator" used to extract candidates within a current `Context` of
+/// a dependency graph.
+///
+/// This struct doesn't literally implement the `Iterator` trait (requires a few
+/// more inputs) but in general acts like one. Each `RemainingCandidates` is
+/// created with a list of candidates to choose from. When attempting to iterate
+/// over the list of candidates only *valid* candidates are returned. Validity
+/// is defined within a `Context`.
+///
+/// Candidates passed to `new` may not be returned from `next` as they could be
+/// filtered out, and as they are filtered the causes will be added to `conflicting_prev_active`.
+#[derive(Clone)]
+struct RemainingCandidates {
+    remaining: RcVecIter<Candidate>,
+    // This is a inlined peekable generator: the stashed next candidate,
+    // allowing `next` to report whether more candidates follow the one
+    // being returned.
+    has_another: Option<Candidate>,
+}
+
+impl RemainingCandidates {
+    /// Creates a fresh iterator over `candidates` with nothing stashed yet.
+    fn new(candidates: &Rc<Vec<Candidate>>) -> RemainingCandidates {
+        RemainingCandidates {
+            remaining: RcVecIter::new(Rc::clone(candidates)),
+            has_another: None,
+        }
+    }
+
+    /// Attempts to find another candidate to check from this list.
+    ///
+    /// This method will attempt to move this iterator forward, returning a
+    /// candidate that's possible to activate. The `cx` argument is the current
+    /// context which determines validity for candidates returned, and the `dep`
+    /// is the dependency listing that we're activating for.
+    ///
+    /// If successful a `(Candidate, bool)` pair will be returned. The
+    /// `Candidate` is the candidate to attempt to activate, and the `bool` is
+    /// an indicator of whether there are remaining candidates to try or if
+    /// we've reached the end of iteration.
+    ///
+    /// If we've reached the end of the iterator here then `None` will be
+    /// returned. Along the way, every candidate filtered out records the
+    /// package id that caused the skip, and the reason, into
+    /// `conflicting_prev_active`.
+    fn next(
+        &mut self,
+        conflicting_prev_active: &mut HashMap<PackageId, ConflictReason>,
+        cx: &Context,
+        dep: &Dependency,
+    ) -> Option<(Candidate, bool)> {
+        let prev_active = cx.prev_active(dep);
+
+        for (_, b) in self.remaining.by_ref() {
+            // The `links` key in the manifest dictates that there's only one
+            // package in a dependency graph, globally, with that particular
+            // `links` key. If this candidate links to something that's already
+            // linked to by a different package then we've gotta skip this.
+            if let Some(link) = b.summary.links() {
+                if let Some(a) = cx.links.get(&link) {
+                    if a != b.summary.package_id() {
+                        conflicting_prev_active
+                            .entry(a.clone())
+                            .or_insert_with(|| ConflictReason::Links(link));
+                        continue;
+                    }
+                }
+            }
+
+            // Otherwise the condition for being a valid candidate relies on
+            // semver. Cargo dictates that you can't duplicate multiple
+            // semver-compatible versions of a crate. For example we can't
+            // simultaneously activate `foo 1.0.2` and `foo 1.2.0`. We can,
+            // however, activate `1.0.2` and `2.0.0`.
+            //
+            // Here we throw out our candidate if it's *compatible*, yet not
+            // equal, to all previously activated versions.
+            if let Some(a) = prev_active
+                .iter()
+                .find(|a| compatible(a.version(), b.summary.version()))
+            {
+                if *a != b.summary {
+                    conflicting_prev_active
+                        .entry(a.package_id().clone())
+                        .or_insert(ConflictReason::Semver);
+                    continue;
+                }
+            }
+
+            // Well if we made it this far then we've got a valid dependency. We
+            // want this iterator to be inherently "peekable" so we don't
+            // necessarily return the item just yet. Instead we stash it away to
+            // get returned later, and if we replaced something then that was
+            // actually the candidate to try first so we return that.
+            if let Some(r) = mem::replace(&mut self.has_another, Some(b)) {
+                return Some((r, true));
+            }
+        }
+
+        // Alright we've entirely exhausted our list of candidates. If we've got
+        // something stashed away return that here (also indicating that there's
+        // nothing else).
+        self.has_another.take().map(|r| (r, false))
+    }
+}
+
+// Returns if `a` and `b` are compatible in the semver sense. This is a
+// commutative operation.
+//
+// Versions `a` and `b` are compatible if their left-most nonzero component
+// (major, then minor, then patch) is the same. E.g. `1.0.2` and `1.2.0` are
+// compatible, `0.1.2` and `0.2.0` are not.
+fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
+    if a.major != b.major {
+        return false;
+    }
+    // Nonzero major: same major means compatible.
+    if a.major != 0 {
+        return true;
+    }
+    if a.minor != b.minor {
+        return false;
+    }
+    // `0.x` with nonzero minor: same minor means compatible.
+    if a.minor != 0 {
+        return true;
+    }
+    // `0.0.x`: only identical patch versions are compatible.
+    a.patch == b.patch
+}
+
+/// Looks through the states in `backtrack_stack` for dependencies with
+/// remaining candidates. For each one, also checks if rolling back
+/// could change the outcome of the failed resolution that caused backtracking
+/// in the first place. Namely, if we've backtracked past the parent of the
+/// failed dep, or any of the packages flagged as giving us trouble in
+/// `conflicting_activations`.
+///
+/// Returns the candidate to try next, whether more candidates remain after
+/// it, and the frame whose state should be restored — or `None` when the
+/// whole stack is exhausted.
+///
+/// Read <https://github.com/rust-lang/cargo/pull/4834>
+/// For several more detailed explanations of the logic here.
+fn find_candidate(
+    backtrack_stack: &mut Vec<BacktrackFrame>,
+    parent: &Summary,
+    backtracked: bool,
+    conflicting_activations: &HashMap<PackageId, ConflictReason>,
+) -> Option<(Candidate, bool, BacktrackFrame)> {
+    while let Some(mut frame) = backtrack_stack.pop() {
+        // Advance this frame's candidate iterator; frames with no candidates
+        // left are simply discarded.
+        let next = frame.remaining_candidates.next(
+            &mut frame.conflicting_activations,
+            &frame.context,
+            &frame.dep,
+        );
+        let (candidate, has_another) = match next {
+            Some(pair) => pair,
+            None => continue,
+        };
+        // When we're calling this method we know that `parent` failed to
+        // activate. That means that some dependency failed to get resolved for
+        // whatever reason, and all of those reasons (plus maybe some extras)
+        // are listed in `conflicting_activations`.
+        //
+        // This means that if all members of `conflicting_activations` are still
+        // active in this back up we know that we're guaranteed to not actually
+        // make any progress. As a result if we hit this condition we can
+        // completely skip this backtrack frame and move on to the next.
+        if !backtracked {
+            if frame
+                .context
+                .is_conflicting(Some(parent.package_id()), conflicting_activations)
+            {
+                trace!(
+                    "{} = \"{}\" skip as not solving {}: {:?}",
+                    frame.dep.package_name(),
+                    frame.dep.version_req(),
+                    parent.package_id(),
+                    conflicting_activations
+                );
+                continue;
+            }
+        }
+
+        return Some((candidate, has_another, frame));
+    }
+    None
+}
+
+/// Verifies the resolved graph has no dependency cycles (through transitive
+/// dependencies), returning an error that describes the cycle if one exists.
+fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()> {
+    // Index every activated package's summary by id for lookup during the DFS.
+    let summaries: HashMap<&PackageId, &Summary> = activations
+        .values()
+        .flat_map(|v| v.iter())
+        .map(|s| (s.package_id(), s))
+        .collect();
+
+    // Sort packages to produce user friendly deterministic errors.
+    let mut all_packages: Vec<_> = resolve.iter().collect();
+    all_packages.sort_unstable();
+    let mut checked = HashSet::new();
+    for pkg in all_packages {
+        if !checked.contains(pkg) {
+            visit(resolve, pkg, &summaries, &mut HashSet::new(), &mut checked)?
+        }
+    }
+    return Ok(());
+
+    // Depth-first search from `id`. `visited` holds the current DFS path
+    // (revisiting a member means a cycle); `checked` memoizes nodes whose
+    // subtrees were already verified cycle-free.
+    fn visit<'a>(
+        resolve: &'a Resolve,
+        id: &'a PackageId,
+        summaries: &HashMap<&'a PackageId, &Summary>,
+        visited: &mut HashSet<&'a PackageId>,
+        checked: &mut HashSet<&'a PackageId>,
+    ) -> CargoResult<()> {
+        // See if we visited ourselves
+        if !visited.insert(id) {
+            bail!(
+                "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
+                id,
+                errors::describe_path(&resolve.path_to_top(id))
+            );
+        }
+
+        // If we've already checked this node no need to recurse again as we'll
+        // just conclude the same thing as last time, so we only execute the
+        // recursive step if we successfully insert into `checked`.
+        //
+        // Note that if we hit an intransitive dependency then we clear out the
+        // visitation list as we can't induce a cycle through transitive
+        // dependencies.
+        if checked.insert(id) {
+            let summary = summaries[id];
+            for dep in resolve.deps_not_replaced(id) {
+                let is_transitive = summary
+                    .dependencies()
+                    .iter()
+                    .any(|d| d.matches_id(dep) && d.is_transitive());
+                let mut empty = HashSet::new();
+                let visited = if is_transitive {
+                    &mut *visited
+                } else {
+                    // Non-transitive edge: recurse with a fresh (empty) path.
+                    &mut empty
+                };
+                visit(resolve, dep, summaries, visited, checked)?;
+
+                // Also walk through any `[replace]`ment of this dependency.
+                if let Some(id) = resolve.replacement(dep) {
+                    visit(resolve, id, summaries, visited, checked)?;
+                }
+            }
+        }
+
+        // Ok, we're done, no longer visiting our node any more
+        visited.remove(id);
+        Ok(())
+    }
+}
+
+/// Checks that packages are unique when written to lockfile.
+///
+/// When writing package id's to lockfile, we apply lossy encoding. In
+/// particular, we don't store paths of path dependencies. That means that
+/// *different* packages may collide in the lockfile, hence this check.
+fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> {
+    // Map each *encoded* (lossy) package id back to the first full id seen;
+    // a second full id mapping to the same encoded id is a collision.
+    let mut unique_pkg_ids = HashMap::new();
+    for pkg_id in resolve.iter() {
+        let encodable_pkd_id = encode::encodable_package_id(pkg_id);
+        if let Some(prev_pkg_id) = unique_pkg_ids.insert(encodable_pkd_id, pkg_id) {
+            bail!(
+                "package collision in the lockfile: packages {} and {} are different, \
+                 but only one can be written to lockfile unambiguously",
+                prev_pkg_id,
+                pkg_id
+            )
+        }
+    }
+    Ok(())
+}
diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs
new file mode 100644 (file)
index 0000000..dff8010
--- /dev/null
@@ -0,0 +1,286 @@
+use std::borrow::Borrow;
+use std::collections::{HashMap, HashSet};
+use std::fmt;
+use std::hash::Hash;
+use std::iter::FromIterator;
+
+use url::Url;
+
+use core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
+use util::errors::CargoResult;
+use util::Graph;
+
+use super::encode::Metadata;
+
+/// Represents a fully resolved package dependency graph. Each node in the graph
+/// is a package and edges represent dependencies between packages.
+///
+/// Each instance of `Resolve` also understands the full set of features used
+/// for each package.
+#[derive(PartialEq)]
+pub struct Resolve {
+    /// A graph, whose vertices are packages and edges are dependency specifications
+    /// from Cargo.toml. We need a `Vec` here because the same package
+    /// might be present in both `[dependencies]` and `[build-dependencies]`.
+    graph: Graph<PackageId, Vec<Dependency>>,
+    /// `[replace]` mapping: a package id -> the id of the package replacing it.
+    replacements: HashMap<PackageId, PackageId>,
+    /// Inverse of `replacements` (replacement id -> replaced id), derived in `new`.
+    reverse_replacements: HashMap<PackageId, PackageId>,
+    /// Shared empty set handed out by `features()` for packages with none recorded.
+    empty_features: HashSet<String>,
+    /// Activated feature names for each package.
+    features: HashMap<PackageId, HashSet<String>>,
+    /// Per-package checksum; `None` when no checksum was calculated for it.
+    checksums: HashMap<PackageId, Option<String>>,
+    /// Lockfile metadata, carried over from a previous resolve in `merge_from`.
+    metadata: Metadata,
+    /// `[patch]` entries that were not used in the graph (see `register_used_patches`).
+    unused_patches: Vec<PackageId>,
+}
+
+impl Resolve {
+    /// Builds a `Resolve` from its component parts, deriving the
+    /// reverse-replacement map (replacement id -> replaced id) from
+    /// `replacements`.
+    pub fn new(
+        graph: Graph<PackageId, Vec<Dependency>>,
+        replacements: HashMap<PackageId, PackageId>,
+        features: HashMap<PackageId, HashSet<String>>,
+        checksums: HashMap<PackageId, Option<String>>,
+        metadata: Metadata,
+        unused_patches: Vec<PackageId>,
+    ) -> Resolve {
+        let reverse_replacements = replacements
+            .iter()
+            .map(|p| (p.1.clone(), p.0.clone()))
+            .collect();
+        Resolve {
+            graph,
+            replacements,
+            features,
+            checksums,
+            metadata,
+            unused_patches,
+            empty_features: HashSet::new(),
+            reverse_replacements,
+        }
+    }
+
+    /// Resolves one of the paths from the given dependent package up to
+    /// the root.
+    pub fn path_to_top<'a>(&'a self, pkg: &'a PackageId) -> Vec<&'a PackageId> {
+        self.graph.path_to_top(pkg)
+    }
+
+    /// Records every patch summary that did not end up in the resolve graph
+    /// as unused, so it can be surfaced later via `unused_patches()`.
+    pub fn register_used_patches(&mut self, patches: &HashMap<Url, Vec<Summary>>) {
+        for summary in patches.values().flat_map(|v| v) {
+            if self.iter().any(|id| id == summary.package_id()) {
+                continue;
+            }
+            self.unused_patches.push(summary.package_id().clone());
+        }
+    }
+
+    /// Validates this resolve against a previous one (e.g. from an existing
+    /// lock file), erroring if any shared package's checksum differs, and
+    /// copies the previous resolve's metadata over on success.
+    pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> {
+        // Given a previous instance of resolve, it should be forbidden to ever
+        // have a checksums which *differ*. If the same package id has differing
+        // checksums, then something has gone wrong such as:
+        //
+        // * Something got seriously corrupted
+        // * A "mirror" isn't actually a mirror as some changes were made
+        // * A replacement source wasn't actually a replacement, some changes
+        //   were made
+        //
+        // In all of these cases, we want to report an error to indicate that
+        // something is awry. Normal execution (esp just using crates.io) should
+        // never run into this.
+        for (id, cksum) in previous.checksums.iter() {
+            if let Some(mine) = self.checksums.get(id) {
+                if mine == cksum {
+                    continue;
+                }
+
+                // If the previous checksum wasn't calculated, the current
+                // checksum is `Some`. This may indicate that a source was
+                // erroneously replaced or was replaced with something that
+                // desires stronger checksum guarantees than can be afforded
+                // elsewhere.
+                if cksum.is_none() {
+                    bail!(
+                        "\
+checksum for `{}` was not previously calculated, but a checksum could now \
+be calculated
+
+this could be indicative of a few possible situations:
+
+    * the source `{}` did not previously support checksums,
+      but was replaced with one that does
+    * newer Cargo implementations know how to checksum this source, but this
+      older implementation does not
+    * the lock file is corrupt
+",
+                        id,
+                        id.source_id()
+                    )
+
+                // If our checksum hasn't been calculated, then it could mean
+                // that future Cargo figured out how to checksum something or
+                // more realistically we were overridden with a source that does
+                // not have checksums.
+                } else if mine.is_none() {
+                    bail!(
+                        "\
+checksum for `{}` could not be calculated, but a checksum is listed in \
+the existing lock file
+
+this could be indicative of a few possible situations:
+
+    * the source `{}` supports checksums,
+      but was replaced with one that doesn't
+    * the lock file is corrupt
+
+unable to verify that `{0}` is the same as when the lockfile was generated
+",
+                        id,
+                        id.source_id()
+                    )
+
+                // If the checksums aren't equal, and neither is None, then they
+                // must both be Some, in which case the checksum now differs.
+                // That's quite bad!
+                } else {
+                    bail!(
+                        "\
+checksum for `{}` changed between lock files
+
+this could be indicative of a few possible errors:
+
+    * the lock file is corrupt
+    * a replacement source in use (e.g. a mirror) returned a different checksum
+    * the source itself may be corrupt in one way or another
+
+unable to verify that `{0}` is the same as when the lockfile was generated
+",
+                        id
+                    );
+                }
+            }
+        }
+
+        // Be sure to just copy over any unknown metadata.
+        self.metadata = previous.metadata.clone();
+        Ok(())
+    }
+
+    /// Returns whether `k` is a package in this resolve graph.
+    pub fn contains<Q: ?Sized>(&self, k: &Q) -> bool
+    where
+        PackageId: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.graph.contains(k)
+    }
+
+    /// Iterates over all package ids in the resolve graph.
+    pub fn iter(&self) -> impl Iterator<Item = &PackageId> {
+        self.graph.iter()
+    }
+
+    /// Iterates over `pkg`'s dependency edges, mapping each target through
+    /// `[replace]` (the replacement id is yielded when one exists).
+    pub fn deps(&self, pkg: &PackageId) -> impl Iterator<Item = (&PackageId, &[Dependency])> {
+        self.graph
+            .edges(pkg)
+            .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps.as_slice()))
+    }
+
+    /// Iterates over `pkg`'s dependency targets as stored in the graph,
+    /// without applying `[replace]`.
+    pub fn deps_not_replaced(&self, pkg: &PackageId) -> impl Iterator<Item = &PackageId> {
+        self.graph.edges(pkg).map(|(id, _)| id)
+    }
+
+    /// Returns the package id replacing `pkg`, if any `[replace]` applies.
+    pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> {
+        self.replacements.get(pkg)
+    }
+
+    /// Returns the full `[replace]` map (replaced id -> replacement id).
+    pub fn replacements(&self) -> &HashMap<PackageId, PackageId> {
+        &self.replacements
+    }
+
+    /// Returns the activated features of `pkg`, or a shared empty set when
+    /// none were recorded for it.
+    pub fn features(&self, pkg: &PackageId) -> &HashSet<String> {
+        self.features.get(pkg).unwrap_or(&self.empty_features)
+    }
+
+    /// Returns `pkg`'s activated features as a deterministically sorted list.
+    pub fn features_sorted(&self, pkg: &PackageId) -> Vec<&str> {
+        let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref()));
+        v.sort_unstable();
+        v
+    }
+
+    /// Resolves a package-id-spec string (e.g. from `-p`) against the graph.
+    pub fn query(&self, spec: &str) -> CargoResult<&PackageId> {
+        PackageIdSpec::query_str(spec, self.iter())
+    }
+
+    /// Returns `[patch]` entries that were not used in the graph.
+    pub fn unused_patches(&self) -> &[PackageId] {
+        &self.unused_patches
+    }
+
+    /// Returns the per-package checksum map.
+    pub fn checksums(&self) -> &HashMap<PackageId, Option<String>> {
+        &self.checksums
+    }
+
+    /// Returns the lockfile metadata carried by this resolve.
+    pub fn metadata(&self) -> &Metadata {
+        &self.metadata
+    }
+
+    /// Determines the `extern crate` name that `from` uses to refer to
+    /// `to_target` of package `to`: an explicit rename from the manifest
+    /// (with `-` mapped to `_`) or the target's own crate name. Errors when
+    /// multiple dependency declarations disagree on the name.
+    pub fn extern_crate_name(
+        &self,
+        from: &PackageId,
+        to: &PackageId,
+        to_target: &Target,
+    ) -> CargoResult<String> {
+        // A package referring to itself has no dependency edge to consult.
+        let deps = if from == to {
+            &[]
+        } else {
+            self.dependencies_listed(from, to)
+        };
+
+        let crate_name = to_target.crate_name();
+        let mut names = deps.iter().map(|d| {
+            d.explicit_name_in_toml()
+                .map(|s| s.as_str().replace("-", "_"))
+                .unwrap_or(crate_name.clone())
+        });
+        let name = names.next().unwrap_or(crate_name.clone());
+        // All dependency declarations for the same crate must agree on a
+        // single name, otherwise we can't pick one for `extern crate`.
+        for n in names {
+            if n == name {
+                continue;
+            }
+            bail!(
+                "multiple dependencies listed for the same crate must \
+                 all have the same name, but the dependency on `{}` \
+                 is listed as having different names",
+                to
+            );
+        }
+        Ok(name.to_string())
+    }
+
+    /// Returns the dependency declarations behind the `from` -> `to` edge.
+    /// Panics if no such edge exists (a caller bug).
+    fn dependencies_listed(&self, from: &PackageId, to: &PackageId) -> &[Dependency] {
+        // We've got a dependency on `from` to `to`, but this dependency edge
+        // may be affected by [replace]. If the `to` package is listed as the
+        // target of a replacement (aka the key of a reverse replacement map)
+        // then we try to find our dependency edge through that. If that fails
+        // then we go down below assuming it's not replaced.
+        //
+        // Note that we don't treat `from` as if it's been replaced because
+        // that's where the dependency originates from, and we only replace
+        // targets of dependencies not the originator.
+        if let Some(replace) = self.reverse_replacements.get(to) {
+            if let Some(deps) = self.graph.edge(from, replace) {
+                return deps;
+            }
+        }
+        match self.graph.edge(from, to) {
+            Some(ret) => ret,
+            None => panic!("no Dependency listed for `{}` => `{}`", from, to),
+        }
+    }
+}
+
+impl fmt::Debug for Resolve {
+    // Debug output shows the full dependency graph followed by each
+    // package's activated feature set.
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        writeln!(fmt, "graph: {:?}", self.graph)?;
+        writeln!(fmt, "\nfeatures: {{")?;
+        for (pkg, features) in &self.features {
+            writeln!(fmt, "  {}: {:?}", pkg, features)?;
+        }
+        write!(fmt, "}}")
+    }
+}
diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs
new file mode 100644 (file)
index 0000000..b949454
--- /dev/null
@@ -0,0 +1,505 @@
+use std::cmp::Ordering;
+use std::collections::{BinaryHeap, HashMap, HashSet};
+use std::ops::Range;
+use std::rc::Rc;
+use std::time::{Duration, Instant};
+
+use core::interning::InternedString;
+use core::{Dependency, PackageId, PackageIdSpec, Registry, Summary};
+use util::{CargoError, CargoResult, Config};
+
/// Tracks how long dependency resolution has been running and prints a
/// one-time "Resolving" status line when it is taking noticeably long.
pub struct ResolverProgress {
    // Number of times `shell_status` has been called so far.
    ticks: u16,
    // When resolution started.
    start: Instant,
    // How long resolution may run before the status message is printed.
    time_to_print: Duration,
    // Whether the status message has been printed (it is printed at most once).
    printed: bool,
    // Cumulative time spent in dependency queries (recorded via `elapsed`);
    // subtracted from wall-clock time in the checks below.
    deps_time: Duration,
}

impl ResolverProgress {
    /// Creates a tracker that starts the clock now, with a 500ms delay before
    /// any status message is shown.
    pub fn new() -> ResolverProgress {
        ResolverProgress {
            ticks: 0,
            start: Instant::now(),
            time_to_print: Duration::from_millis(500),
            printed: false,
            deps_time: Duration::new(0, 0),
        }
    }
    /// Called once per resolver iteration; may print a one-time "Resolving"
    /// status line when resolution is slow and stderr is a tty.
    pub fn shell_status(&mut self, config: Option<&Config>) -> CargoResult<()> {
        // If we spend a lot of time here (we shouldn't in most cases) then give
        // a bit of a visual indicator as to what we're doing. Only enable this
        // when stderr is a tty (a human is likely to be watching) to ensure we
        // get deterministic output otherwise when observed by tools.
        //
        // Also note that we hit this loop a lot, so it's fairly performance
        // sensitive. As a result try to defer a possibly expensive operation
        // like `Instant::now` by only checking every N iterations of this loop
        // to amortize the cost of the current time lookup.
        self.ticks += 1;
        if let Some(config) = config {
            if config.shell().is_err_tty()
                && !self.printed
                && self.ticks % 1000 == 0
                && self.start.elapsed() - self.deps_time > self.time_to_print
            {
                self.printed = true;
                config.shell().status("Resolving", "dependency graph...")?;
            }
        }
        // The largest test in our suite takes less than 5000 ticks
        // with all the algorithm improvements.
        // If any of them are removed then it takes more than I am willing to measure.
        // So let's fail the test fast if we have been running for too long.
        debug_assert!(self.ticks < 50_000);
        // The largest test in our suite takes less than 30 sec
        // with all the improvements to how fast a tick can go.
        // If any of them are removed then it takes more than I am willing to measure.
        // So let's fail the test fast if we have been running for too long.
        if cfg!(debug_assertions) && (self.ticks % 1000 == 0) {
            assert!(self.start.elapsed() - self.deps_time < Duration::from_secs(90));
        }
        Ok(())
    }
    /// Records time spent querying dependencies, so that query time does not
    /// count against the resolver's own time budget above.
    pub fn elapsed(&mut self, dur: Duration) {
        self.deps_time += dur;
    }
}
+
/// A caching layer over a `Registry` that applies `[replace]` overrides to
/// candidates and sorts them into the order the resolver should try them.
pub struct RegistryQueryer<'a> {
    pub registry: &'a mut (Registry + 'a),
    // `[replace]` entries to apply to candidates returned by `registry`.
    replacements: &'a [(PackageIdSpec, Dependency)],
    // Package ids to prefer; candidates in this set sort ahead of all others.
    try_to_use: &'a HashSet<&'a PackageId>,
    // Memoized results of `query`, keyed by the dependency queried.
    cache: HashMap<Dependency, Rc<Vec<Candidate>>>,
    // If set the list of dependency candidates will be sorted by minimal
    // versions first. That allows `cargo update -Z minimal-versions` which will
    // specify minimum dependency versions to be used.
    minimal_versions: bool,
}

impl<'a> RegistryQueryer<'a> {
    /// Creates a queryer with an empty cache.
    pub fn new(
        registry: &'a mut Registry,
        replacements: &'a [(PackageIdSpec, Dependency)],
        try_to_use: &'a HashSet<&'a PackageId>,
        minimal_versions: bool,
    ) -> Self {
        RegistryQueryer {
            registry,
            replacements,
            cache: HashMap::new(),
            try_to_use,
            minimal_versions,
        }
    }

    /// Queries the `registry` to return a list of candidates for `dep`.
    ///
    /// This method is the location where overrides are taken into account. If
    /// any candidates are returned which match an override then the override is
    /// applied by performing a second query for what the override should
    /// return.
    pub fn query(&mut self, dep: &Dependency) -> CargoResult<Rc<Vec<Candidate>>> {
        // Results are memoized; a cache hit is just a cheap `Rc` clone.
        if let Some(out) = self.cache.get(dep).cloned() {
            return Ok(out);
        }

        let mut ret = Vec::new();
        self.registry.query(
            dep,
            &mut |s| {
                ret.push(Candidate {
                    summary: s,
                    replace: None,
                });
            },
            false,
        )?;
        // Apply any matching `[replace]` override to each candidate.
        for candidate in ret.iter_mut() {
            let summary = &candidate.summary;

            let mut potential_matches = self
                .replacements
                .iter()
                .filter(|&&(ref spec, _)| spec.matches(summary.package_id()));

            // No matching override for this candidate: leave it untouched.
            let &(ref spec, ref dep) = match potential_matches.next() {
                None => continue,
                Some(replacement) => replacement,
            };
            debug!(
                "found an override for {} {}",
                dep.package_name(),
                dep.version_req()
            );

            // The override must resolve to exactly one package; zero or more
            // than one is reported to the user as an error.
            let mut summaries = self.registry.query_vec(dep, false)?.into_iter();
            let s = summaries.next().ok_or_else(|| {
                format_err!(
                    "no matching package for override `{}` found\n\
                     location searched: {}\n\
                     version required: {}",
                    spec,
                    dep.source_id(),
                    dep.version_req()
                )
            })?;
            let summaries = summaries.collect::<Vec<_>>();
            if !summaries.is_empty() {
                let bullets = summaries
                    .iter()
                    .map(|s| format!("  * {}", s.package_id()))
                    .collect::<Vec<_>>();
                bail!(
                    "the replacement specification `{}` matched \
                     multiple packages:\n  * {}\n{}",
                    spec,
                    s.package_id(),
                    bullets.join("\n")
                );
            }

            // The dependency should be hard-coded to have the same name and an
            // exact version requirement, so both of these assertions should
            // never fail.
            assert_eq!(s.version(), summary.version());
            assert_eq!(s.name(), summary.name());

            // A "replacement" from the very same source would be a no-op, so
            // it is dropped rather than recorded.
            let replace = if s.source_id() == summary.source_id() {
                debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s);
                None
            } else {
                Some(s)
            };
            let matched_spec = spec.clone();

            // Make sure no duplicates
            if let Some(&(ref spec, _)) = potential_matches.next() {
                bail!(
                    "overlapping replacement specifications found:\n\n  \
                     * {}\n  * {}\n\nboth specifications match: {}",
                    matched_spec,
                    spec,
                    summary.package_id()
                );
            }

            for dep in summary.dependencies() {
                debug!("\t{} => {}", dep.package_name(), dep.version_req());
            }

            candidate.replace = replace;
        }

        // When we attempt versions for a package we'll want to do so in a
        // sorted fashion to pick the "best candidates" first. Currently we try
        // prioritized summaries (those in `try_to_use`) and failing that we
        // list everything from the maximum version to the lowest version.
        ret.sort_unstable_by(|a, b| {
            let a_in_previous = self.try_to_use.contains(a.summary.package_id());
            let b_in_previous = self.try_to_use.contains(b.summary.package_id());
            let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse();
            match previous_cmp {
                Ordering::Equal => {
                    let cmp = a.summary.version().cmp(b.summary.version());
                    if self.minimal_versions {
                        // Lower version ordered first.
                        cmp
                    } else {
                        // Higher version ordered first.
                        cmp.reverse()
                    }
                }
                _ => previous_cmp,
            }
        });

        let out = Rc::new(ret);

        self.cache.insert(dep.clone(), out.clone());

        Ok(out)
    }
}
+
+#[derive(Clone, Copy)]
+pub enum Method<'a> {
+    Everything, // equivalent to Required { dev_deps: true, all_features: true, .. }
+    Required {
+        dev_deps: bool,
+        features: &'a [InternedString],
+        all_features: bool,
+        uses_default_features: bool,
+    },
+}
+
+impl<'r> Method<'r> {
+    pub fn split_features(features: &[String]) -> Vec<InternedString> {
+        features
+            .iter()
+            .flat_map(|s| s.split_whitespace())
+            .flat_map(|s| s.split(','))
+            .filter(|s| !s.is_empty())
+            .map(|s| InternedString::new(s))
+            .collect::<Vec<InternedString>>()
+    }
+}
+
/// A single version that could satisfy a dependency: the summary returned by
/// the registry plus, when a `[replace]` override applies (see
/// `RegistryQueryer::query`), the summary that actually replaces it.
#[derive(Clone)]
pub struct Candidate {
    pub summary: Summary,
    pub replace: Option<Summary>,
}
+
+#[derive(Clone)]
+pub struct DepsFrame {
+    pub parent: Summary,
+    pub just_for_error_messages: bool,
+    pub remaining_siblings: RcVecIter<DepInfo>,
+}
+
+impl DepsFrame {
+    /// Returns the least number of candidates that any of this frame's siblings
+    /// has.
+    ///
+    /// The `remaining_siblings` array is already sorted with the smallest
+    /// number of candidates at the front, so we just return the number of
+    /// candidates in that entry.
+    fn min_candidates(&self) -> usize {
+        self.remaining_siblings
+            .peek()
+            .map(|(_, (_, candidates, _))| candidates.len())
+            .unwrap_or(0)
+    }
+
+    pub fn flatten(&self) -> impl Iterator<Item = (&PackageId, Dependency)> {
+        self.remaining_siblings
+            .clone()
+            .map(move |(_, (d, _, _))| (self.parent.package_id(), d))
+    }
+}
+
+impl PartialEq for DepsFrame {
+    fn eq(&self, other: &DepsFrame) -> bool {
+        self.just_for_error_messages == other.just_for_error_messages
+            && self.min_candidates() == other.min_candidates()
+    }
+}
+
+impl Eq for DepsFrame {}
+
+impl PartialOrd for DepsFrame {
+    fn partial_cmp(&self, other: &DepsFrame) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for DepsFrame {
+    fn cmp(&self, other: &DepsFrame) -> Ordering {
+        self.just_for_error_messages
+            .cmp(&other.just_for_error_messages)
+            .then_with(||
+            // the frame with the sibling that has the least number of candidates
+            // needs to get bubbled up to the top of the heap we use below, so
+            // reverse comparison here.
+            self.min_candidates().cmp(&other.min_candidates()).reverse())
+    }
+}
+
+/// Note that a `BinaryHeap` is used for the remaining dependencies that need
+/// activation. This heap is sorted such that the "largest value" is the most
+/// constrained dependency, or the one with the least candidates.
+///
+/// This helps us get through super constrained portions of the dependency
+/// graph quickly and hopefully lock down what later larger dependencies can
+/// use (those with more candidates).
+#[derive(Clone)]
+pub struct RemainingDeps(BinaryHeap<DepsFrame>);
+
+impl RemainingDeps {
+    pub fn new() -> RemainingDeps {
+        RemainingDeps(BinaryHeap::new())
+    }
+    pub fn push(&mut self, x: DepsFrame) {
+        self.0.push(x)
+    }
+    pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, (usize, DepInfo)))> {
+        while let Some(mut deps_frame) = self.0.pop() {
+            let just_here_for_the_error_messages = deps_frame.just_for_error_messages;
+
+            // Figure out what our next dependency to activate is, and if nothing is
+            // listed then we're entirely done with this frame (yay!) and we can
+            // move on to the next frame.
+            if let Some(sibling) = deps_frame.remaining_siblings.next() {
+                let parent = Summary::clone(&deps_frame.parent);
+                self.0.push(deps_frame);
+                return Some((just_here_for_the_error_messages, (parent, sibling)));
+            }
+        }
+        None
+    }
+    pub fn iter(&mut self) -> impl Iterator<Item = (&PackageId, Dependency)> {
+        self.0.iter().flat_map(|other| other.flatten())
+    }
+}
+
// Information about the dependencies for a crate, a tuple of:
//
// (dependency info, candidates, features activated)
pub type DepInfo = (Dependency, Rc<Vec<Candidate>>, Rc<Vec<InternedString>>);

/// Shorthand for results whose error type is `ActivateError`.
pub type ActivateResult<T> = Result<T, ActivateError>;

/// Why activating a candidate failed.
///
/// NOTE(review): from the variants, `Fatal` appears to be a hard error while
/// `Conflict` carries a backtrackable `ConflictReason` — confirm against the
/// resolver's handling.
pub enum ActivateError {
    Fatal(CargoError),
    Conflict(PackageId, ConflictReason),
}

// Let `?` convert any `failure::Error` into the fatal variant.
impl From<::failure::Error> for ActivateError {
    fn from(t: ::failure::Error) -> Self {
        ActivateError::Fatal(t)
    }
}

// Let `?` convert a `(package, reason)` pair into the conflict variant.
impl From<(PackageId, ConflictReason)> for ActivateError {
    fn from(t: (PackageId, ConflictReason)) -> Self {
        ActivateError::Conflict(t.0, t.1)
    }
}
+
+/// All possible reasons that a package might fail to activate.
+///
+/// We maintain a list of conflicts for error reporting as well as backtracking
+/// purposes. Each reason here is why candidates may be rejected or why we may
+/// fail to resolve a dependency.
+#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
+pub enum ConflictReason {
+    /// There was a semver conflict, for example we tried to activate a package
+    /// 1.0.2 but 1.1.0 was already activated (aka a compatible semver version
+    /// is already activated)
+    Semver,
+
+    /// The `links` key is being violated. For example one crate in the
+    /// dependency graph has `links = "foo"` but this crate also had that, and
+    /// we're only allowed one per dependency graph.
+    Links(InternedString),
+
+    /// A dependency listed features that weren't actually available on the
+    /// candidate. For example we tried to activate feature `foo` but the
+    /// candidate we're activating didn't actually have the feature `foo`.
+    MissingFeatures(String),
+}
+
+impl ConflictReason {
+    pub fn is_links(&self) -> bool {
+        if let ConflictReason::Links(_) = *self {
+            return true;
+        }
+        false
+    }
+
+    pub fn is_missing_features(&self) -> bool {
+        if let ConflictReason::MissingFeatures(_) = *self {
+            return true;
+        }
+        false
+    }
+}
+
/// An iterator over a reference-counted vector, yielding `(index, value)`
/// pairs by cloning elements. Cloning the iterator itself is cheap (it clones
/// only the `Rc` handle and the remaining index range), so iteration can be
/// resumed from a snapshot.
pub struct RcVecIter<T> {
    vec: Rc<Vec<T>>,
    rest: Range<usize>,
}

impl<T> RcVecIter<T> {
    /// Creates an iterator over every element of `vec`.
    pub fn new(vec: Rc<Vec<T>>) -> RcVecIter<T> {
        let len = vec.len();
        RcVecIter { vec, rest: 0..len }
    }

    /// Returns the next `(index, &value)` pair without advancing.
    fn peek(&self) -> Option<(usize, &T)> {
        let idx = self.rest.clone().next()?;
        self.vec.get(idx).map(|val| (idx, val))
    }
}

// Written by hand so that cloning the iterator does not require `T: Clone`;
// only the `Rc` handle and the index range are duplicated.
impl<T> Clone for RcVecIter<T> {
    fn clone(&self) -> RcVecIter<T> {
        RcVecIter {
            vec: Rc::clone(&self.vec),
            rest: self.rest.clone(),
        }
    }
}

impl<T: Clone> Iterator for RcVecIter<T> {
    type Item = (usize, T);

    fn next(&mut self) -> Option<Self::Item> {
        let idx = self.rest.next()?;
        self.vec.get(idx).map(|val| (idx, val.clone()))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `rest` is a `std::ops::Range`, whose size hint is exact, which also
        // makes the `ExactSizeIterator` impl below correct.
        self.rest.size_hint()
    }
}

impl<T: Clone> ExactSizeIterator for RcVecIter<T> {}
+
/// A persistent singly-linked list built from reference-counted nodes, so
/// cloning a list (sharing its tail) is O(1).
pub struct RcList<T> {
    pub head: Option<Rc<(T, RcList<T>)>>,
}

impl<T> RcList<T> {
    /// Creates an empty list.
    pub fn new() -> RcList<T> {
        RcList { head: None }
    }

    /// Prepends `data`, making it the new head; the previous contents become
    /// the tail of the new node.
    pub fn push(&mut self, data: T) {
        let tail = RcList {
            head: self.head.take(),
        };
        self.head = Some(Rc::new((data, tail)));
    }
}

// Written by hand so that cloning does not require `T: Clone`; only the head
// pointer's refcount is bumped.
impl<T> Clone for RcList<T> {
    fn clone(&self) -> RcList<T> {
        RcList {
            head: self.head.clone(),
        }
    }
}

// The default recursive drop would overflow the stack on long lists, so walk
// the nodes iteratively instead. The walk stops at the first node that is
// still shared (`try_unwrap` fails), since that node keeps its tail alive.
impl<T> Drop for RcList<T> {
    fn drop(&mut self) {
        let mut cur = self.head.take();
        while let Some(node) = cur {
            cur = match Rc::try_unwrap(node) {
                Ok((_data, mut tail)) => tail.head.take(),
                Err(_) => None,
            };
        }
    }
}
+
/// A single recorded step in the incremental construction of a resolve graph.
pub enum GraphNode {
    /// Add a package node to the graph.
    Add(PackageId),
    /// Add a dependency edge between two packages.
    /// NOTE(review): presumably ordered (from, to, dep) — confirm where the
    /// graph is actually built.
    Link(PackageId, PackageId, Dependency),
}
diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs
new file mode 100644 (file)
index 0000000..77f897d
--- /dev/null
@@ -0,0 +1,410 @@
+use std::fmt;
+use std::io::prelude::*;
+
+use atty;
+use termcolor::Color::{Cyan, Green, Red, Yellow};
+use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
+
+use util::errors::CargoResult;
+
+/// The requested verbosity of output
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Verbosity {
+    Verbose,
+    Normal,
+    Quiet,
+}
+
/// An abstraction around a `Write`able object that remembers preferences for output verbosity and
/// color.
pub struct Shell {
    /// The `Write`able object, either with or without color support (represented by different enum
    /// variants). Note that all shell output goes to stderr, not stdout.
    err: ShellOut,
    /// How verbose messages should be
    verbosity: Verbosity,
}
+
+impl fmt::Debug for Shell {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self.err {
+            ShellOut::Write(_) => f.debug_struct("Shell")
+                .field("verbosity", &self.verbosity)
+                .finish(),
+            ShellOut::Stream { color_choice, .. } => f.debug_struct("Shell")
+                .field("verbosity", &self.verbosity)
+                .field("color_choice", &color_choice)
+                .finish(),
+        }
+    }
+}
+
/// A `Write`able object, either with or without color support
enum ShellOut {
    /// A plain write object without color support
    Write(Box<Write>),
    /// Color-enabled stdio, with information on whether color should be used
    Stream {
        stream: StandardStream,
        // Whether the stream is attached to a tty, captured at construction.
        tty: bool,
        color_choice: ColorChoice,
    },
}
+
/// Whether messages should use color output
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ColorChoice {
    /// Force color output
    Always,
    /// Force disable color output
    Never,
    /// Intelligently guess whether to use color output
    /// (resolved against whether stderr is a tty; see
    /// `to_termcolor_color_choice`).
    CargoAuto,
}
+
impl Shell {
    /// Create a new shell (color choice and verbosity), defaulting to 'auto' color and verbose
    /// output, writing to stderr.
    pub fn new() -> Shell {
        Shell {
            err: ShellOut::Stream {
                stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()),
                color_choice: ColorChoice::CargoAuto,
                tty: atty::is(atty::Stream::Stderr),
            },
            verbosity: Verbosity::Verbose,
        }
    }

    /// Create a shell from a plain writable object, with no color, and max verbosity.
    pub fn from_write(out: Box<Write>) -> Shell {
        Shell {
            err: ShellOut::Write(out),
            verbosity: Verbosity::Verbose,
        }
    }

    /// Print a message, where the status will have `color` color, and can be justified. The
    /// messages follows without color. Output is suppressed entirely in quiet mode.
    fn print(
        &mut self,
        status: &fmt::Display,
        message: Option<&fmt::Display>,
        color: Color,
        justified: bool,
    ) -> CargoResult<()> {
        match self.verbosity {
            Verbosity::Quiet => Ok(()),
            _ => self.err.print(status, message, color, justified),
        }
    }

    /// Returns the width of the terminal in spaces, if any; `None` when
    /// stderr is not a tty or the width cannot be determined.
    pub fn err_width(&self) -> Option<usize> {
        match self.err {
            ShellOut::Stream { tty: true, .. } => imp::stderr_width(),
            _ => None,
        }
    }

    /// Returns whether stderr is a tty
    pub fn is_err_tty(&self) -> bool {
        match self.err {
            ShellOut::Stream { tty, .. } => tty,
            _ => false,
        }
    }

    /// Get a reference to the underlying writer
    pub fn err(&mut self) -> &mut Write {
        self.err.as_write()
    }

    /// Shortcut to right-align and color green a status message.
    pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
    where
        T: fmt::Display,
        U: fmt::Display,
    {
        self.print(&status, Some(&message), Green, true)
    }

    /// Shortcut to right-align and color cyan a status with no message
    /// (no newline is written, so the caller continues the line).
    pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
    where
        T: fmt::Display,
    {
        self.print(&status, None, Cyan, true)
    }

    /// Shortcut to right-align a status message with a caller-chosen color.
    pub fn status_with_color<T, U>(
        &mut self,
        status: T,
        message: U,
        color: Color,
    ) -> CargoResult<()>
    where
        T: fmt::Display,
        U: fmt::Display,
    {
        self.print(&status, Some(&message), color, true)
    }

    /// Run the callback only if we are in verbose mode
    pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
    where
        F: FnMut(&mut Shell) -> CargoResult<()>,
    {
        match self.verbosity {
            Verbosity::Verbose => callback(self),
            _ => Ok(()),
        }
    }

    /// Run the callback if we are not in verbose mode.
    pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
    where
        F: FnMut(&mut Shell) -> CargoResult<()>,
    {
        match self.verbosity {
            Verbosity::Verbose => Ok(()),
            _ => callback(self),
        }
    }

    /// Print a red 'error' message. Note this goes through `print`, so it is
    /// also suppressed when the shell is quiet.
    pub fn error<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
        self.print(&"error:", Some(&message), Red, false)
    }

    /// Print an amber 'warning' message, suppressed in quiet mode.
    pub fn warn<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
        match self.verbosity {
            Verbosity::Quiet => Ok(()),
            _ => self.print(&"warning:", Some(&message), Yellow, false),
        }
    }

    /// Update the verbosity of the shell
    pub fn set_verbosity(&mut self, verbosity: Verbosity) {
        self.verbosity = verbosity;
    }

    /// Get the verbosity of the shell
    pub fn verbosity(&self) -> Verbosity {
        self.verbosity
    }

    /// Update the color choice (always, never, or auto) from a string.
    /// This is a no-op when the shell wraps a plain `Write` object.
    pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
        if let ShellOut::Stream {
            ref mut stream,
            ref mut color_choice,
            ..
        } = self.err
        {
            let cfg = match color {
                Some("always") => ColorChoice::Always,
                Some("never") => ColorChoice::Never,

                Some("auto") | None => ColorChoice::CargoAuto,

                Some(arg) => bail!(
                    "argument for --color must be auto, always, or \
                     never, but found `{}`",
                    arg
                ),
            };
            *color_choice = cfg;
            // Rebuild the stream so the new choice takes effect immediately.
            *stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
        }
        Ok(())
    }

    /// Get the current color choice
    ///
    /// If we are not using a color stream, this will always return Never, even if the color choice
    /// has been set to something else.
    pub fn color_choice(&self) -> ColorChoice {
        match self.err {
            ShellOut::Stream { color_choice, .. } => color_choice,
            ShellOut::Write(_) => ColorChoice::Never,
        }
    }

    /// Whether the shell supports color.
    pub fn supports_color(&self) -> bool {
        match &self.err {
            ShellOut::Write(_) => false,
            ShellOut::Stream { stream, .. } => stream.supports_color(),
        }
    }

    /// Prints a message and translates ANSI escape code into console colors.
    pub fn print_ansi(&mut self, message: &[u8]) -> CargoResult<()> {
        #[cfg(windows)]
        {
            // On Windows the console does not interpret ANSI escapes in all
            // configurations, so forward them through `fwdansi` instead.
            if let ShellOut::Stream { stream, .. } = &mut self.err {
                ::fwdansi::write_ansi(stream, message)?;
                return Ok(());
            }
        }
        self.err().write_all(message)?;
        Ok(())
    }
}
+
+impl Default for Shell {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
impl ShellOut {
    /// Print out a message with a status. The status comes first and is bold + the given color.
    /// The status can be justified, in which case the max width that will right align is 12 chars.
    ///
    /// When `message` is `None`, only the status plus a trailing space is
    /// written, without a newline (the caller is expected to finish the line).
    fn print(
        &mut self,
        status: &fmt::Display,
        message: Option<&fmt::Display>,
        color: Color,
        justified: bool,
    ) -> CargoResult<()> {
        match *self {
            ShellOut::Stream { ref mut stream, .. } => {
                // Reset first so previous colors never bleed into the status,
                // then apply bold + color for the status text only.
                stream.reset()?;
                stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
                if justified {
                    write!(stream, "{:>12}", status)?;
                } else {
                    write!(stream, "{}", status)?;
                }
                stream.reset()?;
                match message {
                    Some(message) => writeln!(stream, " {}", message)?,
                    None => write!(stream, " ")?,
                }
            }
            ShellOut::Write(ref mut w) => {
                // Plain writer: same layout, no color control.
                if justified {
                    write!(w, "{:>12}", status)?;
                } else {
                    write!(w, "{}", status)?;
                }
                match message {
                    Some(message) => writeln!(w, " {}", message)?,
                    None => write!(w, " ")?,
                }
            }
        }
        Ok(())
    }

    /// Get this object as a `io::Write`.
    fn as_write(&mut self) -> &mut Write {
        match *self {
            ShellOut::Stream { ref mut stream, .. } => stream,
            ShellOut::Write(ref mut w) => w,
        }
    }
}
+
impl ColorChoice {
    /// Convert our color choice to termcolor's version.
    fn to_termcolor_color_choice(self) -> termcolor::ColorChoice {
        match self {
            ColorChoice::Always => termcolor::ColorChoice::Always,
            ColorChoice::Never => termcolor::ColorChoice::Never,
            ColorChoice::CargoAuto => {
                // Resolve "auto" ourselves from the tty check — presumably so
                // piped (non-tty) output stays plain regardless of how
                // termcolor's own `Auto` would decide; confirm against the
                // termcolor `ColorChoice::Auto` documentation.
                if atty::is(atty::Stream::Stderr) {
                    termcolor::ColorChoice::Auto
                } else {
                    termcolor::ColorChoice::Never
                }
            }
        }
    }
}
+
#[cfg(any(target_os = "linux", target_os = "macos"))]
mod imp {
    use std::mem;

    use libc;

    /// Returns the terminal width of stderr in columns via the `TIOCGWINSZ`
    /// ioctl, or `None` if the ioctl fails or reports zero columns.
    pub fn stderr_width() -> Option<usize> {
        unsafe {
            // `winsize` is plain-old-data, so a zeroed value is a valid
            // buffer for the ioctl to fill in.
            let mut winsize: libc::winsize = mem::zeroed();
            if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
                return None;
            }
            if winsize.ws_col > 0 {
                Some(winsize.ws_col as usize)
            } else {
                None
            }
        }
    }
}
+
#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))]
mod imp {
    /// Terminal width detection is not implemented for this platform.
    pub fn stderr_width() -> Option<usize> {
        None
    }
}
+
#[cfg(windows)]
mod imp {
    extern crate winapi;

    use std::{cmp, mem, ptr};
    use self::winapi::um::fileapi::*;
    use self::winapi::um::handleapi::*;
    use self::winapi::um::processenv::*;
    use self::winapi::um::winbase::*;
    use self::winapi::um::wincon::*;
    use self::winapi::um::winnt::*;

    /// Returns the width of the console window attached to stderr, if any.
    pub fn stderr_width() -> Option<usize> {
        unsafe {
            let stdout = GetStdHandle(STD_ERROR_HANDLE);
            let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
            if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 {
                // NOTE(review): srWindow coordinates are inclusive, so
                // `Right - Left` may under-report the width by one column —
                // confirm against the CONSOLE_SCREEN_BUFFER_INFO docs.
                return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize)
            }

            // On mintty/msys/cygwin based terminals, the above fails with
            // INVALID_HANDLE_VALUE. Use an alternate method which works
            // in that case as well.
            let h = CreateFileA("CONOUT$\0".as_ptr() as *const CHAR,
                GENERIC_READ | GENERIC_WRITE,
                FILE_SHARE_READ | FILE_SHARE_WRITE,
                ptr::null_mut(),
                OPEN_EXISTING,
                0,
                ptr::null_mut()
            );
            if h == INVALID_HANDLE_VALUE {
                return None;
            }

            let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
            let rc = GetConsoleScreenBufferInfo(h, &mut csbi);
            CloseHandle(h);
            if rc != 0 {
                let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize;
                // Unfortunately cygwin/mintty does not set the size of the
                // backing console to match the actual window size. This
                // always reports a size of 80 or 120 (not sure what
                // determines that). Use a conservative max of 60 which should
                // work in most circumstances. ConEmu does some magic to
                // resize the console correctly, but there's no reasonable way
                // to detect which kind of terminal we are running in, or if
                // GetConsoleScreenBufferInfo returns accurate information.
                return Some(cmp::min(60, width));
            }
            return None;
        }
    }
}
diff --git a/src/cargo/core/source/mod.rs b/src/cargo/core/source/mod.rs
new file mode 100644 (file)
index 0000000..c03c29b
--- /dev/null
@@ -0,0 +1,289 @@
+use std::collections::hash_map::HashMap;
+use std::fmt;
+
+use core::{Dependency, Package, PackageId, Summary};
+use util::CargoResult;
+
+mod source_id;
+
+pub use self::source_id::{GitReference, SourceId};
+
+/// A Source finds and downloads remote packages based on names and
+/// versions.
+pub trait Source {
+    /// Returns the `SourceId` corresponding to this source
+    fn source_id(&self) -> &SourceId;
+
+    /// Returns the replaced `SourceId` corresponding to this source
+    ///
+    /// By default this is the same id returned by `source_id`.
+    fn replaced_source_id(&self) -> &SourceId {
+        self.source_id()
+    }
+
+    /// Returns whether or not this source will return summaries with
+    /// checksums listed.
+    fn supports_checksums(&self) -> bool;
+
+    /// Returns whether or not this source will return summaries with
+    /// the `precise` field in the source id listed.
+    fn requires_precise(&self) -> bool;
+
+    /// Attempt to find the packages that match a dependency request.
+    ///
+    /// Every matching `Summary` is handed to the callback `f`.
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()>;
+
+    /// Attempt to find the packages that are close to a dependency request.
+    /// Each source gets to define what `close` means for it.
+    /// path/git sources may return all dependencies that are at that uri.
+    /// whereas an Index source may return dependencies that have the same canonicalization.
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()>;
+
+    /// Convenience wrapper around `query` that collects all matching
+    /// summaries into a vector.
+    fn query_vec(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+        let mut ret = Vec::new();
+        self.query(dep, &mut |s| ret.push(s))?;
+        Ok(ret)
+    }
+
+    /// The update method performs any network operations required to
+    /// get the entire list of all names, versions and dependencies of
+    /// packages managed by the Source.
+    fn update(&mut self) -> CargoResult<()>;
+
+    /// The download method fetches the full package for each name and
+    /// version specified.
+    fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage>;
+
+    /// Turns the raw `contents` fetched for `package` into a `Package`.
+    ///
+    /// NOTE(review): presumably invoked once the bytes requested via
+    /// `MaybePackage::Download` have been retrieved — confirm at call sites.
+    fn finish_download(&mut self, package: &PackageId, contents: Vec<u8>) -> CargoResult<Package>;
+
+    /// Generates a unique string which represents the fingerprint of the
+    /// current state of the source.
+    ///
+    /// This fingerprint is used to determine the "freshness" of the source
+    /// later on. It must be guaranteed that the fingerprint of a source is
+    /// constant if and only if the output product will remain constant.
+    ///
+    /// The `pkg` argument is the package which this fingerprint should only be
+    /// interested in for when this source may contain multiple packages.
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String>;
+
+    /// If this source supports it, verifies the source of the package
+    /// specified.
+    ///
+    /// Note that the source may also have performed other checksum-based
+    /// verification during the `download` step, but this is intended to be run
+    /// just before a crate is compiled so it may perform more expensive checks
+    /// which may not be cacheable.
+    fn verify(&self, _pkg: &PackageId) -> CargoResult<()> {
+        Ok(())
+    }
+
+    /// Describes this source in a human readable fashion, used for display in
+    /// resolver error messages currently.
+    fn describe(&self) -> String;
+
+    /// Returns whether a source is being replaced by another here
+    fn is_replaced(&self) -> bool {
+        false
+    }
+}
+
+/// Result of `Source::download`: either the package is already available
+/// locally or it still has to be fetched from `url`.
+pub enum MaybePackage {
+    /// The package is ready to use as-is.
+    Ready(Package),
+    /// The package must still be downloaded from `url`; `descriptor` is a
+    /// textual label for the download (NOTE(review): presumably shown in
+    /// progress output — confirm at call sites).
+    Download { url: String, descriptor: String },
+}
+
+impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
+    /// Forwards to `Source::source_id`
+    fn source_id(&self) -> &SourceId {
+        (**self).source_id()
+    }
+
+    /// Forwards to `Source::replaced_source_id`
+    fn replaced_source_id(&self) -> &SourceId {
+        (**self).replaced_source_id()
+    }
+
+    /// Forwards to `Source::supports_checksums`
+    fn supports_checksums(&self) -> bool {
+        (**self).supports_checksums()
+    }
+
+    /// Forwards to `Source::requires_precise`
+    fn requires_precise(&self) -> bool {
+        (**self).requires_precise()
+    }
+
+    /// Forwards to `Source::query`
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        (**self).query(dep, f)
+    }
+
+    /// Forwards to `Source::fuzzy_query`
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        (**self).fuzzy_query(dep, f)
+    }
+
+    /// Forwards to `Source::update`
+    fn update(&mut self) -> CargoResult<()> {
+        (**self).update()
+    }
+
+    /// Forwards to `Source::download`
+    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+        (**self).download(id)
+    }
+
+    /// Forwards to `Source::finish_download`
+    fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
+        (**self).finish_download(id, data)
+    }
+
+    /// Forwards to `Source::fingerprint`
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        (**self).fingerprint(pkg)
+    }
+
+    /// Forwards to `Source::verify`
+    fn verify(&self, pkg: &PackageId) -> CargoResult<()> {
+        (**self).verify(pkg)
+    }
+
+    /// Forwards to `Source::describe`
+    fn describe(&self) -> String {
+        (**self).describe()
+    }
+
+    /// Forwards to `Source::is_replaced`
+    fn is_replaced(&self) -> bool {
+        (**self).is_replaced()
+    }
+}
+
+// Forwarding implementation so a `&mut S` can be used wherever a `Source`
+// is expected; every method simply delegates to the pointed-to source.
+impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
+    fn source_id(&self) -> &SourceId {
+        (**self).source_id()
+    }
+
+    fn replaced_source_id(&self) -> &SourceId {
+        (**self).replaced_source_id()
+    }
+
+    fn supports_checksums(&self) -> bool {
+        (**self).supports_checksums()
+    }
+
+    fn requires_precise(&self) -> bool {
+        (**self).requires_precise()
+    }
+
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        (**self).query(dep, f)
+    }
+
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        (**self).fuzzy_query(dep, f)
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        (**self).update()
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+        (**self).download(id)
+    }
+
+    fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
+        (**self).finish_download(id, data)
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        (**self).fingerprint(pkg)
+    }
+
+    fn verify(&self, pkg: &PackageId) -> CargoResult<()> {
+        (**self).verify(pkg)
+    }
+
+    fn describe(&self) -> String {
+        (**self).describe()
+    }
+
+    fn is_replaced(&self) -> bool {
+        (**self).is_replaced()
+    }
+}
+
+/// A `HashMap` of `SourceId` -> `Box<Source>`
+///
+/// Used to look up the `Source` responsible for a given `SourceId`.
+#[derive(Default)]
+pub struct SourceMap<'src> {
+    /// Boxed trait objects; `'src` bounds the lifetime of the stored sources.
+    map: HashMap<SourceId, Box<Source + 'src>>,
+}
+
+// impl debug on source requires specialization, if even desirable at all
+impl<'src> fmt::Debug for SourceMap<'src> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "SourceMap ")?;
+        f.debug_set().entries(self.map.keys()).finish()
+    }
+}
+
+impl<'src> SourceMap<'src> {
+    /// Create an empty map
+    pub fn new() -> SourceMap<'src> {
+        SourceMap {
+            map: HashMap::new(),
+        }
+    }
+
+    /// Like `HashMap::contains_key`
+    pub fn contains(&self, id: &SourceId) -> bool {
+        self.map.contains_key(id)
+    }
+
+    /// Like `HashMap::get`
+    pub fn get(&self, id: &SourceId) -> Option<&(Source + 'src)> {
+        let source = self.map.get(id);
+
+        source.map(|s| {
+            let s: &(Source + 'src) = &**s;
+            s
+        })
+    }
+
+    /// Like `HashMap::get_mut`
+    pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source + 'src)> {
+        self.map.get_mut(id).map(|s| {
+            let s: &mut (Source + 'src) = &mut **s;
+            s
+        })
+    }
+
+    /// Like `HashMap::get`, but first calculates the `SourceId` from a
+    /// `PackageId`
+    pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source + 'src)> {
+        self.get(pkg_id.source_id())
+    }
+
+    /// Like `HashMap::insert`, but derives the SourceId key from the Source
+    pub fn insert(&mut self, source: Box<Source + 'src>) {
+        let id = source.source_id().clone();
+        self.map.insert(id, source);
+    }
+
+    /// Like `HashMap::is_empty`
+    pub fn is_empty(&self) -> bool {
+        self.map.is_empty()
+    }
+
+    /// Like `HashMap::len`
+    pub fn len(&self) -> usize {
+        self.map.len()
+    }
+
+    /// Like `HashMap::values`
+    pub fn sources<'a>(&'a self) -> impl Iterator<Item = &'a Box<Source + 'src>> {
+        self.map.values()
+    }
+
+    /// Like `HashMap::iter_mut`
+    pub fn sources_mut<'a>(
+        &'a mut self,
+    ) -> impl Iterator<Item = (&'a SourceId, &'a mut (Source + 'src))> {
+        self.map.iter_mut().map(|(a, b)| (a, &mut **b))
+    }
+}
diff --git a/src/cargo/core/source/source_id.rs b/src/cargo/core/source/source_id.rs
new file mode 100644 (file)
index 0000000..f30d18f
--- /dev/null
@@ -0,0 +1,587 @@
+use std::cmp::{self, Ordering};
+use std::fmt::{self, Formatter};
+use std::hash::{self, Hash};
+use std::path::Path;
+use std::sync::Arc;
+use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
+use std::sync::atomic::Ordering::SeqCst;
+
+use serde::ser;
+use serde::de;
+use url::Url;
+
+use ops;
+use sources::git;
+use sources::{GitSource, PathSource, RegistrySource, CRATES_IO_INDEX};
+use sources::DirectorySource;
+use util::{CargoResult, Config, ToUrl};
+
+/// Unique identifier for a source of packages.
+///
+/// Cloning is cheap: the data lives behind a shared `Arc`.
+#[derive(Clone, Eq, Debug)]
+pub struct SourceId {
+    inner: Arc<SourceIdInner>,
+}
+
+#[derive(Eq, Clone, Debug)]
+struct SourceIdInner {
+    /// The source URL
+    url: Url,
+    /// `git::canonicalize_url(url)` for the url field
+    canonical_url: Url,
+    /// The source kind
+    kind: Kind,
+    /// e.g. the exact git revision of the specified branch for a Git Source
+    precise: Option<String>,
+    /// Name of the registry source for alternative registries
+    name: Option<String>,
+}
+
+/// The possible kinds of code source. Along with SourceIdInner this fully defines the
+/// source
+///
+/// NOTE: the derived `Ord` (variant declaration order) is relied upon by
+/// `SourceIdInner`'s `Ord` implementation below.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+enum Kind {
+    /// Kind::Git(<git reference>) represents a git repository
+    Git(GitReference),
+    /// represents a local path
+    Path,
+    /// represents a remote registry
+    Registry,
+    /// represents a local filesystem-based registry
+    LocalRegistry,
+    /// represents a directory-based registry
+    Directory,
+}
+
+/// Information to find a specific commit in a git repository
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum GitReference {
+    /// from a tag
+    Tag(String),
+    /// from the HEAD of a branch; `Branch("master")` is the default when a
+    /// git URL carries no reference, and is hidden by `pretty_ref`
+    Branch(String),
+    /// from a specific revision
+    Rev(String),
+}
+
+impl SourceId {
+    /// Create a SourceId object from the kind and url.
+    ///
+    /// The canonical url will be calculated, but the precise field will not
+    fn new(kind: Kind, url: Url) -> CargoResult<SourceId> {
+        let source_id = SourceId {
+            inner: Arc::new(SourceIdInner {
+                kind,
+                canonical_url: git::canonicalize_url(&url)?,
+                url,
+                precise: None,
+                name: None,
+            }),
+        };
+        Ok(source_id)
+    }
+
+    /// Parses a source URL and returns the corresponding ID.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// use cargo::core::SourceId;
+    /// SourceId::from_url("git+https://github.com/alexcrichton/\
+    ///                     libssh2-static-sys#80e71a3021618eb05\
+    ///                     656c58fb7c5ef5f12bc747f");
+    /// ```
+    pub fn from_url(string: &str) -> CargoResult<SourceId> {
+        let mut parts = string.splitn(2, '+');
+        let kind = parts.next().unwrap();
+        let url = parts
+            .next()
+            .ok_or_else(|| format_err!("invalid source `{}`", string))?;
+
+        match kind {
+            "git" => {
+                let mut url = url.to_url()?;
+                let mut reference = GitReference::Branch("master".to_string());
+                for (k, v) in url.query_pairs() {
+                    match &k[..] {
+                        // map older 'ref' to branch
+                        "branch" | "ref" => reference = GitReference::Branch(v.into_owned()),
+
+                        "rev" => reference = GitReference::Rev(v.into_owned()),
+                        "tag" => reference = GitReference::Tag(v.into_owned()),
+                        _ => {}
+                    }
+                }
+                // The fragment carries the exact revision (stored as `precise`);
+                // strip the query and fragment so only the repo URL remains.
+                let precise = url.fragment().map(|s| s.to_owned());
+                url.set_fragment(None);
+                url.set_query(None);
+                Ok(SourceId::for_git(&url, reference)?.with_precise(precise))
+            }
+            "registry" => {
+                let url = url.to_url()?;
+                // NOTE(review): registry ids parsed from a URL are marked with
+                // the "locked" precise value — presumably these originate from
+                // lock files; confirm at call sites.
+                Ok(SourceId::new(Kind::Registry, url)?.with_precise(Some("locked".to_string())))
+            }
+            "path" => {
+                let url = url.to_url()?;
+                SourceId::new(Kind::Path, url)
+            }
+            kind => Err(format_err!("unsupported source protocol: {}", kind)),
+        }
+    }
+
+    /// A view of the `SourceId` that can be `Display`ed as a URL
+    pub fn to_url(&self) -> SourceIdToUrl {
+        SourceIdToUrl {
+            inner: &*self.inner,
+        }
+    }
+
+    /// Create a SourceId from a filesystem path.
+    ///
+    /// Pass absolute path
+    pub fn for_path(path: &Path) -> CargoResult<SourceId> {
+        let url = path.to_url()?;
+        SourceId::new(Kind::Path, url)
+    }
+
+    /// Create a SourceId from a git reference
+    pub fn for_git(url: &Url, reference: GitReference) -> CargoResult<SourceId> {
+        SourceId::new(Kind::Git(reference), url.clone())
+    }
+
+    /// Create a SourceId from a registry url
+    pub fn for_registry(url: &Url) -> CargoResult<SourceId> {
+        SourceId::new(Kind::Registry, url.clone())
+    }
+
+    /// Create a SourceId from a local registry path
+    pub fn for_local_registry(path: &Path) -> CargoResult<SourceId> {
+        let url = path.to_url()?;
+        SourceId::new(Kind::LocalRegistry, url)
+    }
+
+    /// Create a SourceId from a directory path
+    pub fn for_directory(path: &Path) -> CargoResult<SourceId> {
+        let url = path.to_url()?;
+        SourceId::new(Kind::Directory, url)
+    }
+
+    /// Returns the `SourceId` corresponding to the main repository.
+    ///
+    /// This is the main cargo registry by default, but it can be overridden in
+    /// a `.cargo/config`.
+    pub fn crates_io(config: &Config) -> CargoResult<SourceId> {
+        config.crates_io_source_id(|| {
+            let cfg = ops::registry_configuration(config, None)?;
+            let url = if let Some(ref index) = cfg.index {
+                // Emit the deprecation warning at most once per process.
+                static WARNED: AtomicBool = ATOMIC_BOOL_INIT;
+                if !WARNED.swap(true, SeqCst) {
+                    config.shell().warn(
+                        "custom registry support via \
+                         the `registry.index` configuration is \
+                         being removed, this functionality \
+                         will not work in the future",
+                    )?;
+                }
+                &index[..]
+            } else {
+                CRATES_IO_INDEX
+            };
+            let url = url.to_url()?;
+            SourceId::for_registry(&url)
+        })
+    }
+
+    /// Create a SourceId for the alternative registry named `key`, resolving
+    /// its index URL from the configuration.
+    pub fn alt_registry(config: &Config, key: &str) -> CargoResult<SourceId> {
+        let url = config.get_registry_index(key)?;
+        Ok(SourceId {
+            inner: Arc::new(SourceIdInner {
+                kind: Kind::Registry,
+                canonical_url: git::canonicalize_url(&url)?,
+                url,
+                precise: None,
+                name: Some(key.to_string()),
+            }),
+        })
+    }
+
+    /// Get this source URL
+    pub fn url(&self) -> &Url {
+        &self.inner.url
+    }
+
+    /// A human readable description of this registry's index, for use in
+    /// status messages.
+    pub fn display_registry(&self) -> String {
+        if self.is_default_registry() {
+            "crates.io index".to_string()
+        } else {
+            format!("`{}` index", url_display(self.url()))
+        }
+    }
+
+    /// Is this source from a filesystem path
+    pub fn is_path(&self) -> bool {
+        self.inner.kind == Kind::Path
+    }
+
+    /// Is this source from a registry (either local or not)
+    pub fn is_registry(&self) -> bool {
+        match self.inner.kind {
+            Kind::Registry | Kind::LocalRegistry => true,
+            _ => false,
+        }
+    }
+
+    /// Is this source from an alternative registry
+    pub fn is_alt_registry(&self) -> bool {
+        self.is_registry() && self.inner.name.is_some()
+    }
+
+    /// Is this source from a git repository
+    pub fn is_git(&self) -> bool {
+        match self.inner.kind {
+            Kind::Git(_) => true,
+            _ => false,
+        }
+    }
+
+    /// Creates an implementation of `Source` corresponding to this ID.
+    pub fn load<'a>(&self, config: &'a Config) -> CargoResult<Box<super::Source + 'a>> {
+        trace!("loading SourceId; {}", self);
+        match self.inner.kind {
+            Kind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)),
+            Kind::Path => {
+                // Path-like kinds store `file://` URLs; anything else is a
+                // construction bug, hence the panic rather than an error.
+                let path = match self.inner.url.to_file_path() {
+                    Ok(p) => p,
+                    Err(()) => panic!("path sources cannot be remote"),
+                };
+                Ok(Box::new(PathSource::new(&path, self, config)))
+            }
+            Kind::Registry => Ok(Box::new(RegistrySource::remote(self, config))),
+            Kind::LocalRegistry => {
+                let path = match self.inner.url.to_file_path() {
+                    Ok(p) => p,
+                    Err(()) => panic!("path sources cannot be remote"),
+                };
+                Ok(Box::new(RegistrySource::local(self, &path, config)))
+            }
+            Kind::Directory => {
+                let path = match self.inner.url.to_file_path() {
+                    Ok(p) => p,
+                    Err(()) => panic!("path sources cannot be remote"),
+                };
+                Ok(Box::new(DirectorySource::new(&path, self, config)))
+            }
+        }
+    }
+
+    /// Get the value of the precise field
+    pub fn precise(&self) -> Option<&str> {
+        self.inner.precise.as_ref().map(|s| &s[..])
+    }
+
+    /// Get the git reference if this is a git source, otherwise None.
+    pub fn git_reference(&self) -> Option<&GitReference> {
+        match self.inner.kind {
+            Kind::Git(ref s) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// Create a new SourceId from this source with the given `precise`
+    pub fn with_precise(&self, v: Option<String>) -> SourceId {
+        SourceId {
+            inner: Arc::new(SourceIdInner {
+                precise: v,
+                ..(*self.inner).clone()
+            }),
+        }
+    }
+
+    /// Whether the remote registry is the standard https://crates.io
+    pub fn is_default_registry(&self) -> bool {
+        match self.inner.kind {
+            Kind::Registry => {}
+            _ => return false,
+        }
+        self.inner.url.to_string() == CRATES_IO_INDEX
+    }
+
+    /// Hash `self`
+    ///
+    /// For paths, remove the workspace prefix so the same source will give the
+    /// same hash in different locations.
+    pub fn stable_hash<S: hash::Hasher>(&self, workspace: &Path, into: &mut S) {
+        if self.is_path() {
+            if let Ok(p) = self.inner
+                .url
+                .to_file_path()
+                .unwrap()
+                .strip_prefix(workspace)
+            {
+                self.inner.kind.hash(into);
+                p.to_str().unwrap().hash(into);
+                return;
+            }
+        }
+        self.hash(into)
+    }
+}
+
+impl PartialEq for SourceId {
+    fn eq(&self, other: &SourceId) -> bool {
+        (*self.inner).eq(&*other.inner)
+    }
+}
+
+impl PartialOrd for SourceId {
+    fn partial_cmp(&self, other: &SourceId) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for SourceId {
+    fn cmp(&self, other: &SourceId) -> Ordering {
+        self.inner.cmp(&other.inner)
+    }
+}
+
+impl ser::Serialize for SourceId {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        if self.is_path() {
+            None::<String>.serialize(s)
+        } else {
+            s.collect_str(&self.to_url())
+        }
+    }
+}
+
+impl<'de> de::Deserialize<'de> for SourceId {
+    fn deserialize<D>(d: D) -> Result<SourceId, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        let string = String::deserialize(d)?;
+        SourceId::from_url(&string).map_err(de::Error::custom)
+    }
+}
+
+fn url_display(url: &Url) -> String {
+    if url.scheme() == "file" {
+        if let Ok(path) = url.to_file_path() {
+            if let Some(path_str) = path.to_str() {
+                return path_str.to_string();
+            }
+        }
+    }
+
+    url.as_str().to_string()
+}
+
+impl fmt::Display for SourceId {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        match *self.inner {
+            SourceIdInner {
+                kind: Kind::Path,
+                ref url,
+                ..
+            } => write!(f, "{}", url_display(url)),
+            SourceIdInner {
+                kind: Kind::Git(ref reference),
+                ref url,
+                ref precise,
+                ..
+            } => {
+                // Don't replace the URL display for git references,
+                // because those are kind of expected to be URLs.
+                write!(f, "{}", url)?;
+                if let Some(pretty) = reference.pretty_ref() {
+                    write!(f, "?{}", pretty)?;
+                }
+
+                if let Some(ref s) = *precise {
+                    let len = cmp::min(s.len(), 8);
+                    write!(f, "#{}", &s[..len])?;
+                }
+                Ok(())
+            }
+            SourceIdInner {
+                kind: Kind::Registry,
+                ref url,
+                ..
+            }
+            | SourceIdInner {
+                kind: Kind::LocalRegistry,
+                ref url,
+                ..
+            } => write!(f, "registry `{}`", url_display(url)),
+            SourceIdInner {
+                kind: Kind::Directory,
+                ref url,
+                ..
+            } => write!(f, "dir {}", url_display(url)),
+        }
+    }
+}
+
+// This custom implementation handles situations such as when two git sources
+// point at *almost* the same URL, but not quite, even when they actually point
+// to the same repository.
+/// This method tests for self and other values to be equal, and is used by ==.
+///
+/// For git repositories, the canonical url is checked.
+impl PartialEq for SourceIdInner {
+    fn eq(&self, other: &SourceIdInner) -> bool {
+        if self.kind != other.kind {
+            return false;
+        }
+        if self.url == other.url {
+            return true;
+        }
+
+        match (&self.kind, &other.kind) {
+            (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => {
+                ref1 == ref2 && self.canonical_url == other.canonical_url
+            }
+            _ => false,
+        }
+    }
+}
+
+impl PartialOrd for SourceIdInner {
+    fn partial_cmp(&self, other: &SourceIdInner) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for SourceIdInner {
+    fn cmp(&self, other: &SourceIdInner) -> Ordering {
+        match self.kind.cmp(&other.kind) {
+            Ordering::Equal => {}
+            ord => return ord,
+        }
+        match self.url.cmp(&other.url) {
+            Ordering::Equal => {}
+            ord => return ord,
+        }
+        match (&self.kind, &other.kind) {
+            (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => {
+                (ref1, &self.canonical_url).cmp(&(ref2, &other.canonical_url))
+            }
+            _ => self.kind.cmp(&other.kind),
+        }
+    }
+}
+
+// The hash of SourceId is used in the name of some Cargo folders, so shouldn't
+// vary. `as_str` gives the serialisation of a url (which has a spec) and so
+// insulates against possible changes in how the url crate does hashing.
+impl Hash for SourceId {
+    fn hash<S: hash::Hasher>(&self, into: &mut S) {
+        self.inner.kind.hash(into);
+        match *self.inner {
+            SourceIdInner {
+                kind: Kind::Git(..),
+                ref canonical_url,
+                ..
+            } => canonical_url.as_str().hash(into),
+            _ => self.inner.url.as_str().hash(into),
+        }
+    }
+}
+
+/// A `Display`able view into a `SourceId` that will write it as a url
+/// (the inverse of `SourceId::from_url`).
+pub struct SourceIdToUrl<'a> {
+    inner: &'a SourceIdInner,
+}
+
+impl<'a> fmt::Display for SourceIdToUrl<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self.inner {
+            SourceIdInner {
+                kind: Kind::Path,
+                ref url,
+                ..
+            } => write!(f, "path+{}", url),
+            SourceIdInner {
+                kind: Kind::Git(ref reference),
+                ref url,
+                ref precise,
+                ..
+            } => {
+                write!(f, "git+{}", url)?;
+                if let Some(pretty) = reference.pretty_ref() {
+                    write!(f, "?{}", pretty)?;
+                }
+                if let Some(precise) = precise.as_ref() {
+                    write!(f, "#{}", precise)?;
+                }
+                Ok(())
+            }
+            SourceIdInner {
+                kind: Kind::Registry,
+                ref url,
+                ..
+            } => write!(f, "registry+{}", url),
+            SourceIdInner {
+                kind: Kind::LocalRegistry,
+                ref url,
+                ..
+            } => write!(f, "local-registry+{}", url),
+            SourceIdInner {
+                kind: Kind::Directory,
+                ref url,
+                ..
+            } => write!(f, "directory+{}", url),
+        }
+    }
+}
+
+impl GitReference {
+    /// Returns a `Display`able view of this git reference, or None if using
+    /// the head of the "master" branch
+    pub fn pretty_ref(&self) -> Option<PrettyRef> {
+        match *self {
+            GitReference::Branch(ref s) if *s == "master" => None,
+            _ => Some(PrettyRef { inner: self }),
+        }
+    }
+}
+
+/// A git reference that can be `Display`ed
+///
+/// Produced by `GitReference::pretty_ref`; renders as `branch=`/`tag=`/`rev=`.
+pub struct PrettyRef<'a> {
+    inner: &'a GitReference,
+}
+
+impl<'a> fmt::Display for PrettyRef<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self.inner {
+            GitReference::Branch(ref b) => write!(f, "branch={}", b),
+            GitReference::Tag(ref s) => write!(f, "tag={}", s),
+            GitReference::Rev(ref s) => write!(f, "rev={}", s),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{GitReference, Kind, SourceId};
+    use util::ToUrl;
+
+    #[test]
+    fn github_sources_equal() {
+        let loc = "https://github.com/foo/bar".to_url().unwrap();
+        let master = Kind::Git(GitReference::Branch("master".to_string()));
+        let s1 = SourceId::new(master.clone(), loc).unwrap();
+
+        let loc = "git://github.com/foo/bar".to_url().unwrap();
+        let s2 = SourceId::new(master, loc.clone()).unwrap();
+
+        assert_eq!(s1, s2);
+
+        let foo = Kind::Git(GitReference::Branch("foo".to_string()));
+        let s3 = SourceId::new(foo, loc).unwrap();
+        assert_ne!(s1, s3);
+    }
+}
diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs
new file mode 100644 (file)
index 0000000..727bcdb
--- /dev/null
@@ -0,0 +1,401 @@
+use std::borrow::Borrow;
+use std::collections::{BTreeMap, HashMap};
+use std::fmt::Display;
+use std::mem;
+use std::rc::Rc;
+
+use serde::{Serialize, Serializer};
+
+use core::interning::InternedString;
+use core::{Dependency, PackageId, SourceId};
+use semver::Version;
+
+use util::CargoResult;
+
+/// Subset of a `Manifest`. Contains only the most important information about
+/// a package.
+///
+/// Summaries are cloned, and should not be mutated after creation
+#[derive(Debug, Clone)]
+pub struct Summary {
+    // Shared via `Rc` so cloning a `Summary` is cheap; the builder-style
+    // mutators use `Rc::make_mut` for copy-on-write.
+    inner: Rc<Inner>,
+}
+
+#[derive(Debug, Clone)]
+struct Inner {
+    // Identity (name/version/source) of the summarized package.
+    package_id: PackageId,
+    dependencies: Vec<Dependency>,
+    // Feature name -> values it enables; built by `build_feature_map`.
+    features: FeatureMap,
+    // Registry checksum; `None` until filled in by `set_checksum`.
+    checksum: Option<String>,
+    // Interned `links` value, if one was supplied.
+    links: Option<InternedString>,
+    // Whether namespaced features are enabled for this package.
+    namespaced_features: bool,
+}
+
+impl Summary {
+    /// Builds a `Summary`, validating `features` against `dependencies`.
+    ///
+    /// Fails if a feature and a dependency share a name (when namespaced
+    /// features are disabled) or if a dev-dependency is marked optional.
+    pub fn new<K>(
+        pkg_id: PackageId,
+        dependencies: Vec<Dependency>,
+        features: &BTreeMap<K, Vec<impl AsRef<str>>>,
+        links: Option<impl AsRef<str>>,
+        namespaced_features: bool,
+    ) -> CargoResult<Summary>
+    where K: Borrow<str> + Ord + Display {
+        for dep in dependencies.iter() {
+            let feature = dep.name_in_toml();
+            if !namespaced_features && features.get(&*feature).is_some() {
+                bail!(
+                    "Features and dependencies cannot have the \
+                     same name: `{}`",
+                    feature
+                )
+            }
+            // A non-transitive dependency is a dev-dependency, which may not
+            // be optional (see the error message below).
+            if dep.is_optional() && !dep.is_transitive() {
+                bail!(
+                    "Dev-dependencies are not allowed to be optional: `{}`",
+                    feature
+                )
+            }
+        }
+        let feature_map = build_feature_map(&features, &dependencies, namespaced_features)?;
+        Ok(Summary {
+            inner: Rc::new(Inner {
+                package_id: pkg_id,
+                dependencies,
+                features: feature_map,
+                checksum: None,
+                links: links.map(|l| InternedString::new(l.as_ref())),
+                namespaced_features,
+            }),
+        })
+    }
+
+    pub fn package_id(&self) -> &PackageId {
+        &self.inner.package_id
+    }
+    pub fn name(&self) -> InternedString {
+        self.package_id().name()
+    }
+    pub fn version(&self) -> &Version {
+        self.package_id().version()
+    }
+    pub fn source_id(&self) -> &SourceId {
+        self.package_id().source_id()
+    }
+    pub fn dependencies(&self) -> &[Dependency] {
+        &self.inner.dependencies
+    }
+    pub fn features(&self) -> &FeatureMap {
+        &self.inner.features
+    }
+    pub fn checksum(&self) -> Option<&str> {
+        self.inner.checksum.as_ref().map(|s| &s[..])
+    }
+    pub fn links(&self) -> Option<InternedString> {
+        self.inner.links
+    }
+    pub fn namespaced_features(&self) -> bool {
+        self.inner.namespaced_features
+    }
+
+    /// Returns this summary with its package id replaced; clones the shared
+    /// inner data only if other `Summary`s still reference it.
+    pub fn override_id(mut self, id: PackageId) -> Summary {
+        Rc::make_mut(&mut self.inner).package_id = id;
+        self
+    }
+
+    /// Returns this summary with the given registry checksum recorded.
+    pub fn set_checksum(mut self, cksum: String) -> Summary {
+        Rc::make_mut(&mut self.inner).checksum = Some(cksum);
+        self
+    }
+
+    /// Returns this summary with each dependency passed through `f`.
+    pub fn map_dependencies<F>(mut self, f: F) -> Summary
+    where
+        F: FnMut(Dependency) -> Dependency,
+    {
+        {
+            // Move the vector out temporarily so it can be consumed by value.
+            let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
+            let deps = mem::replace(slot, Vec::new());
+            *slot = deps.into_iter().map(f).collect();
+        }
+        self
+    }
+
+    /// Rewrites `to_replace` into `replace_with` both in this summary's own
+    /// source id and in every dependency.
+    pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary {
+        let me = if self.package_id().source_id() == to_replace {
+            let new_id = self.package_id().with_source_id(replace_with);
+            self.override_id(new_id)
+        } else {
+            self
+        };
+        me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
+    }
+}
+
+impl PartialEq for Summary {
+    fn eq(&self, other: &Summary) -> bool {
+        self.inner.package_id == other.inner.package_id
+    }
+}
+
+/// Checks features for errors, bailing out a `CargoResult::Err` if invalid,
+/// and creates `FeatureValue`s for each feature.
+///
+/// `dependencies` is first indexed by name so that each feature value can be
+/// checked against the (possibly several) dependencies sharing its name.
+fn build_feature_map<K>(
+    features: &BTreeMap<K, Vec<impl AsRef<str>>>,
+    dependencies: &[Dependency],
+    namespaced: bool,
+) -> CargoResult<FeatureMap>
+where K: Borrow<str> + Ord + Display {
+    use self::FeatureValue::*;
+    let mut dep_map = HashMap::new();
+    for dep in dependencies.iter() {
+        dep_map
+            .entry(dep.name_in_toml())
+            .or_insert_with(Vec::new)
+            .push(dep);
+    }
+
+    let mut map = BTreeMap::new();
+    for (feature, list) in features.iter() {
+        // If namespaced features is active and the key is the same as that of an
+        // optional dependency, that dependency must be included in the values.
+        // Thus, if a `feature` is found that has the same name as a dependency, we
+        // (a) bail out if the dependency is non-optional, and (b) we track if the
+        // feature requirements include the dependency `crate:feature` in the list.
+        // This is done with the `dependency_found` variable, which can only be
+        // false if features are namespaced and the current feature key is the same
+        // as the name of an optional dependency. If so, it gets set to true during
+        // iteration over the list if the dependency is found in the list.
+        let mut dependency_found = if namespaced {
+            match dep_map.get(feature.borrow()) {
+                Some(ref dep_data) => {
+                    if !dep_data.iter().any(|d| d.is_optional()) {
+                        bail!(
+                            "Feature `{}` includes the dependency of the same name, but this is \
+                             left implicit in the features included by this feature.\n\
+                             Additionally, the dependency must be marked as optional to be \
+                             included in the feature definition.\n\
+                             Consider adding `crate:{}` to this feature's requirements \
+                             and marking the dependency as `optional = true`",
+                            feature,
+                            feature
+                        )
+                    } else {
+                        false
+                    }
+                }
+                None => true,
+            }
+        } else {
+            true
+        };
+
+        // Parsed values for this feature, collected in list order.
+        let mut values = vec![];
+        for dep in list {
+            let val = FeatureValue::build(
+                InternedString::new(dep.as_ref()),
+                |fs| features.contains_key(fs.as_str()),
+                namespaced,
+            );
+
+            // Find data for the referenced dependency...
+            let dep_data = {
+                match val {
+                    Feature(ref dep_name) | Crate(ref dep_name) | CrateFeature(ref dep_name, _) => {
+                        dep_map.get(dep_name.as_str())
+                    }
+                }
+            };
+            let is_optional_dep = dep_data
+                .iter()
+                .flat_map(|d| d.iter())
+                .any(|d| d.is_optional());
+            if let FeatureValue::Crate(ref dep_name) = val {
+                // If we have a dependency value, check if this is the dependency named
+                // the same as the feature that we were looking for.
+                if !dependency_found && feature.borrow() == dep_name.as_str() {
+                    dependency_found = true;
+                }
+            }
+
+            match (&val, dep_data.is_some(), is_optional_dep) {
+                // The value is a feature. If features are namespaced, this just means
+                // it's not prefixed with `crate:`, so we have to check whether the
+                // feature actually exist. If the feature is not defined *and* an optional
+                // dependency of the same name exists, the feature is defined implicitly
+                // here by adding it to the feature map, pointing to the dependency.
+                // If features are not namespaced, it's been validated as a feature already
+                // while instantiating the `FeatureValue` in `FeatureValue::build()`, so
+                // we don't have to do so here.
+                (&Feature(feat), _, true) => {
+                    if namespaced && !features.contains_key(&*feat) {
+                        map.insert(feat, vec![FeatureValue::Crate(feat)]);
+                    }
+                }
+                // If features are namespaced and the value is not defined as a feature
+                // and there is no optional dependency of the same name, error out.
+                // If features are not namespaced, there must be an existing feature
+                // here (checked by `FeatureValue::build()`), so it will always be defined.
+                (&Feature(feat), dep_exists, false) => {
+                    if namespaced && !features.contains_key(&*feat) {
+                        if dep_exists {
+                            bail!(
+                                "Feature `{}` includes `{}` which is not defined as a feature.\n\
+                                 A non-optional dependency of the same name is defined; consider \
+                                 adding `optional = true` to its definition",
+                                feature,
+                                feat
+                            )
+                        } else {
+                            bail!(
+                                "Feature `{}` includes `{}` which is not defined as a feature",
+                                feature,
+                                feat
+                            )
+                        }
+                    }
+                }
+                // The value is a dependency. If features are namespaced, it is explicitly
+                // tagged as such (`crate:value`). If features are not namespaced, any value
+                // not recognized as a feature is pegged as a `Crate`. Here we handle the case
+                // where the dependency exists but is non-optional. It branches on namespaced
+                // just to provide the correct string for the crate dependency in the error.
+                (&Crate(ref dep), true, false) => if namespaced {
+                    bail!(
+                        "Feature `{}` includes `crate:{}` which is not an \
+                         optional dependency.\nConsider adding \
+                         `optional = true` to the dependency",
+                        feature,
+                        dep
+                    )
+                } else {
+                    bail!(
+                        "Feature `{}` depends on `{}` which is not an \
+                         optional dependency.\nConsider adding \
+                         `optional = true` to the dependency",
+                        feature,
+                        dep
+                    )
+                },
+                // If namespaced, the value was tagged as a dependency; if not namespaced,
+                // this could be anything not defined as a feature. This handles the case
+                // where no such dependency is actually defined; again, the branch on
+                // namespaced here is just to provide the correct string in the error.
+                (&Crate(ref dep), false, _) => if namespaced {
+                    bail!(
+                        "Feature `{}` includes `crate:{}` which is not a known \
+                         dependency",
+                        feature,
+                        dep
+                    )
+                } else {
+                    bail!(
+                        "Feature `{}` includes `{}` which is neither a dependency nor \
+                         another feature",
+                        feature,
+                        dep
+                    )
+                },
+                (&Crate(_), true, true) => {}
+                // If the value is a feature for one of the dependencies, bail out if no such
+                // dependency is actually defined in the manifest.
+                (&CrateFeature(ref dep, _), false, _) => bail!(
+                    "Feature `{}` requires a feature of `{}` which is not a \
+                     dependency",
+                    feature,
+                    dep
+                ),
+                (&CrateFeature(_, _), true, _) => {}
+            }
+            values.push(val);
+        }
+
+        if !dependency_found {
+            // If we have not found the dependency of the same-named feature, we should
+            // bail here.
+            bail!(
+                "Feature `{}` includes the optional dependency of the \
+                 same name, but this is left implicit in the features \
+                 included by this feature.\nConsider adding \
+                 `crate:{}` to this feature's requirements.",
+                feature,
+                feature
+            )
+        }
+
+        map.insert(InternedString::new(feature.borrow()), values);
+    }
+    Ok(map)
+}
+
+/// FeatureValue represents the types of dependencies a feature can have:
+///
+/// * Another feature
+/// * An optional dependency
+/// * A feature in a dependency
+///
+/// The selection between these 3 things happens as part of the construction of the FeatureValue.
+#[derive(Clone, Debug)]
+pub enum FeatureValue {
+    /// Another feature of the same package.
+    Feature(InternedString),
+    /// An optional dependency, written `crate:name` under namespaced features.
+    Crate(InternedString),
+    /// A feature of a dependency: `(dependency, feature)`, written `dep/feat`.
+    CrateFeature(InternedString, InternedString),
+}
+
+impl FeatureValue {
+    fn build<T>(feature: InternedString, is_feature: T, namespaced: bool) -> FeatureValue
+    where
+        T: Fn(InternedString) -> bool,
+    {
+        match (feature.find('/'), namespaced) {
+            (Some(pos), _) => {
+                let (dep, dep_feat) = feature.split_at(pos);
+                let dep_feat = &dep_feat[1..];
+                FeatureValue::CrateFeature(InternedString::new(dep), InternedString::new(dep_feat))
+            }
+            (None, true) if feature.starts_with("crate:") => {
+                FeatureValue::Crate(InternedString::new(&feature[6..]))
+            }
+            (None, true) => FeatureValue::Feature(feature),
+            (None, false) if is_feature(feature) => FeatureValue::Feature(feature),
+            (None, false) => FeatureValue::Crate(feature),
+        }
+    }
+
+    pub fn new(feature: InternedString, s: &Summary) -> FeatureValue {
+        Self::build(
+            feature,
+            |fs| s.features().contains_key(&fs),
+            s.namespaced_features(),
+        )
+    }
+
+    pub fn to_string(&self, s: &Summary) -> String {
+        use self::FeatureValue::*;
+        match *self {
+            Feature(ref f) => f.to_string(),
+            Crate(ref c) => if s.namespaced_features() {
+                format!("crate:{}", &c)
+            } else {
+                c.to_string()
+            },
+            CrateFeature(ref c, ref f) => [c.as_ref(), f.as_ref()].join("/"),
+        }
+    }
+}
+
+impl Serialize for FeatureValue {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        use self::FeatureValue::*;
+        match *self {
+            Feature(ref f) => serializer.serialize_str(f),
+            Crate(ref c) => serializer.serialize_str(c),
+            CrateFeature(ref c, ref f) => {
+                serializer.serialize_str(&[c.as_ref(), f.as_ref()].join("/"))
+            }
+        }
+    }
+}
+
+/// Map from feature name to the values that feature enables.
+pub type FeatureMap = BTreeMap<InternedString, Vec<FeatureValue>>;
diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs
new file mode 100644 (file)
index 0000000..e6fc45d
--- /dev/null
@@ -0,0 +1,876 @@
+use std::cell::RefCell;
+use std::collections::hash_map::{Entry, HashMap};
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+use std::slice;
+
+use glob::glob;
+use url::Url;
+
+use core::profiles::Profiles;
+use core::registry::PackageRegistry;
+use core::{Dependency, PackageIdSpec};
+use core::{EitherManifest, Package, SourceId, VirtualManifest};
+use ops;
+use sources::PathSource;
+use util::errors::{CargoResult, CargoResultExt, ManifestError};
+use util::paths;
+use util::toml::read_manifest;
+use util::{Config, Filesystem};
+
+/// The core abstraction in Cargo for working with a workspace of crates.
+///
+/// A workspace is often created very early on and then threaded through all
+/// other functions. It's typically through this object that the current
+/// package is loaded and/or learned about.
+#[derive(Debug)]
+pub struct Workspace<'cfg> {
+    // The cargo configuration this workspace was loaded with.
+    config: &'cfg Config,
+
+    // This path is a path to where the current cargo subcommand was invoked
+    // from. That is the `--manifest-path` argument to Cargo, and
+    // points to the "main crate" that we're going to worry about.
+    current_manifest: PathBuf,
+
+    // A list of packages found in this workspace. Always includes at least the
+    // package mentioned by `current_manifest`.
+    packages: Packages<'cfg>,
+
+    // If this workspace includes more than one crate, this points to the root
+    // of the workspace. This is `None` in the case that `[workspace]` is
+    // missing, `package.workspace` is missing, and no `Cargo.toml` above
+    // `current_manifest` was found on the filesystem with `[workspace]`.
+    root_manifest: Option<PathBuf>,
+
+    // Shared target directory for all the packages of this workspace.
+    // `None` if the default path of `root/target` should be used.
+    target_dir: Option<Filesystem>,
+
+    // List of members in this workspace with a listing of all their manifest
+    // paths. The packages themselves can be looked up through the `packages`
+    // set above.
+    members: Vec<PathBuf>,
+
+    // The subset of `members` that are used by the
+    // `build`, `check`, `test`, and `bench` subcommands
+    // when no package is selected with `--package` / `-p` and `--all`
+    // is not used.
+    //
+    // This is set by the `default-members` config
+    // in the `[workspace]` section.
+    // When unset, this is the same as `members` for virtual workspaces
+    // (`--all` is implied)
+    // or only the root package for non-virtual workspaces.
+    default_members: Vec<PathBuf>,
+
+    // True, if this is a temporary workspace created for the purposes of
+    // cargo install or cargo package.
+    is_ephemeral: bool,
+
+    // True if this workspace should enforce optional dependencies even when
+    // not needed; false if this workspace should only enforce dependencies
+    // needed by the current configuration (such as in cargo install). In some
+    // cases `false` also results in the non-enforcement of dev-dependencies.
+    require_optional_deps: bool,
+
+    // A cache of loaded packages for particular paths which is disjoint from
+    // `packages` up above, used in the `load` method down below.
+    loaded_packages: RefCell<HashMap<PathBuf, Package>>,
+}
+
+// Separate structure for tracking loaded packages (to avoid loading anything
+// twice), and this is separate to help appease the borrow checker.
+#[derive(Debug)]
+struct Packages<'cfg> {
+    config: &'cfg Config,
+    // Manifests loaded so far, keyed by path.
+    packages: HashMap<PathBuf, MaybePackage>,
+}
+
+// Either a real package manifest or a manifest without an associated package
+// (a "virtual Cargo.toml").
+#[derive(Debug)]
+enum MaybePackage {
+    Package(Package),
+    Virtual(VirtualManifest),
+}
+
+/// Configuration of a workspace in a manifest.
+#[derive(Debug, Clone)]
+pub enum WorkspaceConfig {
+    /// Indicates that `[workspace]` was present and the members were
+    /// optionally specified as well.
+    Root(WorkspaceRootConfig),
+
+    /// Indicates that `[workspace]` was present and the `root` field is the
+    /// optional value of `package.workspace`, if present.
+    /// (The path is interpreted relative to the member's own directory; see
+    /// `find_root`.)
+    Member { root: Option<String> },
+}
+
+/// Intermediate configuration of a workspace root in a manifest.
+///
+/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which
+/// together tell if some path is recognized as a member by this root or not.
+#[derive(Debug, Clone)]
+pub struct WorkspaceRootConfig {
+    // Directory of the workspace root manifest.
+    root_dir: PathBuf,
+    // `members` path patterns, if the key was present.
+    members: Option<Vec<String>>,
+    // `default-members` path patterns, if the key was present.
+    default_members: Option<Vec<String>>,
+    // `exclude` path patterns.
+    exclude: Vec<String>,
+}
+
+/// An iterator over the member packages of a workspace, returned by
+/// `Workspace::members`
+pub struct Members<'a, 'cfg: 'a> {
+    // The workspace that owns the member packages.
+    ws: &'a Workspace<'cfg>,
+    // Iterator over the members' manifest paths.
+    iter: slice::Iter<'a, PathBuf>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+    /// Creates a new workspace given the target manifest pointed to by
+    /// `manifest_path`.
+    ///
+    /// This function will construct the entire workspace by determining the
+    /// root and all member packages. It will then validate the workspace
+    /// before returning it, so `Ok` is only returned for valid workspaces.
+    pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
+        let target_dir = config.target_dir()?;
+
+        let mut ws = Workspace {
+            config,
+            current_manifest: manifest_path.to_path_buf(),
+            packages: Packages {
+                config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir,
+            members: Vec::new(),
+            default_members: Vec::new(),
+            is_ephemeral: false,
+            require_optional_deps: true,
+            loaded_packages: RefCell::new(HashMap::new()),
+        };
+        // Root/member discovery needs `&mut self` (manifests get loaded and
+        // cached in `packages`), so it runs after construction.
+        ws.root_manifest = ws.find_root(manifest_path)?;
+        ws.find_members()?;
+        ws.validate()?;
+        Ok(ws)
+    }
+
+    /// Creates a "temporary workspace" from one package which only contains
+    /// that package.
+    ///
+    /// This constructor will not touch the filesystem and only creates an
+    /// in-memory workspace. That is, all configuration is ignored, it's just
+    /// intended for that one package.
+    ///
+    /// This is currently only used in niche situations like `cargo install` or
+    /// `cargo package`.
+    pub fn ephemeral(
+        package: Package,
+        config: &'cfg Config,
+        target_dir: Option<Filesystem>,
+        require_optional_deps: bool,
+    ) -> CargoResult<Workspace<'cfg>> {
+        let mut ws = Workspace {
+            config,
+            current_manifest: package.manifest_path().to_path_buf(),
+            packages: Packages {
+                config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir: None,
+            members: Vec::new(),
+            default_members: Vec::new(),
+            is_ephemeral: true,
+            require_optional_deps,
+            loaded_packages: RefCell::new(HashMap::new()),
+        };
+        {
+            let key = ws.current_manifest.parent().unwrap();
+            let package = MaybePackage::Package(package);
+            ws.packages.packages.insert(key.to_path_buf(), package);
+            ws.target_dir = if let Some(dir) = target_dir {
+                Some(dir)
+            } else {
+                ws.config.target_dir()?
+            };
+            ws.members.push(ws.current_manifest.clone());
+            ws.default_members.push(ws.current_manifest.clone());
+        }
+        Ok(ws)
+    }
+
+    /// Returns the current package of this workspace.
+    ///
+    /// Note that this can return an error if the current manifest is
+    /// actually a "virtual Cargo.toml", in which case an error is returned
+    /// indicating that something else should be passed.
+    pub fn current(&self) -> CargoResult<&Package> {
+        let pkg = self.current_opt().ok_or_else(|| {
+            format_err!(
+                "manifest path `{}` is a virtual manifest, but this \
+                 command requires running against an actual package in \
+                 this workspace",
+                self.current_manifest.display()
+            )
+        })?;
+        Ok(pkg)
+    }
+
+    pub fn current_opt(&self) -> Option<&Package> {
+        match *self.packages.get(&self.current_manifest) {
+            MaybePackage::Package(ref p) => Some(p),
+            MaybePackage::Virtual(..) => None,
+        }
+    }
+
+    pub fn is_virtual(&self) -> bool {
+        match *self.packages.get(&self.current_manifest) {
+            MaybePackage::Package(..) => false,
+            MaybePackage::Virtual(..) => true,
+        }
+    }
+
+    /// Returns the `Config` this workspace is associated with.
+    pub fn config(&self) -> &'cfg Config {
+        self.config
+    }
+
+    /// Returns the profile settings, taken from the workspace root manifest
+    /// (or from the current manifest when there is no root).
+    pub fn profiles(&self) -> &Profiles {
+        let root = self.root_manifest
+            .as_ref()
+            .unwrap_or(&self.current_manifest);
+        match *self.packages.get(root) {
+            MaybePackage::Package(ref p) => p.manifest().profiles(),
+            MaybePackage::Virtual(ref vm) => vm.profiles(),
+        }
+    }
+
+    /// Returns the root path of this workspace.
+    ///
+    /// That is, this returns the path of the directory containing the
+    /// `Cargo.toml` which is the root of this workspace.
+    pub fn root(&self) -> &Path {
+        match self.root_manifest {
+            Some(ref p) => p,
+            None => &self.current_manifest,
+        }.parent()
+            .unwrap()
+    }
+
+    /// Returns the shared target directory, defaulting to `<root>/target`
+    /// when none was configured.
+    pub fn target_dir(&self) -> Filesystem {
+        self.target_dir
+            .clone()
+            .unwrap_or_else(|| Filesystem::new(self.root().join("target")))
+    }
+
+    /// Returns the root [replace] section of this workspace.
+    ///
+    /// This may be from a virtual crate or an actual crate.
+    pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] {
+        let path = match self.root_manifest {
+            Some(ref p) => p,
+            None => &self.current_manifest,
+        };
+        match *self.packages.get(path) {
+            MaybePackage::Package(ref p) => p.manifest().replace(),
+            MaybePackage::Virtual(ref vm) => vm.replace(),
+        }
+    }
+
+    /// Returns the root [patch] section of this workspace.
+    ///
+    /// This may be from a virtual crate or an actual crate.
+    pub fn root_patch(&self) -> &HashMap<Url, Vec<Dependency>> {
+        let path = match self.root_manifest {
+            Some(ref p) => p,
+            None => &self.current_manifest,
+        };
+        match *self.packages.get(path) {
+            MaybePackage::Package(ref p) => p.manifest().patch(),
+            MaybePackage::Virtual(ref vm) => vm.patch(),
+        }
+    }
+
+    /// Returns an iterator over all packages in this workspace
+    pub fn members<'a>(&'a self) -> Members<'a, 'cfg> {
+        Members {
+            ws: self,
+            iter: self.members.iter(),
+        }
+    }
+
+    /// Returns an iterator over default packages in this workspace
+    pub fn default_members<'a>(&'a self) -> Members<'a, 'cfg> {
+        Members {
+            ws: self,
+            iter: self.default_members.iter(),
+        }
+    }
+
+    /// Returns true if the package is a member of the workspace.
+    pub fn is_member(&self, pkg: &Package) -> bool {
+        self.members().any(|p| p == pkg)
+    }
+
+    /// Whether this is a temporary workspace (e.g. for `cargo install`).
+    pub fn is_ephemeral(&self) -> bool {
+        self.is_ephemeral
+    }
+
+    /// Whether optional dependencies are enforced even when not needed.
+    pub fn require_optional_deps(&self) -> bool {
+        self.require_optional_deps
+    }
+
+    /// Sets the optional-dependency enforcement flag, returning `self` for
+    /// chaining.
+    pub fn set_require_optional_deps<'a>(
+        &'a mut self,
+        require_optional_deps: bool,
+    ) -> &mut Workspace<'cfg> {
+        self.require_optional_deps = require_optional_deps;
+        self
+    }
+
+    /// Finds the root of a workspace for the crate whose manifest is located
+    /// at `manifest_path`.
+    ///
+    /// This will parse the `Cargo.toml` at `manifest_path` and then interpret
+    /// the workspace configuration, optionally walking up the filesystem
+    /// looking for other workspace roots.
+    ///
+    /// Returns an error if `manifest_path` isn't actually a valid manifest or
+    /// if some other transient error happens.
+    fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> {
+        fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> {
+            let path = member_manifest
+                .parent()
+                .unwrap()
+                .join(root_link)
+                .join("Cargo.toml");
+            debug!("find_root - pointer {}", path.display());
+            Ok(paths::normalize_path(&path))
+        };
+
+        {
+            let current = self.packages.load(manifest_path)?;
+            match *current.workspace_config() {
+                WorkspaceConfig::Root(_) => {
+                    debug!("find_root - is root {}", manifest_path.display());
+                    return Ok(Some(manifest_path.to_path_buf()));
+                }
+                WorkspaceConfig::Member {
+                    root: Some(ref path_to_root),
+                } => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)),
+                WorkspaceConfig::Member { root: None } => {}
+            }
+        }
+
+        for path in paths::ancestors(manifest_path).skip(2) {
+            if path.ends_with("target/package") {
+                break;
+            }
+
+            let ances_manifest_path = path.join("Cargo.toml");
+            debug!("find_root - trying {}", ances_manifest_path.display());
+            if ances_manifest_path.exists() {
+                match *self.packages.load(&ances_manifest_path)?.workspace_config() {
+                    WorkspaceConfig::Root(ref ances_root_config) => {
+                        debug!("find_root - found a root checking exclusion");
+                        if !ances_root_config.is_excluded(manifest_path) {
+                            debug!("find_root - found!");
+                            return Ok(Some(ances_manifest_path));
+                        }
+                    }
+                    WorkspaceConfig::Member {
+                        root: Some(ref path_to_root),
+                    } => {
+                        debug!("find_root - found pointer");
+                        return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?));
+                    }
+                    WorkspaceConfig::Member { .. } => {}
+                }
+            }
+
+            // Don't walk across `CARGO_HOME` when we're looking for the
+            // workspace root. Sometimes a package will be organized with
+            // `CARGO_HOME` pointing inside of the workspace root or in the
+            // current package, but we don't want to mistakenly try to put
+            // crates.io crates into the workspace by accident.
+            if self.config.home() == path {
+                break;
+            }
+        }
+
+        Ok(None)
+    }
+
+    /// After the root of a workspace has been located, probes for all members
+    /// of a workspace.
+    ///
+    /// If the `workspace.members` configuration is present, then this just
+    /// verifies that those are all valid packages to point to. Otherwise, this
+    /// will transitively follow all `path` dependencies looking for members of
+    /// the workspace.
+    fn find_members(&mut self) -> CargoResult<()> {
+        let root_manifest_path = match self.root_manifest {
+            Some(ref path) => path.clone(),
+            None => {
+                // No workspace root: this lone package is the only (and
+                // default) member.
+                debug!("find_members - only me as a member");
+                self.members.push(self.current_manifest.clone());
+                self.default_members.push(self.current_manifest.clone());
+                return Ok(());
+            }
+        };
+
+        // Expand the `members`/`default-members` patterns from the root
+        // configuration into concrete paths.
+        let members_paths;
+        let default_members_paths;
+        {
+            let root_package = self.packages.load(&root_manifest_path)?;
+            match *root_package.workspace_config() {
+                WorkspaceConfig::Root(ref root_config) => {
+                    members_paths =
+                        root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?;
+                    default_members_paths = if let Some(ref default) = root_config.default_members {
+                        Some(root_config.members_paths(default)?)
+                    } else {
+                        None
+                    }
+                }
+                _ => bail!(
+                    "root of a workspace inferred but wasn't a root: {}",
+                    root_manifest_path.display()
+                ),
+            }
+        }
+
+        for path in members_paths {
+            self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)?;
+        }
+
+        if let Some(default) = default_members_paths {
+            // Every explicitly-listed default member must also be a member.
+            for path in default {
+                let manifest_path = paths::normalize_path(&path.join("Cargo.toml"));
+                if !self.members.contains(&manifest_path) {
+                    bail!(
+                        "package `{}` is listed in workspace’s default-members \
+                         but is not a member.",
+                        path.display()
+                    )
+                }
+                self.default_members.push(manifest_path)
+            }
+        } else if self.is_virtual() {
+            self.default_members = self.members.clone()
+        } else {
+            self.default_members.push(self.current_manifest.clone())
+        }
+
+        // Also traverse the root manifest's own path dependencies (see
+        // `find_path_deps`).
+        self.find_path_deps(&root_manifest_path, &root_manifest_path, false)
+    }
+
+    /// Adds `manifest_path` — and, transitively, every `path` dependency
+    /// reachable from it — to `self.members`, honoring the root's
+    /// `workspace.exclude` list. `is_path_dep` marks the recursive calls made
+    /// for dependencies (as opposed to explicitly listed members).
+    fn find_path_deps(
+        &mut self,
+        manifest_path: &Path,
+        root_manifest: &Path,
+        is_path_dep: bool,
+    ) -> CargoResult<()> {
+        let manifest_path = paths::normalize_path(manifest_path);
+        if self.members.contains(&manifest_path) {
+            return Ok(());
+        }
+        if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root())
+            && self.find_root(&manifest_path)? != self.root_manifest
+        {
+            // If `manifest_path` is a path dependency outside of the workspace,
+            // don't add it, or any of its dependencies, as members.
+            return Ok(());
+        }
+
+        // Skip anything matched by the root's `workspace.exclude`.
+        if let WorkspaceConfig::Root(ref root_config) =
+            *self.packages.load(root_manifest)?.workspace_config()
+        {
+            if root_config.is_excluded(&manifest_path) {
+                return Ok(());
+            }
+        }
+
+        debug!("find_members - {}", manifest_path.display());
+        self.members.push(manifest_path.clone());
+
+        // Collect the manifest paths of this package's local (path-source)
+        // dependencies; a virtual manifest has no dependencies to follow.
+        let candidates = {
+            let pkg = match *self.packages.load(&manifest_path)? {
+                MaybePackage::Package(ref p) => p,
+                MaybePackage::Virtual(_) => return Ok(()),
+            };
+            pkg.dependencies()
+                .iter()
+                .map(|d| d.source_id())
+                .filter(|d| d.is_path())
+                .filter_map(|d| d.url().to_file_path().ok())
+                .map(|p| p.join("Cargo.toml"))
+                .collect::<Vec<_>>()
+        };
+        for candidate in candidates {
+            // Attribute any recursive failure to the manifest that pulled the
+            // dependency in.
+            self.find_path_deps(&candidate, root_manifest, true)
+                .map_err(|err| ManifestError::new(err, manifest_path.clone()))?;
+        }
+        Ok(())
+    }
+
+    /// Validates a workspace, ensuring that a number of invariants are upheld:
+    ///
+    /// 1. A workspace only has one root.
+    /// 2. All workspace members agree on this one root as the root.
+    /// 3. The current crate is a member of this workspace.
+    fn validate(&mut self) -> CargoResult<()> {
+        if self.root_manifest.is_none() {
+            // No workspace root configured; nothing to validate.
+            return Ok(());
+        }
+
+        // Collect every member that declares `[workspace]`, rejecting
+        // duplicate package names along the way.
+        let mut roots = Vec::new();
+        {
+            let mut names = BTreeMap::new();
+            for member in self.members.iter() {
+                let package = self.packages.get(member);
+                match *package.workspace_config() {
+                    WorkspaceConfig::Root(_) => {
+                        roots.push(member.parent().unwrap().to_path_buf());
+                    }
+                    WorkspaceConfig::Member { .. } => {}
+                }
+                let name = match *package {
+                    MaybePackage::Package(ref p) => p.name(),
+                    MaybePackage::Virtual(_) => continue,
+                };
+                if let Some(prev) = names.insert(name, member) {
+                    bail!(
+                        "two packages named `{}` in this workspace:\n\
+                         - {}\n\
+                         - {}",
+                        name,
+                        prev.display(),
+                        member.display()
+                    );
+                }
+            }
+        }
+
+        // Invariant 1: exactly one `[workspace]` root.
+        match roots.len() {
+            0 => bail!(
+                "`package.workspace` configuration points to a crate \
+                 which is not configured with [workspace]: \n\
+                 configuration at: {}\n\
+                 points to: {}",
+                self.current_manifest.display(),
+                self.root_manifest.as_ref().unwrap().display()
+            ),
+            1 => {}
+            _ => {
+                bail!(
+                    "multiple workspace roots found in the same workspace:\n{}",
+                    roots
+                        .iter()
+                        .map(|r| format!("  {}", r.display()))
+                        .collect::<Vec<_>>()
+                        .join("\n")
+                );
+            }
+        }
+
+        // Invariant 2: every member must resolve back to this same root.
+        for member in self.members.clone() {
+            let root = self.find_root(&member)?;
+            if root == self.root_manifest {
+                continue;
+            }
+
+            match root {
+                Some(root) => {
+                    bail!(
+                        "package `{}` is a member of the wrong workspace\n\
+                         expected: {}\n\
+                         actual:   {}",
+                        member.display(),
+                        self.root_manifest.as_ref().unwrap().display(),
+                        root.display()
+                    );
+                }
+                None => {
+                    bail!(
+                        "workspace member `{}` is not hierarchically below \
+                         the workspace root `{}`",
+                        member.display(),
+                        self.root_manifest.as_ref().unwrap().display()
+                    );
+                }
+            }
+        }
+
+        // Invariant 3: the current manifest must itself be a member. If it
+        // isn't, construct the most helpful suggestion we can for how to fix
+        // the configuration before bailing.
+        if !self.members.contains(&self.current_manifest) {
+            let root = self.root_manifest.as_ref().unwrap();
+            let root_dir = root.parent().unwrap();
+            let current_dir = self.current_manifest.parent().unwrap();
+            let root_pkg = self.packages.get(root);
+
+            // FIXME: Make this more generic by using a relative path resolver between member and
+            // root.
+            let members_msg = match current_dir.strip_prefix(root_dir) {
+                Ok(rel) => format!(
+                    "this may be fixable by adding `{}` to the \
+                     `workspace.members` array of the manifest \
+                     located at: {}",
+                    rel.display(),
+                    root.display()
+                ),
+                Err(_) => format!(
+                    "this may be fixable by adding a member to \
+                     the `workspace.members` array of the \
+                     manifest located at: {}",
+                    root.display()
+                ),
+            };
+            let extra = match *root_pkg {
+                MaybePackage::Virtual(_) => members_msg,
+                MaybePackage::Package(ref p) => {
+                    let has_members_list = match *p.manifest().workspace_config() {
+                        WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(),
+                        WorkspaceConfig::Member { .. } => unreachable!(),
+                    };
+                    if !has_members_list {
+                        format!(
+                            "this may be fixable by ensuring that this \
+                             crate is depended on by the workspace \
+                             root: {}",
+                            root.display()
+                        )
+                    } else {
+                        members_msg
+                    }
+                }
+            };
+            bail!(
+                "current package believes it's in a workspace when it's not:\n\
+                 current:   {}\n\
+                 workspace: {}\n\n{}",
+                self.current_manifest.display(),
+                root.display(),
+                extra
+            );
+        }
+
+        // Profiles only take effect at the workspace root; warn about any
+        // defined in other members.
+        if let Some(ref root_manifest) = self.root_manifest {
+            for pkg in self.members()
+                .filter(|p| p.manifest_path() != root_manifest)
+            {
+                if pkg.manifest().original().has_profiles() {
+                    let message = &format!(
+                        "profiles for the non root package will be ignored, \
+                         specify profiles at the workspace root:\n\
+                         package:   {}\n\
+                         workspace: {}",
+                        pkg.manifest_path().display(),
+                        root_manifest.display()
+                    );
+
+                    //TODO: remove `Eq` bound from `Profiles` when the warning is removed.
+                    self.config.shell().warn(&message)?;
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Loads (or returns a cached copy of) the package whose manifest lives
+    /// at `manifest_path`.
+    ///
+    /// Fails if the path names a virtual manifest (a workspace root without
+    /// an associated package).
+    pub fn load(&self, manifest_path: &Path) -> CargoResult<Package> {
+        // Fast path: reuse a package read while the workspace was built.
+        match self.packages.maybe_get(manifest_path) {
+            Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()),
+            Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"),
+            None => {}
+        }
+
+        // Secondary cache for packages loaded on demand after construction.
+        let mut loaded = self.loaded_packages.borrow_mut();
+        if let Some(p) = loaded.get(manifest_path).cloned() {
+            return Ok(p);
+        }
+        let source_id = SourceId::for_path(manifest_path.parent().unwrap())?;
+        let (package, _nested_paths) = ops::read_package(manifest_path, &source_id, self.config)?;
+        loaded.insert(manifest_path.to_path_buf(), package.clone());
+        Ok(package)
+    }
+
+    /// Preload the provided registry with already loaded packages.
+    ///
+    /// A workspace may load packages during construction/parsing/early phases
+    /// for various operations, and this preload step avoids doubly-loading and
+    /// parsing crates on the filesystem by inserting them all into the registry
+    /// with their in-memory formats.
+    pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) {
+        // These can get weird as this generally represents a workspace during
+        // `cargo install`. Things like git repositories will actually have a
+        // `PathSource` with multiple entries in it, so the logic below is
+        // mostly just an optimization for normal `cargo build` in workspaces
+        // during development.
+        if self.is_ephemeral {
+            return;
+        }
+
+        for pkg in self.packages.packages.values() {
+            let pkg = match *pkg {
+                MaybePackage::Package(ref p) => p.clone(),
+                // Virtual manifests have no package to preload.
+                MaybePackage::Virtual(_) => continue,
+            };
+            let mut src = PathSource::new(
+                pkg.manifest_path(),
+                pkg.package_id().source_id(),
+                self.config,
+            );
+            src.preload_with(pkg);
+            registry.add_preloaded(Box::new(src));
+        }
+    }
+
+    /// Emits any warnings accumulated while parsing the workspace's manifests.
+    ///
+    /// A warning flagged as critical is promoted to an error wrapped with the
+    /// path of the offending manifest; all others are printed to the shell as
+    /// ordinary warnings.
+    pub fn emit_warnings(&self) -> CargoResult<()> {
+        for (path, maybe_pkg) in &self.packages.packages {
+            let warnings = match maybe_pkg {
+                MaybePackage::Package(pkg) => pkg.manifest().warnings().warnings(),
+                MaybePackage::Virtual(vm) => vm.warnings().warnings(),
+            };
+            for warning in warnings {
+                if warning.is_critical {
+                    let err = format_err!("{}", warning.message);
+                    let cx = format_err!(
+                        "failed to parse manifest at `{}`",
+                        path.display()
+                    );
+                    return Err(err.context(cx).into());
+                } else {
+                    self.config.shell().warn(&warning.message)?
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+impl<'cfg> Packages<'cfg> {
+    /// Like `maybe_get`, but panics if the manifest was never loaded.
+    fn get(&self, manifest_path: &Path) -> &MaybePackage {
+        self.maybe_get(manifest_path).unwrap()
+    }
+
+    fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> {
+        // Packages are keyed by the directory containing the manifest, not
+        // the manifest file itself.
+        self.packages.get(manifest_path.parent().unwrap())
+    }
+
+    /// Reads and caches the manifest at `manifest_path`, classifying it as
+    /// either a real package or a virtual (workspace-only) manifest.
+    fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> {
+        let key = manifest_path.parent().unwrap();
+        match self.packages.entry(key.to_path_buf()) {
+            Entry::Occupied(e) => Ok(e.into_mut()),
+            Entry::Vacant(v) => {
+                let source_id = SourceId::for_path(key)?;
+                let (manifest, _nested_paths) =
+                    read_manifest(manifest_path, &source_id, self.config)?;
+                Ok(v.insert(match manifest {
+                    EitherManifest::Real(manifest) => {
+                        MaybePackage::Package(Package::new(manifest, manifest_path))
+                    }
+                    EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm),
+                }))
+            }
+        }
+    }
+}
+
+impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
+    type Item = &'a Package;
+
+    fn next(&mut self) -> Option<&'a Package> {
+        loop {
+            let next = self.iter.next().map(|path| self.ws.packages.get(path));
+            match next {
+                Some(&MaybePackage::Package(ref p)) => return Some(p),
+                // Virtual manifests are skipped; only real packages are
+                // yielded.
+                Some(&MaybePackage::Virtual(_)) => {}
+                None => return None,
+            }
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Lower bound is 0 because any number of the remaining entries may be
+        // virtual manifests that get filtered out by `next`.
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+}
+
+impl MaybePackage {
+    /// Returns the workspace configuration from either a real or a virtual
+    /// manifest.
+    fn workspace_config(&self) -> &WorkspaceConfig {
+        match *self {
+            MaybePackage::Package(ref p) => p.manifest().workspace_config(),
+            MaybePackage::Virtual(ref vm) => vm.workspace_config(),
+        }
+    }
+}
+
+impl WorkspaceRootConfig {
+    /// Create a new Intermediate Workspace Root configuration.
+    pub fn new(
+        root_dir: &Path,
+        members: &Option<Vec<String>>,
+        default_members: &Option<Vec<String>>,
+        exclude: &Option<Vec<String>>,
+    ) -> WorkspaceRootConfig {
+        WorkspaceRootConfig {
+            root_dir: root_dir.to_path_buf(),
+            members: members.clone(),
+            default_members: default_members.clone(),
+            exclude: exclude.clone().unwrap_or_default(),
+        }
+    }
+
+    /// Checks the path against the `exclude` list.
+    ///
+    /// A path that falls under an explicit `members` entry is never
+    /// considered excluded, even if it also matches `exclude`.
+    fn is_excluded(&self, manifest_path: &Path) -> bool {
+        let excluded = self.exclude
+            .iter()
+            .any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
+
+        let explicit_member = match self.members {
+            Some(ref members) => members
+                .iter()
+                .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))),
+            None => false,
+        };
+
+        // An explicit `members` entry always overrides `exclude`.
+        !explicit_member && excluded
+    }
+
+    /// Whether the root manifest declared a `members` list at all.
+    fn has_members_list(&self) -> bool {
+        self.members.is_some()
+    }
+
+    /// Expands each member glob (relative to the workspace root) into the
+    /// concrete directories it matches.
+    fn members_paths(&self, globs: &[String]) -> CargoResult<Vec<PathBuf>> {
+        let mut expanded_list = Vec::new();
+
+        for glob in globs {
+            let pathbuf = self.root_dir.join(glob);
+            let expanded_paths = Self::expand_member_path(&pathbuf)?;
+
+            // If glob does not find any valid paths, then put the original
+            // path in the expanded list to maintain backwards compatibility.
+            if expanded_paths.is_empty() {
+                expanded_list.push(pathbuf);
+            } else {
+                expanded_list.extend(expanded_paths);
+            }
+        }
+
+        Ok(expanded_list)
+    }
+
+    fn expand_member_path(path: &Path) -> CargoResult<Vec<PathBuf>> {
+        // Non-UTF-8 paths cannot be used as glob patterns; treat them as
+        // matching nothing.
+        let path = match path.to_str() {
+            Some(p) => p,
+            None => return Ok(Vec::new()),
+        };
+        let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?;
+        let res = res.map(|p| {
+            p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path))
+        }).collect::<Result<Vec<_>, _>>()?;
+        Ok(res)
+    }
+}
diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs
new file mode 100644 (file)
index 0000000..e7fcac4
--- /dev/null
@@ -0,0 +1,261 @@
+#![cfg_attr(test, deny(warnings))]
+
+// Clippy isn't enforced by CI, and know that @alexcrichton isn't a fan :)
+#![cfg_attr(feature = "cargo-clippy", allow(boxed_local))]             // bug rust-lang-nursery/rust-clippy#1123
+#![cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))]   // large project
+#![cfg_attr(feature = "cargo-clippy", allow(derive_hash_xor_eq))]      // there's an intentional incoherence
+#![cfg_attr(feature = "cargo-clippy", allow(explicit_into_iter_loop))] // explicit loops are clearer
+#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]      // explicit loops are clearer
+#![cfg_attr(feature = "cargo-clippy", allow(identity_op))]             // used for vertical alignment
+#![cfg_attr(feature = "cargo-clippy", allow(implicit_hasher))]         // large project
+#![cfg_attr(feature = "cargo-clippy", allow(large_enum_variant))]      // large project
+#![cfg_attr(feature = "cargo-clippy", allow(redundant_closure_call))]  // closures over try catch blocks
+#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))]      // large project
+#![cfg_attr(feature = "cargo-clippy", allow(type_complexity))]         // there's an exceptionally complex type
+#![cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))]   // perhaps Rc should be special cased in Clippy?
+
+extern crate atty;
+extern crate bytesize;
+extern crate clap;
+#[cfg(target_os = "macos")]
+extern crate core_foundation;
+extern crate crates_io as registry;
+extern crate crossbeam_utils;
+extern crate curl;
+extern crate curl_sys;
+#[macro_use]
+extern crate failure;
+extern crate filetime;
+extern crate flate2;
+extern crate fs2;
+#[cfg(windows)]
+extern crate fwdansi;
+extern crate git2;
+extern crate glob;
+extern crate hex;
+extern crate home;
+extern crate ignore;
+extern crate jobserver;
+#[macro_use]
+extern crate lazy_static;
+extern crate lazycell;
+extern crate libc;
+extern crate libgit2_sys;
+#[macro_use]
+extern crate log;
+extern crate num_cpus;
+extern crate opener;
+extern crate rustfix;
+extern crate same_file;
+extern crate semver;
+#[macro_use]
+extern crate serde;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde_ignored;
+#[macro_use]
+extern crate serde_json;
+extern crate shell_escape;
+extern crate tar;
+extern crate tempfile;
+extern crate termcolor;
+extern crate toml;
+extern crate unicode_width;
+extern crate url;
+
+use std::fmt;
+
+use serde::ser;
+use failure::Error;
+
+use core::Shell;
+use core::shell::Verbosity::Verbose;
+
+pub use util::{CargoError, CargoResult, CliError, CliResult, Config};
+pub use util::errors::Internal;
+
+pub const CARGO_ENV: &str = "CARGO";
+
+#[macro_use]
+mod macros;
+
+pub mod core;
+pub mod ops;
+pub mod sources;
+pub mod util;
+
+/// Git commit metadata captured at build time (see `version()` and the
+/// `CFG_COMMIT_*` environment variables).
+pub struct CommitInfo {
+    pub short_commit_hash: String,
+    pub commit_hash: String,
+    pub commit_date: String,
+}
+
+/// Build-configuration details only available when cargo was built via
+/// configure/make rather than by cargo itself.
+pub struct CfgInfo {
+    // Information about the git repository we may have been built from.
+    pub commit_info: Option<CommitInfo>,
+    // The release channel we were built for.
+    pub release_channel: String,
+}
+
+/// Cargo's version, as rendered by `cargo --version` via the `Display` impl.
+pub struct VersionInfo {
+    pub major: u8,
+    pub minor: u8,
+    pub patch: u8,
+    pub pre_release: Option<String>,
+    // Information that's only available when we were built with
+    // configure/make, rather than cargo itself.
+    pub cfg_info: Option<CfgInfo>,
+}
+
+impl fmt::Display for VersionInfo {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?;
+        // For non-stable builds append the channel name and any pre-release
+        // suffix, e.g. `-nightly`.
+        if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
+            if channel != "stable" {
+                write!(f, "-{}", channel)?;
+                let empty = String::new();
+                write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?;
+            }
+        };
+
+        // Append `(short-hash date)` when commit info was baked in at build
+        // time.
+        if let Some(ref cfg) = self.cfg_info {
+            if let Some(ref ci) = cfg.commit_info {
+                write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
+            }
+        };
+        Ok(())
+    }
+}
+
+/// Serializes `obj` to JSON and prints it to stdout.
+///
+/// # Panics
+///
+/// Panics if serialization fails (the result is unwrapped).
+pub fn print_json<T: ser::Serialize>(obj: &T) {
+    let encoded = serde_json::to_string(&obj).unwrap();
+    println!("{}", encoded);
+}
+
+/// Reports `err` to the shell and terminates the process with its exit code.
+pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
+    debug!("exit_with_error; err={:?}", err);
+    // clap errors carry their own formatted message and exit themselves.
+    if let Some(ref err) = err.error {
+        if let Some(clap_err) = err.downcast_ref::<clap::Error>() {
+            clap_err.exit()
+        }
+    }
+
+    let CliError {
+        error,
+        exit_code,
+        unknown,
+    } = err;
+    // exit_code == 0 is non-fatal error, e.g. docopt version info
+    let fatal = exit_code != 0;
+
+    // Unknown errors are hidden behind a generic message unless --verbose.
+    let hide = unknown && shell.verbosity() != Verbose;
+
+    if let Some(error) = error {
+        if hide {
+            drop(shell.error("An unknown error occurred"))
+        } else if fatal {
+            drop(shell.error(&error))
+        } else {
+            println!("{}", error);
+        }
+
+        // Suggest --verbose whenever part of the error output was suppressed.
+        if !handle_cause(&error, shell) || hide {
+            drop(writeln!(
+                shell.err(),
+                "\nTo learn more, run the command again \
+                 with --verbose."
+            ));
+        }
+    }
+
+    std::process::exit(exit_code)
+}
+
+/// Prints `err` and its cause chain to the shell without exiting.
+pub fn handle_error(err: &CargoError, shell: &mut Shell) {
+    debug!("handle_error; err={:?}", err);
+
+    let _ignored_result = shell.error(err);
+    handle_cause(err, shell);
+}
+
+/// Prints the cause chain of `cargo_err` to the shell.
+///
+/// Returns `false` if printing stopped early because an `Internal` error was
+/// reached in non-verbose mode — a signal to the caller that re-running with
+/// `--verbose` would show more.
+fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {
+    fn print(error: &str, shell: &mut Shell) {
+        drop(writeln!(shell.err(), "\nCaused by:"));
+        drop(writeln!(shell.err(), "  {}", error));
+    }
+
+    let verbose = shell.verbosity();
+
+    if verbose == Verbose {
+        // The first error has already been printed to the shell
+        // Print all remaining errors
+        for err in cargo_err.iter_causes() {
+            print(&err.to_string(), shell);
+        }
+    } else {
+        // The first error has already been printed to the shell
+        // Print remaining errors until one marked as Internal appears
+        for err in cargo_err.iter_causes() {
+            if err.downcast_ref::<Internal>().is_some() {
+                return false;
+            }
+
+            print(&err.to_string(), shell);
+        }
+    }
+
+    true
+}
+
+/// Computes cargo's `VersionInfo` from the `CARGO_PKG_*` environment
+/// variables, plus the optional `CFG_*` variables that are only present when
+/// built via configure/make.
+pub fn version() -> VersionInfo {
+    macro_rules! option_env_str {
+        ($name:expr) => { option_env!($name).map(|s| s.to_string()) }
+    }
+
+    // So this is pretty horrible...
+    // There are two versions at play here:
+    //   - version of cargo-the-binary, which you see when you type `cargo --version`
+    //   - version of cargo-the-library, which you download from crates.io for use
+    //     in your packages.
+    //
+    // We want to make the `binary` version the same as the corresponding Rust/rustc release.
+    // At the same time, we want to keep the library version at `0.x`, because Cargo as
+    // a library is (and probably will always be) unstable.
+    //
+    // Historically, Cargo used the same version number for both the binary and the library.
+    // Specifically, rustc 1.x.z was paired with cargo 0.x+1.w.
+    // We continue to use this scheme for the library, but transform it to 1.x.w for the purposes
+    // of `cargo --version`.
+    let major = 1;
+    let minor = env!("CARGO_PKG_VERSION_MINOR").parse::<u8>().unwrap() - 1;
+    let patch = env!("CARGO_PKG_VERSION_PATCH").parse::<u8>().unwrap();
+
+    match option_env!("CFG_RELEASE_CHANNEL") {
+        // We have environment variables set up from configure/make.
+        Some(_) => {
+            // Commit info is optional even in configure/make builds.
+            let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo {
+                commit_hash: s.to_string(),
+                short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
+                commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
+            });
+            VersionInfo {
+                major,
+                minor,
+                patch,
+                pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
+                cfg_info: Some(CfgInfo {
+                    release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(),
+                    commit_info,
+                }),
+            }
+        }
+        // We are being compiled by Cargo itself.
+        None => VersionInfo {
+            major,
+            minor,
+            patch,
+            pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
+            cfg_info: None,
+        },
+    }
+}
diff --git a/src/cargo/macros.rs b/src/cargo/macros.rs
new file mode 100644 (file)
index 0000000..17529ee
--- /dev/null
@@ -0,0 +1,49 @@
+use std::fmt;
+
+// Generates a `fmt::Debug` impl for `$ty` that compares each listed field
+// against a caller-supplied `default` value: fields equal to the default are
+// collapsed into a single `"..": default_name` entry, keeping the output
+// compact while still showing anything unusual. The field list must be
+// exhaustive (enforced by the destructuring `let` below).
+macro_rules! compact_debug {
+    (
+        impl fmt::Debug for $ty:ident {
+            fn fmt(&$this:ident, f: &mut fmt::Formatter) -> fmt::Result {
+                let (default, default_name) = $e:expr;
+                [debug_the_fields($($field:ident)*)]
+            }
+        }
+    ) => (
+
+        impl fmt::Debug for $ty {
+            fn fmt(&$this, f: &mut fmt::Formatter) -> fmt::Result {
+                // Try printing a pretty version where we collapse as many fields as
+                // possible, indicating that they're equivalent to a function call
+                // that's hopefully enough to indicate what each value is without
+                // actually dumping everything so verbosely.
+                let mut s = f.debug_struct(stringify!($ty));
+                let (default, default_name) = $e;
+                let mut any_default = false;
+
+                // Exhaustively match so when fields are added we get a compile
+                // failure
+                let $ty { $($field),* } = $this;
+                $(
+                    if *$field == default.$field {
+                        any_default = true;
+                    } else {
+                        s.field(stringify!($field), $field);
+                    }
+                )*
+
+                if any_default {
+                    s.field("..", &::macros::DisplayAsDebug(default_name));
+                }
+                s.finish()
+            }
+        }
+    )
+}
+
+/// Wrapper whose `Debug` output is the inner value's `Display` output.
+pub struct DisplayAsDebug<T>(pub T);
+
+impl<T: fmt::Display> fmt::Debug for DisplayAsDebug<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Delegate straight to the inner value's Display impl.
+        fmt::Display::fmt(&self.0, f)
+    }
+}
diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs
new file mode 100644 (file)
index 0000000..10308cd
--- /dev/null
@@ -0,0 +1,142 @@
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+
+use core::compiler::{BuildConfig, BuildContext, CompileMode, Context, Kind, Unit};
+use core::profiles::UnitFor;
+use core::Workspace;
+use ops;
+use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
+use util::Config;
+
+/// Options controlling `cargo clean`.
+pub struct CleanOptions<'a> {
+    pub config: &'a Config,
+    /// A list of packages to clean. If empty, everything is cleaned.
+    pub spec: Vec<String>,
+    /// The target arch triple to clean, or None for the host arch
+    pub target: Option<String>,
+    /// Whether to clean the release directory
+    pub release: bool,
+    /// Whether to just clean the doc directory
+    pub doc: bool,
+}
+
+/// Cleans the package's build artifacts.
+pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
+    let target_dir = ws.target_dir();
+    let config = ws.config();
+
+    // If the doc option is set, we just want to delete the doc directory.
+    if opts.doc {
+        let target_dir = target_dir.join("doc");
+        let target_dir = target_dir.into_path_unlocked();
+        return rm_rf(&target_dir, config);
+    }
+
+    // If we have a spec, then we need to delete some packages, otherwise, just
+    // remove the whole target directory and be done with it!
+    //
+    // Note that we don't bother grabbing a lock here as we're just going to
+    // blow it all away anyway.
+    if opts.spec.is_empty() {
+        let target_dir = target_dir.into_path_unlocked();
+        return rm_rf(&target_dir, config);
+    }
+
+    let (packages, resolve) = ops::resolve_ws(ws)?;
+
+    let profiles = ws.profiles();
+    let mut units = Vec::new();
+
+    for spec in opts.spec.iter() {
+        // Translate the spec to a Package
+        let pkgid = resolve.query(spec)?;
+        let pkg = packages.get_one(pkgid)?;
+
+        // Generate all relevant `Unit` targets for this package: every
+        // kind/mode/unit-for combination, so every possible output location
+        // is covered.
+        for target in pkg.targets() {
+            for kind in [Kind::Host, Kind::Target].iter() {
+                for mode in CompileMode::all_modes() {
+                    for unit_for in UnitFor::all_values() {
+                        let profile = if mode.is_run_custom_build() {
+                            profiles.get_profile_run_custom_build(&profiles.get_profile(
+                                pkg.package_id(),
+                                ws.is_member(pkg),
+                                *unit_for,
+                                CompileMode::Build,
+                                opts.release,
+                            ))
+                        } else {
+                            profiles.get_profile(
+                                pkg.package_id(),
+                                ws.is_member(pkg),
+                                *unit_for,
+                                *mode,
+                                opts.release,
+                            )
+                        };
+                        units.push(Unit {
+                            pkg,
+                            target,
+                            profile,
+                            kind: *kind,
+                            mode: *mode,
+                        });
+                    }
+                }
+            }
+        }
+    }
+
+    // Set up just enough of a build context to compute where each unit's
+    // output files live; nothing is compiled here.
+    let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?;
+    build_config.release = opts.release;
+    let bcx = BuildContext::new(
+        ws,
+        &resolve,
+        &packages,
+        opts.config,
+        &build_config,
+        profiles,
+        HashMap::new(),
+    )?;
+    let mut cx = Context::new(config, &bcx)?;
+    cx.prepare_units(None, &units)?;
+
+    for unit in units.iter() {
+        rm_rf(&cx.files().fingerprint_dir(unit), config)?;
+        // Build scripts have dedicated directories rather than normal
+        // output artifacts.
+        if unit.target.is_custom_build() {
+            if unit.mode.is_run_custom_build() {
+                rm_rf(&cx.files().build_script_out_dir(unit), config)?;
+            } else {
+                rm_rf(&cx.files().build_script_dir(unit), config)?;
+            }
+            continue;
+        }
+
+        // Remove each output artifact along with any hardlinked copy of it.
+        for output in cx.outputs(unit)?.iter() {
+            rm_rf(&output.path, config)?;
+            if let Some(ref dst) = output.hardlink {
+                rm_rf(dst, config)?;
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Removes `path` — recursively if it is a directory — printing a `Removing`
+/// status in verbose mode. A nonexistent path is silently ignored.
+fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
+    let m = fs::metadata(path);
+    if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
+        config
+            .shell()
+            .verbose(|shell| shell.status("Removing", path.display()))?;
+        paths::remove_dir_all(path).chain_err(|| format_err!("could not remove build directory"))?;
+    } else if m.is_ok() {
+        config
+            .shell()
+            .verbose(|shell| shell.status("Removing", path.display()))?;
+        paths::remove_file(path).chain_err(|| format_err!("failed to remove build artifact"))?;
+    }
+    Ok(())
+}
diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
new file mode 100644 (file)
index 0000000..54f2c41
--- /dev/null
@@ -0,0 +1,863 @@
+//!
+//! Cargo compile currently does the following steps:
+//!
+//! All configurations are already injected as environment variables via the
+//! main cargo command
+//!
+//! 1. Read the manifest
+//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as
+//!    stdin
+//!
+//!    a. Shell out to `--do update` and `--do list` for each source
+//!    b. Resolve dependencies and return a list of name/version/source
+//!
+//! 3. Shell out to `--do download` for each source
+//! 4. Shell out to `--do get` for each source, and build up the list of paths
+//!    to pass to rustc -L
+//! 5. Call `cargo-rustc` with the results of the resolver zipped together with
+//!    the results of the `get`
+//!
+//!    a. Topologically sort the dependencies
+//!    b. Compile each dependency in order, passing in the -L's pointing at each
+//!       previously compiled dependency
+//!
+
+use std::collections::{HashMap, HashSet};
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use core::compiler::{BuildConfig, BuildContext, Compilation, Context, DefaultExecutor, Executor};
+use core::compiler::{CompileMode, Kind, Unit};
+use core::profiles::{UnitFor, Profiles};
+use core::resolver::{Method, Resolve};
+use core::{Package, Source, Target};
+use core::{PackageId, PackageIdSpec, TargetKind, Workspace};
+use ops;
+use util::config::Config;
+use util::{lev_distance, profile, CargoResult};
+
+/// Contains information about how a package should be compiled.
+#[derive(Debug)]
+pub struct CompileOptions<'a> {
+    /// The global cargo configuration for this invocation.
+    pub config: &'a Config,
+    /// Configuration information for a rustc build
+    pub build_config: BuildConfig,
+    /// Extra features to build for the root package
+    pub features: Vec<String>,
+    /// Flag whether all available features should be built for the root package
+    pub all_features: bool,
+    /// Flag if the default feature should be built for the root package
+    pub no_default_features: bool,
+    /// A set of packages to build.
+    pub spec: Packages,
+    /// Filter to apply to the root package to select which targets will be
+    /// built.
+    pub filter: CompileFilter,
+    /// Extra arguments to be passed to rustdoc (single target only)
+    pub target_rustdoc_args: Option<Vec<String>>,
+    /// The specified target will be compiled with all the available arguments,
+    /// note that this only accounts for the *final* invocation of rustc
+    pub target_rustc_args: Option<Vec<String>>,
+    /// Extra arguments passed to all selected targets for rustdoc.
+    pub local_rustdoc_args: Option<Vec<String>>,
+    /// The directory to copy final artifacts to. Note that even if `out_dir` is
+    /// set, a copy of artifacts still could be found a `target/(debug\release)`
+    /// as usual.
+    // Note that, although the cmd-line flag name is `out-dir`, in code we use
+    // `export_dir`, to avoid confusion with out dir at `target/debug/deps`.
+    pub export_dir: Option<PathBuf>,
+}
+
+impl<'a> CompileOptions<'a> {
+    /// Builds a default set of compile options for the given `mode`:
+    /// no extra features, default features enabled, no package/target
+    /// filtering, no extra rustc/rustdoc arguments, and no export dir.
+    pub fn new(config: &'a Config, mode: CompileMode) -> CargoResult<CompileOptions<'a>> {
+        Ok(CompileOptions {
+            config,
+            // `None` jobs / `&None` target: use defaults from config/host.
+            build_config: BuildConfig::new(config, None, &None, mode)?,
+            features: Vec::new(),
+            all_features: false,
+            no_default_features: false,
+            spec: ops::Packages::Packages(Vec::new()),
+            filter: CompileFilter::Default {
+                required_features_filterable: false,
+            },
+            target_rustdoc_args: None,
+            target_rustc_args: None,
+            local_rustdoc_args: None,
+            export_dir: None,
+        })
+    }
+}
+
+/// Which workspace packages the user selected on the command line
+/// (via `-p`, `--all`, and `--exclude`).
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum Packages {
+    /// No selection flags given: use the workspace's default members.
+    Default,
+    /// `--all`: every workspace member.
+    All,
+    /// `--all --exclude <name>...`: all members except the named ones.
+    OptOut(Vec<String>),
+    /// `-p <spec>...`: exactly the named packages.
+    Packages(Vec<String>),
+}
+
+impl Packages {
+    /// Translates the raw `--all` / `--exclude` / `-p` command-line flags
+    /// into a `Packages` selection. `--exclude` without `--all` is an error.
+    pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
+        Ok(match (all, exclude.len(), package.len()) {
+            (false, 0, 0) => Packages::Default,
+            (false, 0, _) => Packages::Packages(package),
+            (false, _, _) => bail!("--exclude can only be used together with --all"),
+            (true, 0, _) => Packages::All,
+            (true, _, _) => Packages::OptOut(exclude),
+        })
+    }
+
+    /// Resolves this selection against the workspace into concrete
+    /// `PackageIdSpec`s. Errors if the selection matches no packages
+    /// (e.g. an empty virtual workspace).
+    pub fn to_package_id_specs(&self, ws: &Workspace) -> CargoResult<Vec<PackageIdSpec>> {
+        let specs = match *self {
+            Packages::All => ws.members()
+                .map(Package::package_id)
+                .map(PackageIdSpec::from_package_id)
+                .collect(),
+            Packages::OptOut(ref opt_out) => ws.members()
+                .map(Package::package_id)
+                .map(PackageIdSpec::from_package_id)
+                .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
+                .collect(),
+            // `-p` with an empty list behaves like selecting the current package.
+            Packages::Packages(ref packages) if packages.is_empty() => {
+                vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
+            }
+            Packages::Packages(ref packages) => packages
+                .iter()
+                .map(|p| PackageIdSpec::parse(p))
+                .collect::<CargoResult<Vec<_>>>()?,
+            Packages::Default => ws.default_members()
+                .map(Package::package_id)
+                .map(PackageIdSpec::from_package_id)
+                .collect(),
+        };
+        if specs.is_empty() {
+            if ws.is_virtual() {
+                bail!(
+                    "manifest path `{}` contains no package: The manifest is virtual, \
+                     and the workspace has no members.",
+                    ws.root().display()
+                )
+            }
+            bail!("no packages to compile")
+        }
+        Ok(specs)
+    }
+
+    /// Resolves this selection into references to the actual workspace
+    /// `Package`s. For `Packages::Packages`, a name that is not a member
+    /// of the workspace is an error.
+    pub fn get_packages<'ws>(&self, ws: &'ws Workspace) -> CargoResult<Vec<&'ws Package>> {
+        let packages: Vec<_> = match self {
+            Packages::Default => ws.default_members().collect(),
+            Packages::All => ws.members().collect(),
+            Packages::OptOut(ref opt_out) => ws
+                .members()
+                .filter(|pkg| !opt_out.iter().any(|name| pkg.name().as_str() == name))
+                .collect(),
+            Packages::Packages(ref pkgs) => pkgs
+                .iter()
+                .map(|name| {
+                    ws.members()
+                        .find(|pkg| pkg.name().as_str() == name)
+                        .ok_or_else(|| {
+                            format_err!("package `{}` is not a member of the workspace", name)
+                        })
+                }).collect::<CargoResult<Vec<_>>>()?,
+        };
+        Ok(packages)
+    }
+}
+
+/// Selection rule for one kind of target (bins, examples, tests, benches):
+/// either every target of that kind, or only the explicitly named ones.
+#[derive(Debug)]
+pub enum FilterRule {
+    All,
+    Just(Vec<String>),
+}
+
+/// Which targets of the selected packages should be built, derived from
+/// the `--lib`/`--bin`/`--example`/`--test`/`--bench`/`--all-targets` flags.
+#[derive(Debug)]
+pub enum CompileFilter {
+    /// No target-selection flags given: use each compile mode's defaults.
+    Default {
+        /// Flag whether targets can be safely skipped when required-features are not satisfied.
+        required_features_filterable: bool,
+    },
+    /// Explicit target selection from command-line flags.
+    Only {
+        all_targets: bool,
+        lib: bool,
+        bins: FilterRule,
+        examples: FilterRule,
+        tests: FilterRule,
+        benches: FilterRule,
+    },
+}
+
+/// Compiles the workspace with the given options using the default
+/// executor (rustc invocations pass straight through).
+pub fn compile<'a>(
+    ws: &Workspace<'a>,
+    options: &CompileOptions<'a>,
+) -> CargoResult<Compilation<'a>> {
+    let exec: Arc<Executor> = Arc::new(DefaultExecutor);
+    compile_with_exec(ws, options, &exec)
+}
+
+/// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build
+/// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through.
+pub fn compile_with_exec<'a>(
+    ws: &Workspace<'a>,
+    options: &CompileOptions<'a>,
+    exec: &Arc<Executor>,
+) -> CargoResult<Compilation<'a>> {
+    // Emit manifest warnings up front so they appear before build output.
+    ws.emit_warnings()?;
+    compile_ws(ws, None, options, exec)
+}
+
+/// Core of the compile pipeline: resolves dependencies, generates the base
+/// `Unit`s from the user's package/target selection, then hands everything
+/// to the build `Context` to execute. An optional `source` may be supplied
+/// to override where packages are loaded from.
+pub fn compile_ws<'a>(
+    ws: &Workspace<'a>,
+    source: Option<Box<Source + 'a>>,
+    options: &CompileOptions<'a>,
+    exec: &Arc<Executor>,
+) -> CargoResult<Compilation<'a>> {
+    let CompileOptions {
+        config,
+        ref build_config,
+        ref spec,
+        ref features,
+        all_features,
+        no_default_features,
+        ref filter,
+        ref target_rustdoc_args,
+        ref target_rustc_args,
+        ref local_rustdoc_args,
+        ref export_dir,
+    } = *options;
+
+    // With an explicit `--target`, units default to the target architecture;
+    // otherwise everything builds for the host.
+    let default_arch_kind = if build_config.requested_target.is_some() {
+        Kind::Target
+    } else {
+        Kind::Host
+    };
+
+    let specs = spec.to_package_id_specs(ws)?;
+    let features = Method::split_features(features);
+    let method = Method::Required {
+        dev_deps: ws.require_optional_deps() || filter.need_dev_deps(build_config.mode),
+        features: &features,
+        all_features,
+        uses_default_features: !no_default_features,
+    };
+    let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
+    let (packages, resolve_with_overrides) = resolve;
+
+    // Map each requested spec to a concrete package id in the resolve graph.
+    let to_build_ids = specs.iter()
+        .map(|s| s.query(resolve_with_overrides.iter()))
+        .collect::<CargoResult<Vec<_>>>()?;
+    let mut to_builds = packages.get_many(to_build_ids)?;
+
+    // The ordering here affects some error messages coming out of cargo, so
+    // let's be test and CLI friendly by always printing in the same order if
+    // there's an error.
+    to_builds.sort_by_key(|p| p.package_id());
+
+    for pkg in to_builds.iter() {
+        pkg.manifest().print_teapot(ws.config());
+
+        // Non-member packages can't have their dev-dependencies resolved,
+        // so testing them is rejected up front.
+        if build_config.mode.is_any_test()
+            && !ws.is_member(pkg)
+            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
+        {
+            bail!(
+                "package `{}` cannot be tested because it requires dev-dependencies \
+                 and is not a member of the workspace",
+                pkg.name()
+            );
+        }
+    }
+
+    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
+        (&Some(ref args), _) => (Some(args.clone()), "rustc"),
+        (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"),
+        _ => (None, ""),
+    };
+
+    // CLI parsing should have prevented this; reaching it is a cargo bug.
+    if extra_args.is_some() && to_builds.len() != 1 {
+        panic!(
+            "`{}` should not accept multiple `-p` flags",
+            extra_args_name
+        );
+    }
+
+    let profiles = ws.profiles();
+    profiles.validate_packages(&mut config.shell(), &packages)?;
+
+    let units = generate_targets(
+        ws,
+        profiles,
+        &to_builds,
+        filter,
+        default_arch_kind,
+        &resolve_with_overrides,
+        build_config,
+    )?;
+
+    // `cargo rustc`/`cargo rustdoc` extra args only make sense when exactly
+    // one final unit will receive them.
+    let mut extra_compiler_args = HashMap::new();
+    if let Some(args) = extra_args {
+        if units.len() != 1 {
+            bail!(
+                "extra arguments to `{}` can only be passed to one \
+                 target, consider filtering\nthe package by passing \
+                 e.g. `--lib` or `--bin NAME` to specify a single target",
+                extra_args_name
+            );
+        }
+        extra_compiler_args.insert(units[0], args);
+    }
+    // Local rustdoc args apply to every doc-mode unit.
+    if let Some(args) = local_rustdoc_args {
+        for unit in &units {
+            if unit.mode.is_doc() {
+                extra_compiler_args.insert(*unit, args.clone());
+            }
+        }
+    }
+
+    let ret = {
+        let _p = profile::start("compiling");
+        let bcx = BuildContext::new(
+            ws,
+            &resolve_with_overrides,
+            &packages,
+            config,
+            &build_config,
+            profiles,
+            extra_compiler_args,
+        )?;
+        let cx = Context::new(config, &bcx)?;
+        cx.compile(&units, export_dir.clone(), &exec)?
+    };
+
+    Ok(ret)
+}
+
+impl FilterRule {
+    /// Builds a rule from an `--<kind> <name>...` list and an `--<kind>s`
+    /// "all of this kind" flag; the flag takes precedence over the list.
+    pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
+        if all {
+            FilterRule::All
+        } else {
+            FilterRule::Just(targets)
+        }
+    }
+
+    /// Whether `target` is selected by this rule.
+    fn matches(&self, target: &Target) -> bool {
+        match *self {
+            FilterRule::All => true,
+            FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
+        }
+    }
+
+    /// Whether the user actively selected anything with this rule.
+    /// NOTE: `All` counts as specific (the user passed e.g. `--bins`);
+    /// only an empty `Just` list means "nothing requested".
+    fn is_specific(&self) -> bool {
+        match *self {
+            FilterRule::All => true,
+            FilterRule::Just(ref targets) => !targets.is_empty(),
+        }
+    }
+
+    /// Returns the explicit name list, or `None` for `All`.
+    pub fn try_collect(&self) -> Option<Vec<String>> {
+        match *self {
+            FilterRule::All => None,
+            FilterRule::Just(ref targets) => Some(targets.clone()),
+        }
+    }
+}
+
+impl CompileFilter {
+    /// Builds a filter from the raw target-selection CLI flags.
+    /// `--all-targets` wins over everything; otherwise any specific
+    /// selection produces `Only`, and no selection at all produces
+    /// `Default` (with required-features filtering allowed).
+    pub fn new(
+        lib_only: bool,
+        bins: Vec<String>,
+        all_bins: bool,
+        tsts: Vec<String>,
+        all_tsts: bool,
+        exms: Vec<String>,
+        all_exms: bool,
+        bens: Vec<String>,
+        all_bens: bool,
+        all_targets: bool,
+    ) -> CompileFilter {
+        let rule_bins = FilterRule::new(bins, all_bins);
+        let rule_tsts = FilterRule::new(tsts, all_tsts);
+        let rule_exms = FilterRule::new(exms, all_exms);
+        let rule_bens = FilterRule::new(bens, all_bens);
+
+        if all_targets {
+            CompileFilter::Only {
+                all_targets: true,
+                lib: true,
+                bins: FilterRule::All,
+                examples: FilterRule::All,
+                benches: FilterRule::All,
+                tests: FilterRule::All,
+            }
+        } else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific()
+            || rule_exms.is_specific() || rule_bens.is_specific()
+        {
+            CompileFilter::Only {
+                all_targets: false,
+                lib: lib_only,
+                bins: rule_bins,
+                examples: rule_exms,
+                benches: rule_bens,
+                tests: rule_tsts,
+            }
+        } else {
+            CompileFilter::Default {
+                required_features_filterable: true,
+            }
+        }
+    }
+
+    /// Whether the resolve needs to include dev-dependencies for `mode`.
+    /// Test/bench modes always do; build/doc/check modes only when the
+    /// filter explicitly selects examples, tests, or benches.
+    pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
+        match mode {
+            CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
+            CompileMode::Build | CompileMode::Doc { .. } | CompileMode::Check { .. } => match *self
+            {
+                CompileFilter::Default { .. } => false,
+                CompileFilter::Only {
+                    ref examples,
+                    ref tests,
+                    ref benches,
+                    ..
+                } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
+            },
+            CompileMode::RunCustomBuild => panic!("Invalid mode"),
+        }
+    }
+
+    // this selects targets for "cargo run". for logic to select targets for
+    // other subcommands, see generate_targets and filter_default_targets
+    pub fn target_run(&self, target: &Target) -> bool {
+        match *self {
+            CompileFilter::Default { .. } => true,
+            CompileFilter::Only {
+                lib,
+                ref bins,
+                ref examples,
+                ref tests,
+                ref benches,
+                ..
+            } => {
+                let rule = match *target.kind() {
+                    TargetKind::Bin => bins,
+                    TargetKind::Test => tests,
+                    TargetKind::Bench => benches,
+                    TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
+                    // The lib flag is a plain bool, not a name-based rule.
+                    TargetKind::Lib(..) => return lib,
+                    // Build scripts are never runnable targets.
+                    TargetKind::CustomBuild => return false,
+                };
+                rule.matches(target)
+            }
+        }
+    }
+
+    /// Whether the user made any explicit target selection.
+    pub fn is_specific(&self) -> bool {
+        match *self {
+            CompileFilter::Default { .. } => false,
+            CompileFilter::Only { .. } => true,
+        }
+    }
+}
+
+/// A proposed target.
+///
+/// Proposed targets are later filtered into actual Units based on whether or
+/// not the target requires its features to be present.
+#[derive(Debug)]
+struct Proposal<'a> {
+    pkg: &'a Package,
+    target: &'a Target,
+    /// Indicates whether or not all required features *must* be present. If
+    /// false, and the features are not available, then it will be silently
+    /// skipped. Generally, targets specified by name (`--bin foo`) are
+    /// required, all others can be silently skipped if features are missing.
+    requires_features: bool,
+    /// The compile mode this target would be built in (test, doc, etc.).
+    mode: CompileMode,
+}
+
+/// Generates all the base targets for the packages the user has requested to
+/// compile. Dependencies for these targets are computed later in
+/// `unit_dependencies`.
+///
+/// Works in two phases: first build a list of `Proposal`s from the filter,
+/// then drop proposals whose `required-features` are unavailable (or error,
+/// if the target was explicitly requested) while converting the survivors
+/// into de-duplicated `Unit`s.
+fn generate_targets<'a>(
+    ws: &Workspace,
+    profiles: &Profiles,
+    packages: &[&'a Package],
+    filter: &CompileFilter,
+    default_arch_kind: Kind,
+    resolve: &Resolve,
+    build_config: &BuildConfig,
+) -> CargoResult<Vec<Unit<'a>>> {
+    // Helper for creating a Unit struct.
+    let new_unit = |pkg: &'a Package, target: &'a Target, target_mode: CompileMode| {
+        let unit_for = if build_config.mode.is_any_test() {
+            // NOTE: The UnitFor here is subtle.  If you have a profile
+            // with `panic` set, the `panic` flag is cleared for
+            // tests/benchmarks and their dependencies.  If this
+            // was `normal`, then the lib would get compiled three
+            // times (once with panic, once without, and once with
+            // --test).
+            //
+            // This would cause a problem for Doc tests, which would fail
+            // because `rustdoc` would attempt to link with both libraries
+            // at the same time. Also, it's probably not important (or
+            // even desirable?) for rustdoc to link with a lib with
+            // `panic` set.
+            //
+            // As a consequence, Examples and Binaries get compiled
+            // without `panic` set.  This probably isn't a bad deal.
+            //
+            // Forcing the lib to be compiled three times during `cargo
+            // test` is probably also not desirable.
+            UnitFor::new_test()
+        } else if target.for_host() {
+            // proc-macro/plugin should not have `panic` set.
+            UnitFor::new_compiler()
+        } else {
+            UnitFor::new_normal()
+        };
+        // Custom build units are added in `build_unit_dependencies`.
+        assert!(!target.is_custom_build());
+        let target_mode = match target_mode {
+            CompileMode::Test => {
+                if target.is_example() && !filter.is_specific() && !target.tested() {
+                    // Examples are included as regular binaries to verify
+                    // that they compile.
+                    CompileMode::Build
+                } else {
+                    CompileMode::Test
+                }
+            }
+            // A plain `build` of a test/bench target still means compiling
+            // it as a test/bench harness.
+            CompileMode::Build => match *target.kind() {
+                TargetKind::Test => CompileMode::Test,
+                TargetKind::Bench => CompileMode::Bench,
+                _ => CompileMode::Build,
+            },
+            _ => target_mode,
+        };
+        // Plugins or proc-macro should be built for the host.
+        let kind = if target.for_host() {
+            Kind::Host
+        } else {
+            default_arch_kind
+        };
+        let profile = profiles.get_profile(
+            pkg.package_id(),
+            ws.is_member(pkg),
+            unit_for,
+            target_mode,
+            build_config.release,
+        );
+        // Once the profile has been selected for benchmarks, we don't need to
+        // distinguish between benches and tests. Switching the mode allows
+        // de-duplication of units that are essentially identical.  For
+        // example, `cargo build --all-targets --release` creates the units
+        // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench)
+        // and since these are the same, we want them to be de-duped in
+        // `unit_dependencies`.
+        let target_mode = match target_mode {
+            CompileMode::Bench => CompileMode::Test,
+            _ => target_mode,
+        };
+        Unit {
+            pkg,
+            target,
+            profile,
+            kind,
+            mode: target_mode,
+        }
+    };
+
+    // Create a list of proposed targets.
+    let mut proposals: Vec<Proposal> = Vec::new();
+
+    match *filter {
+        CompileFilter::Default {
+            required_features_filterable,
+        } => {
+            for pkg in packages {
+                let default = filter_default_targets(pkg.targets(), build_config.mode);
+                proposals.extend(default.into_iter().map(|target| Proposal {
+                    pkg,
+                    target,
+                    requires_features: !required_features_filterable,
+                    mode: build_config.mode,
+                }));
+                // `cargo test` additionally doc-tests the library, if any.
+                if build_config.mode == CompileMode::Test {
+                    if let Some(t) = pkg
+                        .targets()
+                        .iter()
+                        .find(|t| t.is_lib() && t.doctested() && t.doctestable())
+                    {
+                        proposals.push(Proposal {
+                            pkg,
+                            target: t,
+                            requires_features: false,
+                            mode: CompileMode::Doctest,
+                        });
+                    }
+                }
+            }
+        }
+        CompileFilter::Only {
+            all_targets,
+            lib,
+            ref bins,
+            ref examples,
+            ref tests,
+            ref benches,
+        } => {
+            if lib {
+                let mut libs = Vec::new();
+                for pkg in packages {
+                    for target in pkg.targets().iter().filter(|t| t.is_lib()) {
+                        if build_config.mode == CompileMode::Doctest && !target.doctestable() {
+                            ws.config()
+                                .shell()
+                                .warn(format!(
+                                "doc tests are not supported for crate type(s) `{}` in package `{}`",
+                                target.rustc_crate_types().join(", "),
+                                pkg.name()
+                            ))?;
+                        } else {
+                            libs.push(Proposal {
+                                pkg,
+                                target,
+                                requires_features: false,
+                                mode: build_config.mode,
+                            });
+                        }
+                    }
+                }
+                // An explicit `--lib` with no libraries is an error, but
+                // `--all-targets` tolerates library-less packages.
+                if !all_targets && libs.is_empty() {
+                    let names = packages.iter().map(|pkg| pkg.name()).collect::<Vec<_>>();
+                    if names.len() == 1 {
+                        bail!("no library targets found in package `{}`", names[0]);
+                    } else {
+                        bail!("no library targets found in packages: {}", names.join(", "));
+                    }
+                }
+                proposals.extend(libs);
+            }
+
+            // If --tests was specified, add all targets that would be
+            // generated by `cargo test`.
+            let test_filter = match *tests {
+                FilterRule::All => Target::tested,
+                FilterRule::Just(_) => Target::is_test,
+            };
+            let test_mode = match build_config.mode {
+                CompileMode::Build => CompileMode::Test,
+                CompileMode::Check { .. } => CompileMode::Check { test: true },
+                _ => build_config.mode,
+            };
+            // If --benches was specified, add all targets that would be
+            // generated by `cargo bench`.
+            let bench_filter = match *benches {
+                FilterRule::All => Target::benched,
+                FilterRule::Just(_) => Target::is_bench,
+            };
+            let bench_mode = match build_config.mode {
+                CompileMode::Build => CompileMode::Bench,
+                CompileMode::Check { .. } => CompileMode::Check { test: true },
+                _ => build_config.mode,
+            };
+
+            proposals.extend(list_rule_targets(
+                packages,
+                bins,
+                "bin",
+                Target::is_bin,
+                build_config.mode,
+            )?);
+            proposals.extend(list_rule_targets(
+                packages,
+                examples,
+                "example",
+                Target::is_example,
+                build_config.mode,
+            )?);
+            proposals.extend(list_rule_targets(
+                packages,
+                tests,
+                "test",
+                test_filter,
+                test_mode,
+            )?);
+            proposals.extend(list_rule_targets(
+                packages,
+                benches,
+                "bench",
+                bench_filter,
+                bench_mode,
+            )?);
+        }
+    }
+
+    // Only include targets that are libraries or have all required
+    // features available.
+    let mut features_map = HashMap::new();
+    // Collecting into a HashSet de-duplicates identical units (see the
+    // bench/test mode-merging note in `new_unit` above).
+    let mut units = HashSet::new();
+    for Proposal { pkg, target, requires_features, mode} in proposals {
+        let unavailable_features = match target.required_features() {
+            Some(rf) => {
+                // Feature resolution is cached per package across proposals.
+                let features = features_map
+                    .entry(pkg)
+                    .or_insert_with(|| resolve_all_features(resolve, pkg.package_id()));
+                rf.iter().filter(|f| !features.contains(*f)).collect()
+            }
+            None => Vec::new(),
+        };
+        if target.is_lib() || unavailable_features.is_empty() {
+            let unit = new_unit(pkg, target, mode);
+            units.insert(unit);
+        } else if requires_features {
+            let required_features = target.required_features().unwrap();
+            let quoted_required_features: Vec<String> = required_features
+                .iter()
+                .map(|s| format!("`{}`", s))
+                .collect();
+            bail!(
+                "target `{}` in package `{}` requires the features: {}\n\
+                 Consider enabling them by passing e.g. `--features=\"{}\"`",
+                target.name(),
+                pkg.name(),
+                quoted_required_features.join(", "),
+                required_features.join(" ")
+            );
+        }
+        // else, silently skip target.
+    }
+    Ok(units.into_iter().collect())
+}
+
+/// Collects the full set of feature names active for `package_id`: its own
+/// resolved features plus `"dep/feature"` entries for every feature enabled
+/// on its direct dependencies.
+fn resolve_all_features(
+    resolve_with_overrides: &Resolve,
+    package_id: &PackageId,
+) -> HashSet<String> {
+    let mut features = resolve_with_overrides.features(package_id).clone();
+
+    // Include features enabled for use by dependencies so targets can also use them with the
+    // required-features field when deciding whether to be built or skipped.
+    for (dep, _) in resolve_with_overrides.deps(package_id) {
+        for feature in resolve_with_overrides.features(dep) {
+            features.insert(dep.name().to_string() + "/" + feature);
+        }
+    }
+
+    features
+}
+
+/// Given a list of all targets for a package, filters out only the targets
+/// that are automatically included when the user doesn't specify any targets.
+///
+/// Panics on `Doctest`/`RunCustomBuild` — those modes never reach this
+/// default-selection path.
+fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> {
+    match mode {
+        CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
+        CompileMode::Test => targets
+            .iter()
+            .filter(|t| t.tested() || t.is_example())
+            .collect(),
+        CompileMode::Build | CompileMode::Check { .. } => targets
+            .iter()
+            .filter(|t| t.is_bin() || t.is_lib())
+            .collect(),
+        CompileMode::Doc { .. } => {
+            // `doc` does lib and bins (bin with same name as lib is skipped).
+            targets
+                .iter()
+                .filter(|t| {
+                    t.documented()
+                        && (!t.is_bin()
+                            || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
+                })
+                .collect()
+        }
+        CompileMode::Doctest | CompileMode::RunCustomBuild => panic!("Invalid mode {:?}", mode),
+    }
+}
+
+/// Returns a list of proposed targets based on a command-line target
+/// selection rule. For `FilterRule::All`, every target of the expected kind
+/// is proposed with `requires_features: false`; for named targets, lookup is
+/// delegated to `find_named_targets` (which marks features as required).
+fn list_rule_targets<'a>(
+    packages: &[&'a Package],
+    rule: &FilterRule,
+    target_desc: &'static str,
+    is_expected_kind: fn(&Target) -> bool,
+    mode: CompileMode,
+) -> CargoResult<Vec<Proposal<'a>>> {
+    let mut result = Vec::new();
+    match *rule {
+        FilterRule::All => {
+            for pkg in packages {
+                for target in pkg.targets() {
+                    if is_expected_kind(target) {
+                        result.push(Proposal {
+                            pkg,
+                            target,
+                            requires_features: false,
+                            mode,
+                        });
+                    }
+                }
+            }
+        }
+        FilterRule::Just(ref names) => {
+            for name in names {
+                result.extend(find_named_targets(
+                    packages,
+                    name,
+                    target_desc,
+                    is_expected_kind,
+                    mode,
+                )?);
+            }
+        }
+    }
+    Ok(result)
+}
+
+/// Find the targets for a specifically named target.
+///
+/// Errors when no target matches, suggesting the closest-named target of
+/// the expected kind (Levenshtein distance < 4) when one exists.
+fn find_named_targets<'a>(
+    packages: &[&'a Package],
+    target_name: &str,
+    target_desc: &'static str,
+    is_expected_kind: fn(&Target) -> bool,
+    mode: CompileMode,
+) -> CargoResult<Vec<Proposal<'a>>> {
+    let mut result = Vec::new();
+    for pkg in packages {
+        for target in pkg.targets() {
+            if target.name() == target_name && is_expected_kind(target) {
+                // Named targets must have their required-features satisfied.
+                result.push(Proposal {
+                    pkg,
+                    target,
+                    requires_features: true,
+                    mode,
+                });
+            }
+        }
+    }
+    if result.is_empty() {
+        // Build a "did you mean" suggestion from near-miss target names.
+        let suggestion = packages
+            .iter()
+            .flat_map(|pkg| {
+                pkg.targets()
+                    .iter()
+                    .filter(|target| is_expected_kind(target))
+            }).map(|target| (lev_distance(target_name, target.name()), target))
+            .filter(|&(d, _)| d < 4)
+            .min_by_key(|t| t.0)
+            .map(|t| t.1);
+        match suggestion {
+            Some(s) => bail!(
+                "no {} target named `{}`\n\nDid you mean `{}`?",
+                target_desc,
+                target_name,
+                s.name()
+            ),
+            None => bail!("no {} target named `{}`", target_desc, target_name),
+        }
+    }
+    Ok(result)
+}
diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs
new file mode 100644 (file)
index 0000000..3e84120
--- /dev/null
@@ -0,0 +1,109 @@
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+
+use failure::Fail;
+use opener;
+
+use core::Workspace;
+use ops;
+use util::CargoResult;
+
+/// Strongly typed options for the `cargo doc` command.
+#[derive(Debug)]
+pub struct DocOptions<'a> {
+    /// Whether to attempt to open the browser after compiling the docs;
+    /// when `true`, `doc` locates the generated `index.html` and opens it.
+    pub open_result: bool,
+    /// Options to pass through to the compiler
+    pub compile_opts: ops::CompileOptions<'a>,
+}
+
+/// Main method for `cargo doc`.
+///
+/// Resolves the requested packages, rejects crate-name collisions between
+/// documented targets, compiles the documentation, and (when
+/// `options.open_result` is set) opens the resulting `index.html`.
+pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
+    // Resolve the workspace with the requested feature selection applied.
+    let specs = options.compile_opts.spec.to_package_id_specs(ws)?;
+    let resolve = ops::resolve_ws_precisely(
+        ws,
+        None,
+        &options.compile_opts.features,
+        options.compile_opts.all_features,
+        options.compile_opts.no_default_features,
+        &specs,
+    )?;
+    let (packages, resolve_with_overrides) = resolve;
+
+    // Map each spec to a concrete package id and fetch those packages.
+    let ids = specs.iter()
+        .map(|s| s.query(resolve_with_overrides.iter()))
+        .collect::<CargoResult<Vec<_>>>()?;
+    let pkgs = packages.get_many(ids)?;
+
+    // Two documented targets sharing a crate name would land in the same
+    // output location, so a library or binary crate name may only be
+    // documented once; track names per kind and bail on duplicates.
+    let mut lib_names = HashMap::new();
+    let mut bin_names = HashMap::new();
+    for package in &pkgs {
+        for target in package.targets().iter().filter(|t| t.documented()) {
+            if target.is_lib() {
+                if let Some(prev) = lib_names.insert(target.crate_name(), package) {
+                    bail!(
+                        "The library `{}` is specified by packages `{}` and \
+                         `{}` but can only be documented once. Consider renaming \
+                         or marking one of the targets as `doc = false`.",
+                        target.crate_name(),
+                        prev,
+                        package
+                    );
+                }
+            } else if let Some(prev) = bin_names.insert(target.crate_name(), package) {
+                bail!(
+                    "The binary `{}` is specified by packages `{}` and \
+                     `{}` but can be documented only once. Consider renaming \
+                     or marking one of the targets as `doc = false`.",
+                    target.crate_name(),
+                    prev,
+                    package
+                );
+            }
+        }
+    }
+
+    ops::compile(ws, &options.compile_opts)?;
+
+    if options.open_result {
+        // Opening requires a single unambiguous crate; with multiple
+        // packages we cannot know which docs to show.
+        let name = if pkgs.len() > 1 {
+            bail!(
+                "Passing multiple packages and `open` is not supported.\n\
+                 Please re-run this command with `-p <spec>` where `<spec>` \
+                 is one of the following:\n  {}",
+                pkgs.iter()
+                    .map(|p| p.name().as_str())
+                    .collect::<Vec<_>>()
+                    .join("\n  ")
+            );
+        } else {
+            // Prefer the library's crate name, falling back to a binary;
+            // with no documented targets at all there is nothing to open.
+            match lib_names.keys().chain(bin_names.keys()).nth(0) {
+                Some(s) => s.to_string(),
+                None => return Ok(()),
+            }
+        };
+
+        // Don't bother locking here as if this is getting deleted there's
+        // nothing we can do about it and otherwise if it's getting overwritten
+        // then that's also ok!
+        let mut target_dir = ws.target_dir();
+        if let Some(ref triple) = options.compile_opts.build_config.requested_target {
+            // Cross-target docs live in a per-target subdirectory; `file_stem`
+            // presumably also handles a path to a custom target spec file —
+            // TODO confirm.
+            target_dir.push(Path::new(triple).file_stem().unwrap());
+        }
+        let path = target_dir.join("doc").join(&name).join("index.html");
+        let path = path.into_path_unlocked();
+        if fs::metadata(&path).is_ok() {
+            let mut shell = options.compile_opts.config.shell();
+            shell.status("Opening", path.display())?;
+            // Failure to open the browser is non-fatal: warn with the full
+            // cause chain instead of erroring out.
+            if let Err(e) = opener::open(&path) {
+                shell.warn(format!("Couldn't open docs: {}", e))?;
+                for cause in (&e as &Fail).iter_chain() {
+                    shell.warn(format!("Caused by:\n {}", cause))?;
+                }
+            }
+        }
+    }
+
+    Ok(())
+}
diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs
new file mode 100644 (file)
index 0000000..c9d2553
--- /dev/null
@@ -0,0 +1,65 @@
+use core::compiler::{BuildConfig, CompileMode, Kind, TargetInfo};
+use core::{PackageSet, Resolve, Workspace};
+use ops;
+use std::collections::HashSet;
+use util::CargoResult;
+use util::Config;
+
+/// Options controlling `cargo fetch`.
+pub struct FetchOptions<'a> {
+    /// Cargo configuration in effect for this invocation.
+    pub config: &'a Config,
+    /// The target arch triple to fetch dependencies for
+    pub target: Option<String>,
+}
+
+/// Executes `cargo fetch`.
+///
+/// Resolves the workspace, then walks the dependency graph from the
+/// workspace members and downloads every package reachable on the
+/// requested platform (all platforms when no `--target` was given).
+/// Returns the resolve together with the package set it was drawn from.
+pub fn fetch<'a>(
+    ws: &Workspace<'a>,
+    options: &FetchOptions<'a>,
+) -> CargoResult<(Resolve, PackageSet<'a>)> {
+    let (packages, resolve) = ops::resolve_ws(ws)?;
+
+    // Target/cfg information is needed below to evaluate platform-specific
+    // dependency declarations against the requested target.
+    let jobs = Some(1);
+    let config = ws.config();
+    let build_config = BuildConfig::new(config, jobs, &options.target, CompileMode::Build)?;
+    let rustc = config.rustc(Some(ws))?;
+    let target_info =
+        TargetInfo::new(config, &build_config.requested_target, &rustc, Kind::Target)?;
+    {
+        // Worklist traversal over the resolve graph, seeded with the
+        // workspace members; `fetched_packages` guards against revisiting.
+        let mut fetched_packages = HashSet::new();
+        let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::<Vec<_>>();
+        let mut to_download = Vec::new();
+
+        while let Some(id) = deps_to_fetch.pop() {
+            if !fetched_packages.insert(id) {
+                continue;
+            }
+
+            to_download.push(id.clone());
+            let deps = resolve.deps(id)
+                .filter(|&(_id, deps)| {
+                    deps.iter()
+                        .any(|d| {
+                            // If no target was specified then all dependencies can
+                            // be fetched.
+                            let target = match options.target {
+                                Some(ref t) => t,
+                                None => return true,
+                            };
+                            // If this dependency is only available for certain
+                            // platforms, make sure we're only fetching it for that
+                            // platform.
+                            let platform = match d.platform() {
+                                Some(p) => p,
+                                None => return true,
+                            };
+                            platform.matches(target, target_info.cfg())
+                        })
+                })
+                .map(|(id, _deps)| id);
+            deps_to_fetch.extend(deps);
+        }
+        // Actually download everything we decided to keep.
+        packages.get_many(&to_download)?;
+    }
+
+    Ok((resolve, packages))
+}
diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs
new file mode 100644 (file)
index 0000000..cce7390
--- /dev/null
@@ -0,0 +1,213 @@
+use std::collections::{BTreeMap, HashSet};
+
+use termcolor::Color::{self, Cyan, Green, Red};
+
+use core::registry::PackageRegistry;
+use core::resolver::Method;
+use core::PackageId;
+use core::{Resolve, SourceId, Workspace};
+use ops;
+use util::config::Config;
+use util::CargoResult;
+
+/// Options controlling `cargo update`.
+pub struct UpdateOptions<'a> {
+    pub config: &'a Config,
+    /// Names of the packages whose locked versions should change;
+    /// an empty list means every locked package may be updated.
+    pub to_update: Vec<String>,
+    /// Exact version (or source-specific revision string) to update to.
+    pub precise: Option<&'a str>,
+    /// Also allow the transitive dependencies of `to_update` to change.
+    pub aggressive: bool,
+}
+
+pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
+    let mut registry = PackageRegistry::new(ws.config())?;
+    let resolve = ops::resolve_with_previous(
+        &mut registry,
+        ws,
+        Method::Everything,
+        None,
+        None,
+        &[],
+        true,
+        true,
+    )?;
+    ops::write_pkg_lockfile(ws, &resolve)?;
+    Ok(())
+}
+
+/// Executes `cargo update`.
+///
+/// Re-resolves the dependency graph while telling the resolver to avoid
+/// reusing the previously locked versions of the selected packages, prints
+/// a summary of what changed, and writes the new lockfile.
+pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> {
+    if opts.aggressive && opts.precise.is_some() {
+        bail!("cannot specify both aggressive and precise simultaneously")
+    }
+
+    if ws.members().count() == 0 {
+        bail!("you can't generate a lockfile for an empty workspace.")
+    }
+
+    if opts.config.cli_unstable().offline {
+        bail!("you can't update in the offline mode");
+    }
+
+    // With no existing lockfile there is nothing to "update"; just generate
+    // one from scratch.
+    let previous_resolve = match ops::load_pkg_lockfile(ws)? {
+        Some(resolve) => resolve,
+        None => return generate_lockfile(ws),
+    };
+    let mut registry = PackageRegistry::new(opts.config)?;
+    // Package ids whose locked versions the resolver should not reuse.
+    let mut to_avoid = HashSet::new();
+
+    if opts.to_update.is_empty() {
+        // No packages named: every locked entry may change.
+        to_avoid.extend(previous_resolve.iter());
+    } else {
+        let mut sources = Vec::new();
+        for name in opts.to_update.iter() {
+            let dep = previous_resolve.query(name)?;
+            if opts.aggressive {
+                // Also allow all transitive dependencies of `dep` to change.
+                fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new());
+            } else {
+                to_avoid.insert(dep);
+                sources.push(match opts.precise {
+                    Some(precise) => {
+                        // TODO: see comment in `resolve.rs` as well, but this
+                        //       seems like a pretty hokey reason to single out
+                        //       the registry as well.
+                        let precise = if dep.source_id().is_registry() {
+                            format!("{}={}->{}", dep.name(), dep.version(), precise)
+                        } else {
+                            precise.to_string()
+                        };
+                        dep.source_id().clone().with_precise(Some(precise))
+                    }
+                    None => dep.source_id().clone().with_precise(None),
+                });
+            }
+        }
+        registry.add_sources(&sources)?;
+    }
+
+    let resolve = ops::resolve_with_previous(
+        &mut registry,
+        ws,
+        Method::Everything,
+        Some(&previous_resolve),
+        Some(&to_avoid),
+        &[],
+        true,
+        true,
+    )?;
+
+    // Summarize what is changing for the user.
+    let print_change = |status: &str, msg: String, color: Color| {
+        opts.config.shell().status_with_color(status, msg, color)
+    };
+    for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) {
+        if removed.len() == 1 && added.len() == 1 {
+            // A one-for-one swap is an "Updating"; for git sources show the
+            // first 8 characters of the new precise (revision) string rather
+            // than a version number.
+            let msg = if removed[0].source_id().is_git() {
+                format!(
+                    "{} -> #{}",
+                    removed[0],
+                    &added[0].source_id().precise().unwrap()[..8]
+                )
+            } else {
+                format!("{} -> v{}", removed[0], added[0].version())
+            };
+            print_change("Updating", msg, Green)?;
+        } else {
+            for package in removed.iter() {
+                print_change("Removing", format!("{}", package), Red)?;
+            }
+            for package in added.iter() {
+                print_change("Adding", format!("{}", package), Cyan)?;
+            }
+        }
+    }
+
+    ops::write_pkg_lockfile(ws, &resolve)?;
+    return Ok(());
+
+    // Recursively adds `dep` and everything it depends on to `set`;
+    // `visited` breaks cycles in the graph.
+    fn fill_with_deps<'a>(
+        resolve: &'a Resolve,
+        dep: &'a PackageId,
+        set: &mut HashSet<&'a PackageId>,
+        visited: &mut HashSet<&'a PackageId>,
+    ) {
+        if !visited.insert(dep) {
+            return;
+        }
+        set.insert(dep);
+        for dep in resolve.deps_not_replaced(dep) {
+            fill_with_deps(resolve, dep, set, visited);
+        }
+    }
+
+    // Pairs up the old and new resolves by (name, source) and returns, for
+    // each pair, the package ids that were removed and those that were added.
+    fn compare_dependency_graphs<'a>(
+        previous_resolve: &'a Resolve,
+        resolve: &'a Resolve,
+    ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
+        fn key(dep: &PackageId) -> (&str, &SourceId) {
+            (dep.name().as_str(), dep.source_id())
+        }
+
+        // Removes all package ids in `b` from `a`. Note that this is somewhat
+        // more complicated because the equality for source ids does not take
+        // precise versions into account (e.g. git shas), but we want to take
+        // that into account here.
+        fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> {
+            a.iter()
+                .filter(|a| {
+                    // If this package id is not found in `b`, then it's definitely
+                    // in the subtracted set
+                    let i = match b.binary_search(a) {
+                        Ok(i) => i,
+                        Err(..) => return true,
+                    };
+
+                    // If we've found `a` in `b`, then we iterate over all instances
+                    // (we know `b` is sorted) and see if they all have different
+                    // precise versions. If so, then `a` isn't actually in `b` so
+                    // we'll let it through.
+                    //
+                    // Note that we only check this for non-registry sources,
+                    // however, as registries contain enough version information in
+                    // the package id to disambiguate
+                    if a.source_id().is_registry() {
+                        return false;
+                    }
+                    b[i..]
+                        .iter()
+                        .take_while(|b| a == b)
+                        .all(|b| a.source_id().precise() != b.source_id().precise())
+                })
+                .cloned()
+                .collect()
+        }
+
+        // Map (package name, package source) to (removed versions, added versions).
+        let mut changes = BTreeMap::new();
+        let empty = (Vec::new(), Vec::new());
+        for dep in previous_resolve.iter() {
+            changes
+                .entry(key(dep))
+                .or_insert_with(|| empty.clone())
+                .0
+                .push(dep);
+        }
+        for dep in resolve.iter() {
+            changes
+                .entry(key(dep))
+                .or_insert_with(|| empty.clone())
+                .1
+                .push(dep);
+        }
+
+        // Sorting is required both for `vec_subtract`'s binary search and
+        // for deterministic output.
+        for v in changes.values_mut() {
+            let (ref mut old, ref mut new) = *v;
+            old.sort();
+            new.sort();
+            let removed = vec_subtract(old, new);
+            let added = vec_subtract(new, old);
+            *old = removed;
+            *new = added;
+        }
+        debug!("{:#?}", changes);
+
+        changes.into_iter().map(|(_, v)| v).collect()
+    }
+}
diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs
new file mode 100644 (file)
index 0000000..c38019a
--- /dev/null
@@ -0,0 +1,873 @@
+use std::collections::btree_map::Entry;
+use std::collections::{BTreeMap, BTreeSet};
+use std::{env, fs};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use semver::{Version, VersionReq};
+use tempfile::Builder as TempFileBuilder;
+use toml;
+
+use core::{Dependency, Edition, Package, PackageIdSpec, Source, SourceId};
+use core::{PackageId, Workspace};
+use core::source::SourceMap;
+use core::package::PackageSet;
+use core::compiler::{DefaultExecutor, Executor};
+use ops::{self, CompileFilter};
+use sources::{GitSource, PathSource, SourceConfigMap};
+use util::{internal, Config};
+use util::{FileLock, Filesystem};
+use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
+
+/// Top-level serialized format of the installed-crates metadata: either the
+/// versioned `V1` layout or an entirely empty document. `untagged` lets
+/// serde pick whichever variant matches the file's contents.
+#[derive(Deserialize, Serialize)]
+#[serde(untagged)]
+enum CrateListing {
+    V1(CrateListingV1),
+    Empty(Empty),
+}
+
+/// Matches a metadata document with no fields at all; `deny_unknown_fields`
+/// ensures this variant only deserializes when nothing is present.
+#[derive(Deserialize, Serialize)]
+#[serde(deny_unknown_fields)]
+struct Empty {}
+
+/// Version 1 layout of the install metadata: maps each installed package id
+/// to the set of binary names that package installed.
+#[derive(Deserialize, Serialize)]
+struct CrateListingV1 {
+    v1: BTreeMap<PackageId, BTreeSet<String>>,
+}
+
+/// Rollback guard for freshly installed binaries: unless `success` is
+/// called, dropping the transaction deletes every path in `bins`.
+struct Transaction {
+    // Paths of binaries installed so far, pending commit.
+    bins: Vec<PathBuf>,
+}
+
+impl Transaction {
+    fn success(mut self) {
+        self.bins.clear();
+    }
+}
+
+impl Drop for Transaction {
+    fn drop(&mut self) {
+        for bin in self.bins.iter() {
+            let _ = paths::remove_file(bin);
+        }
+    }
+}
+
+/// Top-level entry point for `cargo install`.
+///
+/// Installs each crate in `krates` (or the single implicit crate when the
+/// list is empty), prints a summary for multi-crate invocations, and warns
+/// when the install root's `bin` directory is not on `PATH`.
+pub fn install(
+    root: Option<&str>,
+    krates: Vec<&str>,
+    source_id: &SourceId,
+    from_cwd: bool,
+    vers: Option<&str>,
+    opts: &ops::CompileOptions,
+    force: bool,
+) -> CargoResult<()> {
+    let root = resolve_root(root, opts.config)?;
+    let map = SourceConfigMap::new(opts.config)?;
+
+    let (installed_anything, scheduled_error) = if krates.len() <= 1 {
+        // Zero or one crate: any error propagates immediately.
+        install_one(
+            &root,
+            &map,
+            krates.into_iter().next(),
+            source_id,
+            from_cwd,
+            vers,
+            opts,
+            force,
+            true,
+        )?;
+        (true, false)
+    } else {
+        // Multiple crates: keep going past per-crate failures, recording
+        // successes and failures so a combined summary can be shown.
+        let mut succeeded = vec![];
+        let mut failed = vec![];
+        let mut first = true;
+        for krate in krates {
+            let root = root.clone();
+            let map = map.clone();
+            match install_one(
+                &root,
+                &map,
+                Some(krate),
+                source_id,
+                from_cwd,
+                vers,
+                opts,
+                force,
+                first,
+            ) {
+                Ok(()) => succeeded.push(krate),
+                Err(e) => {
+                    // Report the error now but defer the overall failure
+                    // until all crates have been attempted.
+                    ::handle_error(&e, &mut opts.config.shell());
+                    failed.push(krate)
+                }
+            }
+            first = false;
+        }
+
+        let mut summary = vec![];
+        if !succeeded.is_empty() {
+            summary.push(format!("Successfully installed {}!", succeeded.join(", ")));
+        }
+        if !failed.is_empty() {
+            summary.push(format!(
+                "Failed to install {} (see error(s) above).",
+                failed.join(", ")
+            ));
+        }
+        if !succeeded.is_empty() || !failed.is_empty() {
+            opts.config.shell().status("Summary", summary.join(" "))?;
+        }
+
+        (!succeeded.is_empty(), !failed.is_empty())
+    };
+
+    if installed_anything {
+        // Print a warning that if this directory isn't in PATH that they won't be
+        // able to run these commands.
+        let dst = metadata(opts.config, &root)?.parent().join("bin");
+        let path = env::var_os("PATH").unwrap_or_default();
+        for path in env::split_paths(&path) {
+            if path == dst {
+                return Ok(());
+            }
+        }
+
+        opts.config.shell().warn(&format!(
+            "be sure to add `{}` to your PATH to be \
+             able to run the installed binaries",
+            dst.display()
+        ))?;
+    }
+
+    if scheduled_error {
+        bail!("some crates failed to install");
+    }
+
+    Ok(())
+}
+
+/// Installs a single crate: selects the package from its source, compiles
+/// it, then atomically-as-possible stages and moves its binaries into the
+/// install root's `bin` directory while keeping the on-disk install
+/// metadata in sync. Newly placed binaries are rolled back on failure via
+/// the `Transaction` guard.
+fn install_one(
+    root: &Filesystem,
+    map: &SourceConfigMap,
+    krate: Option<&str>,
+    source_id: &SourceId,
+    from_cwd: bool,
+    vers: Option<&str>,
+    opts: &ops::CompileOptions,
+    force: bool,
+    is_first_install: bool,
+) -> CargoResult<()> {
+    let config = opts.config;
+
+    // Pick the package depending on the kind of source: git, local path,
+    // or a (registry-like) source loaded through the source config map.
+    let (pkg, source) = if source_id.is_git() {
+        select_pkg(
+            GitSource::new(source_id, config)?,
+            krate,
+            vers,
+            config,
+            true,
+            &mut |git| git.read_packages(),
+        )?
+    } else if source_id.is_path() {
+        let mut src = path_source(source_id, config)?;
+        src.update().chain_err(|| {
+            format_err!(
+                "`{}` is not a crate root; specify a crate to \
+                 install from crates.io, or use --path or --git to \
+                 specify an alternate source",
+                src.path().display()
+            )
+        })?;
+        select_pkg(src, krate, vers, config, false, &mut |path| path.read_packages())?
+    } else {
+        select_pkg(
+            map.load(source_id)?,
+            krate,
+            vers,
+            config,
+            is_first_install,
+            &mut |_| {
+                bail!(
+                    "must specify a crate to install from \
+                     crates.io, or use --path or --git to \
+                     specify alternate source"
+                )
+            },
+        )?
+    };
+
+    // Choose where to build: path installs build in place (None); otherwise
+    // prefer the configured target dir, then a temp dir, and finally a
+    // `target-install` dir in the cwd that must be cleaned up afterwards.
+    let mut td_opt = None;
+    let mut needs_cleanup = false;
+    let overidden_target_dir = if source_id.is_path() {
+        None
+    } else if let Some(dir) = config.target_dir()? {
+        Some(dir)
+    } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() {
+        let p = td.path().to_owned();
+        td_opt = Some(td);
+        Some(Filesystem::new(p))
+    } else {
+        needs_cleanup = true;
+        Some(Filesystem::new(config.cwd().join("target-install")))
+    };
+
+    let ws = match overidden_target_dir {
+        Some(dir) => Workspace::ephemeral(pkg, config, Some(dir), false)?,
+        None => {
+            let mut ws = Workspace::new(pkg.manifest_path(), config)?;
+            ws.set_require_optional_deps(false);
+            ws
+        }
+    };
+    let pkg = ws.current()?;
+
+    // Installing from the current working directory is deprecated for 2015
+    // edition packages and an outright error for 2018 edition packages.
+    if from_cwd {
+        match pkg.manifest().edition() {
+            Edition::Edition2015 => config.shell().warn(
+                "Using `cargo install` to install the binaries for the \
+                 package in current working directory is deprecated, \
+                 use `cargo install --path .` instead. \
+                 Use `cargo build` if you want to simply build the package.",
+            )?,
+            Edition::Edition2018 => bail!(
+                "Using `cargo install` to install the binaries for the \
+                 package in current working directory is no longer supported, \
+                 use `cargo install --path .` instead. \
+                 Use `cargo build` if you want to simply build the package."
+            ),
+        }
+    };
+
+    config.shell().status("Installing", pkg)?;
+
+    // Preflight checks to check up front whether we'll overwrite something.
+    // We have to check this again afterwards, but may as well avoid building
+    // anything if we're gonna throw it away anyway.
+    {
+        let metadata = metadata(config, root)?;
+        let list = read_crate_list(&metadata)?;
+        let dst = metadata.parent().join("bin");
+        check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
+    }
+
+    let exec: Arc<Executor> = Arc::new(DefaultExecutor);
+    let compile =
+        ops::compile_ws(&ws, Some(source), opts, &exec).chain_err(|| {
+            if let Some(td) = td_opt.take() {
+                // preserve the temporary directory, so the user can inspect it
+                td.into_path();
+            }
+
+            format_err!(
+                "failed to compile `{}`, intermediate artifacts can be \
+                 found at `{}`",
+                pkg,
+                ws.target_dir().display()
+            )
+        })?;
+    // Pair each compiled binary with its (UTF-8) file name; non-UTF-8
+    // names cannot be recorded in the metadata and are rejected.
+    let binaries: Vec<(&str, &Path)> = compile
+        .binaries
+        .iter()
+        .map(|bin| {
+            let name = bin.file_name().unwrap();
+            if let Some(s) = name.to_str() {
+                Ok((s, bin.as_ref()))
+            } else {
+                bail!("Binary `{:?}` name can't be serialized into string", name)
+            }
+        })
+        .collect::<CargoResult<_>>()?;
+    if binaries.is_empty() {
+        bail!(
+            "no binaries are available for install using the selected \
+             features"
+        );
+    }
+
+    // Re-check for overwrites now that the build is done; `duplicates` maps
+    // each to-be-replaced binary name to the package that owned it.
+    let metadata = metadata(config, root)?;
+    let mut list = read_crate_list(&metadata)?;
+    let dst = metadata.parent().join("bin");
+    let duplicates = check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
+
+    fs::create_dir_all(&dst)?;
+
+    // Copy all binaries to a temporary directory under `dst` first, catching
+    // some failure modes (e.g. out of space) before touching the existing
+    // binaries. This directory will get cleaned up via RAII.
+    let staging_dir = TempFileBuilder::new()
+        .prefix("cargo-install")
+        .tempdir_in(&dst)?;
+    for &(bin, src) in binaries.iter() {
+        let dst = staging_dir.path().join(bin);
+        // Try to move if `target_dir` is transient.
+        if !source_id.is_path() && fs::rename(src, &dst).is_ok() {
+            continue;
+        }
+        fs::copy(src, &dst).chain_err(|| {
+            format_err!("failed to copy `{}` to `{}`", src.display(), dst.display())
+        })?;
+    }
+
+    let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries
+        .iter()
+        .map(|&(bin, _)| bin)
+        .partition(|&bin| duplicates.contains_key(bin));
+
+    let mut installed = Transaction { bins: Vec::new() };
+
+    // Move the temporary copies into `dst` starting with new binaries.
+    for bin in to_install.iter() {
+        let src = staging_dir.path().join(bin);
+        let dst = dst.join(bin);
+        config.shell().status("Installing", dst.display())?;
+        fs::rename(&src, &dst).chain_err(|| {
+            format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
+        })?;
+        installed.bins.push(dst);
+    }
+
+    // Repeat for binaries which replace existing ones but don't pop the error
+    // up until after updating metadata.
+    let mut replaced_names = Vec::new();
+    let result = {
+        let mut try_install = || -> CargoResult<()> {
+            for &bin in to_replace.iter() {
+                let src = staging_dir.path().join(bin);
+                let dst = dst.join(bin);
+                config.shell().status("Replacing", dst.display())?;
+                fs::rename(&src, &dst).chain_err(|| {
+                    format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
+                })?;
+                replaced_names.push(bin);
+            }
+            Ok(())
+        };
+        try_install()
+    };
+
+    // Update records of replaced binaries.
+    for &bin in replaced_names.iter() {
+        if let Some(&Some(ref p)) = duplicates.get(bin) {
+            if let Some(set) = list.v1.get_mut(p) {
+                set.remove(bin);
+            }
+        }
+        // Failsafe to force replacing metadata for git packages
+        // https://github.com/rust-lang/cargo/issues/4582
+        if let Some(set) = list.v1.remove(&pkg.package_id().clone()) {
+            list.v1.insert(pkg.package_id().clone(), set);
+        }
+        list.v1
+            .entry(pkg.package_id().clone())
+            .or_insert_with(BTreeSet::new)
+            .insert(bin.to_string());
+    }
+
+    // Remove empty metadata lines.
+    let pkgs = list.v1
+        .iter()
+        .filter_map(|(p, set)| {
+            if set.is_empty() {
+                Some(p.clone())
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<_>>();
+    for p in pkgs.iter() {
+        list.v1.remove(p);
+    }
+
+    // If installation was successful record newly installed binaries.
+    if result.is_ok() {
+        list.v1
+            .entry(pkg.package_id().clone())
+            .or_insert_with(BTreeSet::new)
+            .extend(to_install.iter().map(|s| s.to_string()));
+    }
+
+    let write_result = write_crate_list(&metadata, list);
+    match write_result {
+        // Replacement error (if any) isn't actually caused by write error
+        // but this seems to be the only way to show both.
+        Err(err) => result.chain_err(|| err)?,
+        Ok(_) => result?,
+    }
+
+    // Reaching here means all actions have succeeded. Clean up.
+    installed.success();
+    if needs_cleanup {
+        // Don't bother grabbing a lock as we're going to blow it all away
+        // anyway.
+        let target_dir = ws.target_dir().into_path_unlocked();
+        paths::remove_dir_all(&target_dir)?;
+    }
+
+    Ok(())
+}
+
+fn path_source<'a>(source_id: &SourceId, config: &'a Config) -> CargoResult<PathSource<'a>> {
+    let path = source_id
+        .url()
+        .to_file_path()
+        .map_err(|()| format_err!("path sources must have a valid path"))?;
+    Ok(PathSource::new(&path, source_id, config))
+}
+
+/// Selects the package to install from `source`.
+///
+/// With a crate `name`, queries the source for the maximum matching package
+/// id (effectively the newest version) honoring the optional `vers`
+/// requirement; without a name, lists all packages via `list_all` and picks
+/// the single one providing binaries, falling back to the single one
+/// providing examples, erroring when the choice is absent or ambiguous.
+fn select_pkg<'a, T>(
+    mut source: T,
+    name: Option<&str>,
+    vers: Option<&str>,
+    config: &Config,
+    needs_update: bool,
+    list_all: &mut FnMut(&mut T) -> CargoResult<Vec<Package>>,
+) -> CargoResult<(Package, Box<Source + 'a>)>
+where
+    T: Source + 'a,
+{
+    if needs_update {
+        source.update()?;
+    }
+
+    match name {
+        Some(name) => {
+            let vers = match vers {
+                Some(v) => {
+                    // If the version begins with character <, >, =, ^, ~ parse it as a
+                    // version range, otherwise parse it as a specific version
+                    let first = v.chars()
+                        .nth(0)
+                        .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;
+
+                    match first {
+                        '<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {
+                            Ok(v) => Some(v.to_string()),
+                            Err(_) => bail!(
+                                "the `--vers` provided, `{}`, is \
+                                       not a valid semver version requirement\n\n
+                                       Please have a look at \
+                                       http://doc.crates.io/specifying-dependencies.html \
+                                       for the correct format",
+                                v
+                            ),
+                        },
+                        _ => match v.parse::<Version>() {
+                            // A bare version is treated as an exact requirement.
+                            Ok(v) => Some(format!("={}", v)),
+                            Err(_) => {
+                                let mut msg = format!(
+                                    "\
+                                     the `--vers` provided, `{}`, is \
+                                     not a valid semver version\n\n\
+                                     historically Cargo treated this \
+                                     as a semver version requirement \
+                                     accidentally\nand will continue \
+                                     to do so, but this behavior \
+                                     will be removed eventually",
+                                    v
+                                );
+
+                                // If it is not a valid version but it is a valid version
+                                // requirement, add a note to the warning
+                                if v.parse::<VersionReq>().is_ok() {
+                                    msg.push_str(&format!(
+                                        "\nif you want to specify semver range, \
+                                         add an explicit qualifier, like ^{}",
+                                        v
+                                    ));
+                                }
+                                config.shell().warn(&msg)?;
+                                Some(v.to_string())
+                            }
+                        },
+                    }
+                }
+                None => None,
+            };
+            let vers = vers.as_ref().map(|s| &**s);
+            let vers_spec = if vers.is_none() && source.source_id().is_registry() {
+                // Avoid pre-release versions from crate.io
+                // unless explicitly asked for
+                Some("*")
+            } else {
+                vers
+            };
+            let dep = Dependency::parse_no_deprecated(
+                name,
+                vers_spec,
+                source.source_id(),
+            )?;
+            let deps = source.query_vec(&dep)?;
+            // Take the maximum matching package id — effectively the newest
+            // version satisfying the requirement.
+            let pkgid = match deps.iter().map(|p| p.package_id()).max() {
+                Some(pkgid) => pkgid,
+                None => {
+                    let vers_info = vers.map(|v| format!(" with version `{}`", v))
+                        .unwrap_or_default();
+                    bail!(
+                        "could not find `{}` in {}{}",
+                        name,
+                        source.source_id(),
+                        vers_info
+                    )
+                }
+            };
+
+            // Download the selected package through a one-entry package set.
+            let pkg = {
+                let mut map = SourceMap::new();
+                map.insert(Box::new(&mut source));
+                PackageSet::new(&[pkgid.clone()], map, config)?
+                    .get_one(&pkgid)?
+                    .clone()
+            };
+            Ok((pkg, Box::new(source)))
+        }
+        None => {
+            // No crate named: enumerate the source and pick the sole package
+            // with binaries, falling back to the sole package with examples.
+            let candidates = list_all(&mut source)?;
+            let binaries = candidates
+                .iter()
+                .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);
+            let examples = candidates
+                .iter()
+                .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);
+            let pkg = match one(binaries, |v| multi_err("binaries", v))? {
+                Some(p) => p,
+                None => match one(examples, |v| multi_err("examples", v))? {
+                    Some(p) => p,
+                    None => bail!(
+                        "no packages found with binaries or \
+                         examples"
+                    ),
+                },
+            };
+            return Ok((pkg.clone(), Box::new(source)));
+
+            // Formats the ambiguity error listing every candidate package,
+            // sorted by name for deterministic output.
+            fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {
+                pkgs.sort_unstable_by_key(|a| a.name());
+                format!(
+                    "multiple packages with {} found: {}",
+                    kind,
+                    pkgs.iter()
+                        .map(|p| p.name().as_str())
+                        .collect::<Vec<_>>()
+                        .join(", ")
+                )
+            }
+        }
+    }
+}
+
+fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
+where
+    I: Iterator,
+    F: FnOnce(Vec<I::Item>) -> String,
+{
+    match (i.next(), i.next()) {
+        (Some(i1), Some(i2)) => {
+            let mut v = vec![i1, i2];
+            v.extend(i);
+            Err(format_err!("{}", f(v)))
+        }
+        (Some(i), None) => Ok(Some(i)),
+        (None, _) => Ok(None),
+    }
+}
+
+fn check_overwrites(
+    dst: &Path,
+    pkg: &Package,
+    filter: &ops::CompileFilter,
+    prev: &CrateListingV1,
+    force: bool,
+) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
+    // If explicit --bin or --example flags were passed then those'll
+    // get checked during cargo_compile, we only care about the "build
+    // everything" case here
+    if !filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) {
+        bail!("specified package has no binaries")
+    }
+    let duplicates = find_duplicates(dst, pkg, filter, prev);
+    if force || duplicates.is_empty() {
+        return Ok(duplicates);
+    }
+    // Format the error message.
+    let mut msg = String::new();
+    for (bin, p) in duplicates.iter() {
+        msg.push_str(&format!("binary `{}` already exists in destination", bin));
+        if let Some(p) = p.as_ref() {
+            msg.push_str(&format!(" as part of `{}`\n", p));
+        } else {
+            msg.push_str("\n");
+        }
+    }
+    msg.push_str("Add --force to overwrite");
+    Err(format_err!("{}", msg))
+}
+
/// Scans the installation directory `dst` for binaries that `pkg` would
/// install and that already exist on disk.
///
/// Returns a map from binary file name (including the platform's executable
/// suffix) to the package recorded in `prev` as owning that file, or `None`
/// when the file exists on disk but is not tracked in the metadata.
fn find_duplicates(
    dst: &Path,
    pkg: &Package,
    filter: &ops::CompileFilter,
    prev: &CrateListingV1,
) -> BTreeMap<String, Option<PackageId>> {
    let check = |name: String| {
        // Append e.g. `.exe` on Windows before probing the filesystem.
        let name = format!("{}{}", name, env::consts::EXE_SUFFIX);
        if fs::metadata(dst.join(&name)).is_err() {
            // Nothing on disk with this name: not a duplicate.
            None
        } else if let Some((p, _)) = prev.v1.iter().find(|&(_, v)| v.contains(&name)) {
            // The file exists and an installed package claims it.
            Some((name, Some(p.clone())))
        } else {
            // The file exists but no installed package is recorded for it.
            Some((name, None))
        }
    };
    match *filter {
        // Default build: every binary target of the package is a candidate.
        CompileFilter::Default { .. } => pkg.targets()
            .iter()
            .filter(|t| t.is_bin())
            .filter_map(|t| check(t.name().to_string()))
            .collect(),
        CompileFilter::Only {
            ref bins,
            ref examples,
            ..
        } => {
            // try_collect presumably yields the explicitly requested target
            // names when the rule lists them — TODO confirm against the
            // FilterRule definition; otherwise fall back to all matching
            // targets of the package.
            let all_bins: Vec<String> = bins.try_collect().unwrap_or_else(|| {
                pkg.targets()
                    .iter()
                    .filter(|t| t.is_bin())
                    .map(|t| t.name().to_string())
                    .collect()
            });
            let all_examples: Vec<String> = examples.try_collect().unwrap_or_else(|| {
                pkg.targets()
                    .iter()
                    .filter(|t| t.is_bin_example())
                    .map(|t| t.name().to_string())
                    .collect()
            });

            all_bins
                .iter()
                .chain(all_examples.iter())
                .filter_map(|t| check(t.clone()))
                .collect::<BTreeMap<String, Option<PackageId>>>()
        }
    }
}
+
+fn read_crate_list(file: &FileLock) -> CargoResult<CrateListingV1> {
+    let listing = (|| -> CargoResult<_> {
+        let mut contents = String::new();
+        file.file().read_to_string(&mut contents)?;
+        let listing =
+            toml::from_str(&contents).chain_err(|| internal("invalid TOML found for metadata"))?;
+        match listing {
+            CrateListing::V1(v1) => Ok(v1),
+            CrateListing::Empty(_) => Ok(CrateListingV1 {
+                v1: BTreeMap::new(),
+            }),
+        }
+    })()
+        .chain_err(|| {
+        format_err!(
+            "failed to parse crate metadata at `{}`",
+            file.path().to_string_lossy()
+        )
+    })?;
+    Ok(listing)
+}
+
+fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()> {
+    (|| -> CargoResult<_> {
+        let mut file = file.file();
+        file.seek(SeekFrom::Start(0))?;
+        file.set_len(0)?;
+        let data = toml::to_string(&CrateListing::V1(listing))?;
+        file.write_all(data.as_bytes())?;
+        Ok(())
+    })()
+        .chain_err(|| {
+        format_err!(
+            "failed to write crate metadata at `{}`",
+            file.path().to_string_lossy()
+        )
+    })?;
+    Ok(())
+}
+
+pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
+    let dst = resolve_root(dst, config)?;
+    let dst = metadata(config, &dst)?;
+    let list = read_crate_list(&dst)?;
+    for (k, v) in list.v1.iter() {
+        println!("{}:", k);
+        for bin in v {
+            println!("    {}", bin);
+        }
+    }
+    Ok(())
+}
+
+pub fn uninstall(
+    root: Option<&str>,
+    specs: Vec<&str>,
+    bins: &[String],
+    config: &Config,
+) -> CargoResult<()> {
+    if specs.len() > 1 && !bins.is_empty() {
+        bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.");
+    }
+
+    let root = resolve_root(root, config)?;
+    let scheduled_error = if specs.len() == 1 {
+        uninstall_one(&root, specs[0], bins, config)?;
+        false
+    } else if specs.len() == 0 {
+        uninstall_cwd(&root, bins, config)?;
+        false
+    } else {
+        let mut succeeded = vec![];
+        let mut failed = vec![];
+        for spec in specs {
+            let root = root.clone();
+            match uninstall_one(&root, spec, bins, config) {
+                Ok(()) => succeeded.push(spec),
+                Err(e) => {
+                    ::handle_error(&e, &mut config.shell());
+                    failed.push(spec)
+                }
+            }
+        }
+
+        let mut summary = vec![];
+        if !succeeded.is_empty() {
+            summary.push(format!(
+                "Successfully uninstalled {}!",
+                succeeded.join(", ")
+            ));
+        }
+        if !failed.is_empty() {
+            summary.push(format!(
+                "Failed to uninstall {} (see error(s) above).",
+                failed.join(", ")
+            ));
+        }
+
+        if !succeeded.is_empty() || !failed.is_empty() {
+            config.shell().status("Summary", summary.join(" "))?;
+        }
+
+        !failed.is_empty()
+    };
+
+    if scheduled_error {
+        bail!("some packages failed to uninstall");
+    }
+
+    Ok(())
+}
+
/// Uninstalls the package matching the id spec `spec` from the install
/// root `root`, optionally restricted to the binaries named in `bins`.
pub fn uninstall_one(
    root: &Filesystem,
    spec: &str,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
    let crate_metadata = metadata(config, root)?;
    let metadata = read_crate_list(&crate_metadata)?;
    // Resolve the spec against the set of currently installed package ids.
    let pkgid = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone();
    uninstall_pkgid(crate_metadata, metadata, &pkgid, bins, config)
}
+
/// Uninstalls the package whose source lives in the current working
/// directory (used when `cargo uninstall` is given no spec).
fn uninstall_cwd(
    root: &Filesystem,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
    let crate_metadata = metadata(config, root)?;
    let metadata = read_crate_list(&crate_metadata)?;
    // Read the package at cwd to learn which installed package id it is.
    let source_id = SourceId::for_path(config.cwd())?;
    let src = path_source(&source_id, config)?;
    let (pkg, _source) =
        select_pkg(src, None, None, config, true, &mut |path| path.read_packages())?;
    let pkgid = pkg.package_id();
    uninstall_pkgid(crate_metadata, metadata, pkgid, bins, config)
}
+
/// Removes the installed binaries of `pkgid` — all of them, or only those
/// named in `bins` — and updates the on-disk install metadata to match.
fn uninstall_pkgid(
    crate_metadata: FileLock,
    mut metadata: CrateListingV1,
    pkgid: &PackageId,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
    let mut to_remove = Vec::new();
    {
        let mut installed = match metadata.v1.entry(pkgid.clone()) {
            Entry::Occupied(e) => e,
            Entry::Vacant(..) => bail!("package `{}` is not installed", pkgid),
        };
        // Installed binaries live next to the metadata file, under `bin/`.
        let dst = crate_metadata.parent().join("bin");
        // Sanity-check that every recorded binary actually exists before
        // deleting anything.
        for bin in installed.get() {
            let bin = dst.join(bin);
            if fs::metadata(&bin).is_err() {
                bail!(
                    "corrupt metadata, `{}` does not exist when it should",
                    bin.display()
                )
            }
        }

        // Normalize the requested names to carry the executable suffix
        // (e.g. `.exe` on Windows) so they compare against recorded names.
        let bins = bins.iter()
            .map(|s| {
                if s.ends_with(env::consts::EXE_SUFFIX) {
                    s.to_string()
                } else {
                    format!("{}{}", s, env::consts::EXE_SUFFIX)
                }
            })
            .collect::<Vec<_>>();

        // Every explicitly requested binary must belong to this package.
        for bin in bins.iter() {
            if !installed.get().contains(bin) {
                bail!("binary `{}` not installed as part of `{}`", bin, pkgid)
            }
        }

        if bins.is_empty() {
            // No --bin flags: schedule every binary of the package.
            to_remove.extend(installed.get().iter().map(|b| dst.join(b)));
            installed.get_mut().clear();
        } else {
            for bin in bins.iter() {
                to_remove.push(dst.join(bin));
                installed.get_mut().remove(bin);
            }
        }
        // Drop the package entry entirely once it owns no binaries.
        if installed.get().is_empty() {
            installed.remove();
        }
    }
    // Persist the updated listing first, then delete the files.
    write_crate_list(&crate_metadata, metadata)?;
    for bin in to_remove {
        config.shell().status("Removing", bin.display())?;
        paths::remove_file(bin)?;
    }

    Ok(())
}
+
+fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
+    root.open_rw(Path::new(".crates.toml"), config, "crate metadata")
+}
+
+fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
+    let config_root = config.get_path("install.root")?;
+    Ok(flag.map(PathBuf::from)
+        .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
+        .or_else(move || config_root.map(|v| v.val))
+        .map(Filesystem::new)
+        .unwrap_or_else(|| config.home().clone()))
+}
diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs
new file mode 100644 (file)
index 0000000..bd2bc40
--- /dev/null
@@ -0,0 +1,693 @@
+use std::collections::BTreeMap;
+use std::env;
+use std::fs;
+use std::fmt;
+use std::path::{Path, PathBuf};
+
+use git2::Config as GitConfig;
+use git2::Repository as GitRepository;
+
+use core::{compiler, Workspace};
+use util::{internal, FossilRepo, GitRepo, HgRepo, PijulRepo, existing_vcs_repo};
+use util::{paths, Config};
+use util::errors::{CargoResult, CargoResultExt};
+
+use toml;
+
/// Which version-control system to set up for a new package.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VersionControl {
    Git,
    Hg,
    Pijul,
    Fossil,
    /// Do not initialize any version control.
    NoVcs,
}
+
/// Options shared by `cargo new` and `cargo init`.
#[derive(Debug)]
pub struct NewOptions {
    /// Explicit `--vcs` choice; `None` means detect/default.
    pub version_control: Option<VersionControl>,
    /// Whether to create a binary or a library package.
    pub kind: NewProjectKind,
    /// Absolute path to the directory for the new package
    pub path: PathBuf,
    /// Package name; when `None` it is derived from `path`.
    pub name: Option<String>,
    /// Rust edition written into Cargo.toml (defaults to "2018" in `mk`).
    pub edition: Option<String>,
    /// Registry written as a `publish = [...]` restriction in Cargo.toml.
    pub registry: Option<String>,
}
+
/// Whether a new package is a binary (application) or a library.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum NewProjectKind {
    Bin,
    Lib,
}
+
+impl NewProjectKind {
+    fn is_bin(self) -> bool {
+        self == NewProjectKind::Bin
+    }
+}
+
+impl fmt::Display for NewProjectKind {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            NewProjectKind::Bin => "binary (application)",
+            NewProjectKind::Lib => "library",
+        }.fmt(f)
+    }
+}
+
/// A source file discovered (or planned) for a new package, plus the
/// target it maps to in the generated Cargo.toml.
struct SourceFileInformation {
    /// Path relative to the package root, e.g. "src/main.rs".
    relative_path: String,
    /// Name of the `[[bin]]` or `[lib]` target backed by this file.
    target_name: String,
    /// True for a binary target, false for a library target.
    bin: bool,
}
+
/// Internal options for `mk`, assembled by `new` and `init`.
struct MkOptions<'a> {
    version_control: Option<VersionControl>,
    path: &'a Path,
    name: &'a str,
    /// Source files to create (if missing) and reference from Cargo.toml.
    source_files: Vec<SourceFileInformation>,
    /// True when any target is a binary; when false, Cargo.lock is added
    /// to the generated ignore files.
    bin: bool,
    edition: Option<&'a str>,
    registry: Option<&'a str>,
}
+
+impl NewOptions {
+    pub fn new(
+        version_control: Option<VersionControl>,
+        bin: bool,
+        lib: bool,
+        path: PathBuf,
+        name: Option<String>,
+        edition: Option<String>,
+        registry: Option<String>,
+    ) -> CargoResult<NewOptions> {
+        let kind = match (bin, lib) {
+            (true, true) => bail!("can't specify both lib and binary outputs"),
+            (false, true) => NewProjectKind::Lib,
+            // default to bin
+            (_, false) => NewProjectKind::Bin,
+        };
+
+        let opts = NewOptions {
+            version_control,
+            kind,
+            path,
+            name,
+            edition,
+            registry,
+        };
+        Ok(opts)
+    }
+}
+
/// Values read from the `[cargo-new]` section of the cargo configuration
/// (see `global_config`).
struct CargoNewConfig {
    /// `cargo-new.name` — author name for the generated manifest.
    name: Option<String>,
    /// `cargo-new.email` — author email for the generated manifest.
    email: Option<String>,
    /// `cargo-new.vcs` — default version control choice.
    version_control: Option<VersionControl>,
}
+
+fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> {
+    if let Some(ref name) = opts.name {
+        return Ok(name);
+    }
+
+    let file_name = path.file_name().ok_or_else(|| {
+        format_err!(
+            "cannot auto-detect package name from path {:?} ; use --name to override",
+            path.as_os_str()
+        )
+    })?;
+
+    file_name.to_str().ok_or_else(|| {
+        format_err!(
+            "cannot create package with a non-unicode name: {:?}",
+            file_name
+        )
+    })
+}
+
+fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {
+    // If --name is already used to override, no point in suggesting it
+    // again as a fix.
+    let name_help = match opts.name {
+        Some(_) => "",
+        None => "\nuse --name to override crate name",
+    };
+
+    // Ban keywords + test list found at
+    // https://doc.rust-lang.org/grammar.html#keywords
+    let blacklist = [
+        "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
+        "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
+        "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub",
+        "pure", "ref", "return", "self", "sizeof", "static", "struct", "super", "test", "trait",
+        "true", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
+    ];
+    if blacklist.contains(&name) || (opts.kind.is_bin() && compiler::is_bad_artifact_name(name)) {
+        bail!(
+            "The name `{}` cannot be used as a crate name{}",
+            name,
+            name_help
+        )
+    }
+
+    if let Some(ref c) = name.chars().nth(0) {
+        if c.is_digit(10) {
+            bail!(
+                "Package names starting with a digit cannot be used as a crate name{}",
+                name_help
+            )
+        }
+    }
+
+    for c in name.chars() {
+        if c.is_alphanumeric() {
+            continue;
+        }
+        if c == '_' || c == '-' {
+            continue;
+        }
+        bail!(
+            "Invalid character `{}` in crate name: `{}`{}",
+            c,
+            name,
+            name_help
+        )
+    }
+    Ok(())
+}
+
/// Looks for conventional pre-existing source files (`src/main.rs`,
/// `main.rs`, `src/<name>.rs`, `<name>.rs`, `src/lib.rs`, `lib.rs`) under
/// `package_path` and appends a `SourceFileInformation` for each one found
/// to `detected_files`.
///
/// Errors when two files would map to the same binary target name, or when
/// more than one library source is present.
fn detect_source_paths_and_types(
    package_path: &Path,
    package_name: &str,
    detected_files: &mut Vec<SourceFileInformation>,
) -> CargoResult<()> {
    let path = package_path;
    let name = package_name;

    // How a candidate file is classified.
    enum H {
        Bin,
        Lib,
        // Classified by content ("fn main" => binary) below.
        Detect,
    }

    struct Test {
        proposed_path: String,
        handling: H,
    }

    // Candidate files, checked in this order.
    let tests = vec![
        Test {
            proposed_path: "src/main.rs".to_string(),
            handling: H::Bin,
        },
        Test {
            proposed_path: "main.rs".to_string(),
            handling: H::Bin,
        },
        Test {
            proposed_path: format!("src/{}.rs", name),
            handling: H::Detect,
        },
        Test {
            proposed_path: format!("{}.rs", name),
            handling: H::Detect,
        },
        Test {
            proposed_path: "src/lib.rs".to_string(),
            handling: H::Lib,
        },
        Test {
            proposed_path: "lib.rs".to_string(),
            handling: H::Lib,
        },
    ];

    for i in tests {
        let pp = i.proposed_path;

        // path/pp does not exist or is not a file
        if !fs::metadata(&path.join(&pp))
            .map(|x| x.is_file())
            .unwrap_or(false)
        {
            continue;
        }

        let sfi = match i.handling {
            H::Bin => SourceFileInformation {
                relative_path: pp,
                target_name: package_name.to_string(),
                bin: true,
            },
            H::Lib => SourceFileInformation {
                relative_path: pp,
                target_name: package_name.to_string(),
                bin: false,
            },
            H::Detect => {
                // Classify by whether the file textually contains "fn main".
                let content = paths::read(&path.join(pp.clone()))?;
                let isbin = content.contains("fn main");
                SourceFileInformation {
                    relative_path: pp,
                    target_name: package_name.to_string(),
                    bin: isbin,
                }
            }
        };
        detected_files.push(sfi);
    }

    // Check for duplicate lib attempt

    let mut previous_lib_relpath: Option<&str> = None;
    let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new();

    for i in detected_files {
        if i.bin {
            // Fully-qualified `get::<str>` pins the borrowed key type for
            // the lookup.
            if let Some(x) = BTreeMap::get::<str>(&duplicates_checker, i.target_name.as_ref()) {
                bail!(
                    "\
multiple possible binary sources found:
  {}
  {}
cannot automatically generate Cargo.toml as the main target would be ambiguous",
                    &x.relative_path,
                    &i.relative_path
                );
            }
            duplicates_checker.insert(i.target_name.as_ref(), i);
        } else {
            if let Some(plp) = previous_lib_relpath {
                bail!(
                    "cannot have a package with \
                     multiple libraries, \
                     found both `{}` and `{}`",
                    plp,
                    i.relative_path
                )
            }
            previous_lib_relpath = Some(&i.relative_path);
        }
    }

    Ok(())
}
+
+fn plan_new_source_file(bin: bool, package_name: String) -> SourceFileInformation {
+    if bin {
+        SourceFileInformation {
+            relative_path: "src/main.rs".to_string(),
+            target_name: package_name,
+            bin: true,
+        }
+    } else {
+        SourceFileInformation {
+            relative_path: "src/lib.rs".to_string(),
+            target_name: package_name,
+            bin: false,
+        }
+    }
+}
+
+pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
+    let path = &opts.path;
+    if fs::metadata(path).is_ok() {
+        bail!(
+            "destination `{}` already exists\n\n\
+             Use `cargo init` to initialize the directory",
+            path.display()
+        )
+    }
+
+    let name = get_name(path, opts)?;
+    check_name(name, opts)?;
+
+    let mkopts = MkOptions {
+        version_control: opts.version_control,
+        path,
+        name,
+        source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())],
+        bin: opts.kind.is_bin(),
+        edition: opts.edition.as_ref().map(|s| &**s),
+        registry: opts.registry.as_ref().map(|s| &**s),
+    };
+
+    mk(config, &mkopts).chain_err(|| {
+        format_err!(
+            "Failed to create package `{}` at `{}`",
+            name,
+            path.display()
+        )
+    })?;
+    Ok(())
+}
+
+pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {
+    let path = &opts.path;
+
+    if fs::metadata(&path.join("Cargo.toml")).is_ok() {
+        bail!("`cargo init` cannot be run on existing Cargo packages")
+    }
+
+    let name = get_name(path, opts)?;
+    check_name(name, opts)?;
+
+    let mut src_paths_types = vec![];
+
+    detect_source_paths_and_types(path, name, &mut src_paths_types)?;
+
+    if src_paths_types.is_empty() {
+        src_paths_types.push(plan_new_source_file(opts.kind.is_bin(), name.to_string()));
+    } else {
+        // --bin option may be ignored if lib.rs or src/lib.rs present
+        // Maybe when doing `cargo init --bin` inside a library package stub,
+        // user may mean "initialize for library, but also add binary target"
+    }
+
+    let mut version_control = opts.version_control;
+
+    if version_control == None {
+        let mut num_detected_vsces = 0;
+
+        if fs::metadata(&path.join(".git")).is_ok() {
+            version_control = Some(VersionControl::Git);
+            num_detected_vsces += 1;
+        }
+
+        if fs::metadata(&path.join(".hg")).is_ok() {
+            version_control = Some(VersionControl::Hg);
+            num_detected_vsces += 1;
+        }
+
+        if fs::metadata(&path.join(".pijul")).is_ok() {
+            version_control = Some(VersionControl::Pijul);
+            num_detected_vsces += 1;
+        }
+
+        if fs::metadata(&path.join(".fossil")).is_ok() {
+            version_control = Some(VersionControl::Fossil);
+            num_detected_vsces += 1;
+        }
+
+        // if none exists, maybe create git, like in `cargo new`
+
+        if num_detected_vsces > 1 {
+            bail!(
+                "more than one of .hg, .git, .pijul, .fossil configurations \
+                 found and the ignore file can't be filled in as \
+                 a result. specify --vcs to override detection"
+            );
+        }
+    }
+
+    let mkopts = MkOptions {
+        version_control,
+        path,
+        name,
+        bin: src_paths_types.iter().any(|x| x.bin),
+        source_files: src_paths_types,
+        edition: opts.edition.as_ref().map(|s| &**s),
+        registry: opts.registry.as_ref().map(|s| &**s),
+    };
+
+    mk(config, &mkopts).chain_err(|| {
+        format_err!(
+            "Failed to create package `{}` at `{}`",
+            name,
+            path.display()
+        )
+    })?;
+    Ok(())
+}
+
+fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
+    let path = opts.path;
+    let name = opts.name;
+    let cfg = global_config(config)?;
+    // Please ensure that ignore and hgignore are in sync.
+    let ignore = [
+        "/target\n",
+        "**/*.rs.bk\n",
+        if !opts.bin { "Cargo.lock\n" } else { "" },
+    ].concat();
+    // Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the
+    // file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for
+    // more.
+    let hgignore = [
+        "^target/\n",
+        "glob:*.rs.bk\n",
+        if !opts.bin { "glob:Cargo.lock\n" } else { "" },
+    ].concat();
+
+    let vcs = opts.version_control.unwrap_or_else(|| {
+        let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd());
+        match (cfg.version_control, in_existing_vcs) {
+            (None, false) => VersionControl::Git,
+            (Some(opt), false) => opt,
+            (_, true) => VersionControl::NoVcs,
+        }
+    });
+
+    match vcs {
+        VersionControl::Git => {
+            if !path.join(".git").exists() {
+                GitRepo::init(path, config.cwd())?;
+            }
+            let ignore = if path.join(".gitignore").exists() {
+                format!("\n{}", ignore)
+            } else {
+                ignore
+            };
+            paths::append(&path.join(".gitignore"), ignore.as_bytes())?;
+        }
+        VersionControl::Hg => {
+            if !path.join(".hg").exists() {
+                HgRepo::init(path, config.cwd())?;
+            }
+            let hgignore = if path.join(".hgignore").exists() {
+                format!("\n{}", hgignore)
+            } else {
+                hgignore
+            };
+            paths::append(&path.join(".hgignore"), hgignore.as_bytes())?;
+        }
+        VersionControl::Pijul => {
+            if !path.join(".pijul").exists() {
+                PijulRepo::init(path, config.cwd())?;
+            }
+            let ignore = if path.join(".ignore").exists() {
+                format!("\n{}", ignore)
+            } else {
+                ignore
+            };
+            paths::append(&path.join(".ignore"), ignore.as_bytes())?;
+        }
+        VersionControl::Fossil => {
+            if path.join(".fossil").exists() {
+                FossilRepo::init(path, config.cwd())?;
+            }
+        }
+        VersionControl::NoVcs => {
+            fs::create_dir_all(path)?;
+        }
+    };
+
+    let (author_name, email) = discover_author()?;
+    // Hoo boy, sure glad we've got exhaustiveness checking behind us.
+    let author = match (cfg.name, cfg.email, author_name, email) {
+        (Some(name), Some(email), _, _)
+        | (Some(name), None, _, Some(email))
+        | (None, Some(email), name, _)
+        | (None, None, name, Some(email)) => format!("{} <{}>", name, email),
+        (Some(name), None, _, None) | (None, None, name, None) => name,
+    };
+
+    let mut cargotoml_path_specifier = String::new();
+
+    // Calculate what [lib] and [[bin]]s do we need to append to Cargo.toml
+
+    for i in &opts.source_files {
+        if i.bin {
+            if i.relative_path != "src/main.rs" {
+                cargotoml_path_specifier.push_str(&format!(
+                    r#"
+[[bin]]
+name = "{}"
+path = {}
+"#,
+                    i.target_name,
+                    toml::Value::String(i.relative_path.clone())
+                ));
+            }
+        } else if i.relative_path != "src/lib.rs" {
+            cargotoml_path_specifier.push_str(&format!(
+                r#"
+[lib]
+name = "{}"
+path = {}
+"#,
+                i.target_name,
+                toml::Value::String(i.relative_path.clone())
+            ));
+        }
+    }
+
+    // Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed
+
+    paths::write(
+        &path.join("Cargo.toml"),
+        format!(
+            r#"[package]
+name = "{}"
+version = "0.1.0"
+authors = [{}]
+edition = {}
+{}
+[dependencies]
+{}"#,
+            name,
+            toml::Value::String(author),
+            match opts.edition {
+                Some(edition) => toml::Value::String(edition.to_string()),
+                None => toml::Value::String("2018".to_string()),
+            },
+            match opts.registry {
+                Some(registry) => {
+                    format!("publish = {}\n",
+                        toml::Value::Array(vec!(toml::Value::String(registry.to_string())))
+                    )
+                }
+                None => "".to_string(),
+            }, 
+            cargotoml_path_specifier
+        ).as_bytes(),
+    )?;
+
+    // Create all specified source files
+    // (with respective parent directories)
+    // if they are don't exist
+
+    for i in &opts.source_files {
+        let path_of_source_file = path.join(i.relative_path.clone());
+
+        if let Some(src_dir) = path_of_source_file.parent() {
+            fs::create_dir_all(src_dir)?;
+        }
+
+        let default_file_content: &[u8] = if i.bin {
+            b"\
+fn main() {
+    println!(\"Hello, world!\");
+}
+"
+        } else {
+            b"\
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn it_works() {
+        assert_eq!(2 + 2, 4);
+    }
+}
+"
+        };
+
+        if !fs::metadata(&path_of_source_file)
+            .map(|x| x.is_file())
+            .unwrap_or(false)
+        {
+            paths::write(&path_of_source_file, default_file_content)?;
+        }
+    }
+
+    if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
+        let msg = format!(
+            "compiling this new crate may not work due to invalid \
+             workspace configuration\n\n{}",
+            e
+        );
+        config.shell().warn(msg)?;
+    }
+
+    Ok(())
+}
+
/// Returns the value of the first environment variable in `variables`
/// that is set (to valid Unicode), or `None` when none are.
fn get_environment_variable(variables: &[&str]) -> Option<String> {
    for var in variables {
        if let Ok(value) = env::var(var) {
            return Some(value);
        }
    }
    None
}
+
/// Determines the author name and (optional) email for a new package.
///
/// Name lookup order: `CARGO_NAME`/`GIT_AUTHOR_NAME`/`GIT_COMMITTER_NAME`
/// env vars, then git's `user.name`, then `USER`/`USERNAME`/`NAME`.
/// Email lookup order: `CARGO_EMAIL`/`GIT_AUTHOR_EMAIL`/
/// `GIT_COMMITTER_EMAIL`, then git's `user.email`, then `EMAIL`.
/// Errors when no name can be determined; both values are trimmed.
fn discover_author() -> CargoResult<(String, Option<String>)> {
    let cwd = env::current_dir()?;
    // Prefer the config of an enclosing git repository; fall back to the
    // user's default (global) git config.
    let git_config = if let Ok(repo) = GitRepository::discover(&cwd) {
        repo.config()
            .ok()
            .or_else(|| GitConfig::open_default().ok())
    } else {
        GitConfig::open_default().ok()
    };
    let git_config = git_config.as_ref();
    let name_variables = [
        "CARGO_NAME",
        "GIT_AUTHOR_NAME",
        "GIT_COMMITTER_NAME",
        "USER",
        "USERNAME",
        "NAME",
    ];
    // The first three variables outrank git config; the generic account
    // variables (USER/USERNAME/NAME) are only a last resort.
    let name = get_environment_variable(&name_variables[0..3])
        .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
        .or_else(|| get_environment_variable(&name_variables[3..]));

    let name = match name {
        Some(name) => name,
        None => {
            let username_var = if cfg!(windows) { "USERNAME" } else { "USER" };
            bail!(
                "could not determine the current user, please set ${}",
                username_var
            )
        }
    };
    let email_variables = [
        "CARGO_EMAIL",
        "GIT_AUTHOR_EMAIL",
        "GIT_COMMITTER_EMAIL",
        "EMAIL",
    ];
    let email = get_environment_variable(&email_variables[0..3])
        .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
        .or_else(|| get_environment_variable(&email_variables[3..]));

    let name = name.trim().to_string();
    let email = email.map(|s| s.trim().to_string());

    Ok((name, email))
}
+
+fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
+    let name = config.get_string("cargo-new.name")?.map(|s| s.val);
+    let email = config.get_string("cargo-new.email")?.map(|s| s.val);
+    let vcs = config.get_string("cargo-new.vcs")?;
+
+    let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) {
+        Some(("git", _)) => Some(VersionControl::Git),
+        Some(("hg", _)) => Some(VersionControl::Hg),
+        Some(("pijul", _)) => Some(VersionControl::Pijul),
+        Some(("none", _)) => Some(VersionControl::NoVcs),
+        Some((s, p)) => {
+            return Err(internal(format!(
+                "invalid configuration for key \
+                 `cargo-new.vcs`, unknown vcs `{}` \
+                 (found in {})",
+                s, p
+            )))
+        }
+        None => None,
+    };
+    Ok(CargoNewConfig {
+        name,
+        email,
+        version_control: vcs,
+    })
+}
diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs
new file mode 100644 (file)
index 0000000..da6462b
--- /dev/null
@@ -0,0 +1,133 @@
+use std::collections::HashMap;
+
+use serde::ser;
+
+use core::resolver::Resolve;
+use core::{Package, PackageId, Workspace};
+use ops::{self, Packages};
+use util::CargoResult;
+
+const VERSION: u32 = 1;
+
+/// Options for `cargo metadata` style output.
+pub struct OutputMetadataOptions {
+    /// Features to activate during dependency resolution.
+    pub features: Vec<String>,
+    pub no_default_features: bool,
+    pub all_features: bool,
+    /// When set, skip dependency resolution and report workspace members only.
+    pub no_deps: bool,
+    /// Requested format version; must equal `VERSION` or `output_metadata` bails.
+    pub version: u32,
+}
+
+/// Loads the manifest, resolves the dependencies of the package to the concrete
+/// used versions - considering overrides - and writes all dependencies in a JSON
+/// format to stdout.
+pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    if opt.version != VERSION {
+        bail!(
+            "metadata version {} not supported, only {} is currently supported",
+            opt.version,
+            VERSION
+        );
+    }
+    if opt.no_deps {
+        metadata_no_deps(ws, opt)
+    } else {
+        metadata_full(ws, opt)
+    }
+}
+
+fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    Ok(ExportInfo {
+        packages: ws.members().cloned().collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
+        resolve: None,
+        target_directory: ws.target_dir().display().to_string(),
+        version: VERSION,
+        workspace_root: ws.root().display().to_string(),
+    })
+}
+
+fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    let specs = Packages::All.to_package_id_specs(ws)?;
+    let (package_set, resolve) = ops::resolve_ws_precisely(
+        ws,
+        None,
+        &opt.features,
+        opt.all_features,
+        opt.no_default_features,
+        &specs,
+    )?;
+    let mut packages = HashMap::new();
+    for pkg in package_set.get_many(package_set.package_ids())? {
+        packages.insert(pkg.package_id().clone(), pkg.clone());
+    }
+
+    Ok(ExportInfo {
+        packages: packages.values().map(|p| (*p).clone()).collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
+        resolve: Some(MetadataResolve {
+            resolve: (packages, resolve),
+            root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
+        }),
+        target_directory: ws.target_dir().display().to_string(),
+        version: VERSION,
+        workspace_root: ws.root().display().to_string(),
+    })
+}
+
+/// The top-level JSON document emitted for `cargo metadata`.
+#[derive(Serialize)]
+pub struct ExportInfo {
+    /// All packages in the resolve graph, or only the workspace members
+    /// when resolution was skipped (`no_deps`).
+    packages: Vec<Package>,
+    workspace_members: Vec<PackageId>,
+    /// `None` when dependency resolution was skipped.
+    resolve: Option<MetadataResolve>,
+    target_directory: String,
+    /// Format version of this document (see `VERSION`).
+    version: u32,
+    workspace_root: String,
+}
+
+/// Newtype wrapper to provide a custom `Serialize` implementation.
+/// The one from lockfile does not fit because it uses a non-standard
+/// format for `PackageId`s
+#[derive(Serialize)]
+struct MetadataResolve {
+    /// Package map plus resolve graph; serialized as the `nodes` array
+    /// via `serialize_resolve`.
+    #[serde(rename = "nodes", serialize_with = "serialize_resolve")]
+    resolve: (HashMap<PackageId, Package>, Resolve),
+    /// Id of the current (root) package, if the workspace has one.
+    root: Option<PackageId>,
+}
+
+fn serialize_resolve<S>((packages, resolve): &(HashMap<PackageId, Package>, Resolve), s: S) -> Result<S::Ok, S::Error>
+where
+    S: ser::Serializer,
+{
+    #[derive(Serialize)]
+    struct Dep<'a> {
+        name: Option<String>,
+        pkg: &'a PackageId
+    }
+
+    #[derive(Serialize)]
+    struct Node<'a> {
+        id: &'a PackageId,
+        dependencies: Vec<&'a PackageId>,
+        deps: Vec<Dep<'a>>,
+        features: Vec<&'a str>,
+    }
+
+    s.collect_seq(resolve
+        .iter()
+        .map(|id| Node {
+            id,
+            dependencies: resolve.deps(id).map(|(pkg, _deps)| pkg).collect(),
+            deps: resolve.deps(id)
+                .map(|(pkg, _deps)| {
+                    let name = packages.get(pkg)
+                        .and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
+                        .and_then(|lib_target| {
+                            resolve.extern_crate_name(id, pkg, lib_target).ok()
+                        });
+
+                    Dep { name, pkg }
+                })
+                .collect(),
+            features: resolve.features_sorted(id),
+        }))
+}
diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs
new file mode 100644 (file)
index 0000000..2b06284
--- /dev/null
@@ -0,0 +1,497 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::{self, Path, PathBuf};
+use std::sync::Arc;
+
+use flate2::read::GzDecoder;
+use flate2::{Compression, GzBuilder};
+use git2;
+use serde_json;
+use tar::{Archive, Builder, EntryType, Header};
+
+use core::{Package, Source, SourceId, Workspace};
+use core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
+use sources::PathSource;
+use util::{self, internal, Config, FileLock};
+use util::paths;
+use util::errors::{CargoResult, CargoResultExt};
+use ops;
+
+/// Options for `cargo package`.
+pub struct PackageOpts<'cfg> {
+    pub config: &'cfg Config,
+    /// Only print the files that would be packaged; do not build a tarball.
+    pub list: bool,
+    /// Warn when human-facing manifest metadata (description, license, ...) is missing.
+    pub check_metadata: bool,
+    /// Permit packaging with uncommitted VCS changes.
+    pub allow_dirty: bool,
+    /// Build the resulting tarball to verify it compiles standalone.
+    pub verify: bool,
+    pub jobs: Option<u32>,
+    pub target: Option<String>,
+    pub registry: Option<String>,
+}
+
+// Name of the reserved file, written into the package tarball, that records
+// VCS state (currently the git HEAD sha1) at packaging time.
+static VCS_INFO_FILE: &'static str = ".cargo_vcs_info.json";
+
+pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
+    ops::resolve_ws(ws)?;
+    let pkg = ws.current()?;
+    let config = ws.config();
+
+    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
+    src.update()?;
+
+    if opts.check_metadata {
+        check_metadata(pkg, config)?;
+    }
+
+    verify_dependencies(pkg)?;
+
+    // `list_files` outputs warnings as a side effect, so only do it once.
+    let src_files = src.list_files(pkg)?;
+
+    // Make sure a VCS info file is not included in source, regardless of if
+    // we produced the file above, and in particular if we did not.
+    check_vcs_file_collision(pkg, &src_files)?;
+
+    // Check (git) repository state, getting the current commit hash if not
+    // dirty. This will `bail!` if dirty, unless allow_dirty. Produce json
+    // info for any sha1 (HEAD revision) returned.
+    let vcs_info = check_repo_state(pkg, &src_files, &config, opts.allow_dirty)?
+        .map(|h| json!({"git":{"sha1": h}}));
+
+    if opts.list {
+        let root = pkg.root();
+        let mut list: Vec<_> = src.list_files(pkg)?
+            .iter()
+            .map(|file| util::without_prefix(file, root).unwrap().to_path_buf())
+            .collect();
+        if include_lockfile(pkg) {
+            list.push("Cargo.lock".into());
+        }
+        if vcs_info.is_some() {
+            list.push(Path::new(VCS_INFO_FILE).to_path_buf());
+        }
+        list.sort_unstable();
+        for file in list.iter() {
+            println!("{}", file.display());
+        }
+        return Ok(None);
+    }
+
+    let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+    let dir = ws.target_dir().join("package");
+    let mut dst = {
+        let tmp = format!(".{}", filename);
+        dir.open_rw(&tmp, config, "package scratch space")?
+    };
+
+    // Package up and test a temporary tarball and only move it to the final
+    // location if it actually passes all our tests. Any previously existing
+    // tarball can be assumed as corrupt or invalid, so we just blow it away if
+    // it exists.
+    config
+        .shell()
+        .status("Packaging", pkg.package_id().to_string())?;
+    dst.file().set_len(0)?;
+    tar(ws, &src_files, vcs_info.as_ref(), dst.file(), &filename)
+        .chain_err(|| format_err!("failed to prepare local package for uploading"))?;
+    if opts.verify {
+        dst.seek(SeekFrom::Start(0))?;
+        run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")?
+    }
+    dst.seek(SeekFrom::Start(0))?;
+    {
+        let src_path = dst.path();
+        let dst_path = dst.parent().join(&filename);
+        fs::rename(&src_path, &dst_path)
+            .chain_err(|| "failed to move temporary tarball into final location")?;
+    }
+    Ok(Some(dst))
+}
+
+fn include_lockfile(pkg: &Package) -> bool {
+    pkg.manifest().publish_lockfile() && pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
+}
+
+// check that the package has some piece of metadata that a human can
+// use to tell what the package is about.
+//
+// Missing fields produce a single warning, not an error.
+fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
+    let md = pkg.manifest().metadata();
+
+    let mut missing = vec![];
+
+    // Expands to one `if` per comma-separated group: a group is "lacking"
+    // only when *every* `||`-joined field in it is `None` or empty, and then
+    // each field name in the group (with `_` mapped to `-`) is recorded.
+    macro_rules! lacking {
+        ($( $($field: ident)||* ),*) => {{
+            $(
+                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
+                    $(missing.push(stringify!($field).replace("_", "-"));)*
+                }
+            )*
+        }}
+    }
+    lacking!(
+        description,
+        license || license_file,
+        documentation || homepage || repository
+    );
+
+    if !missing.is_empty() {
+        // Join as "a, b or c" for the warning message.
+        let mut things = missing[..missing.len() - 1].join(", ");
+        // things will be empty if and only if length == 1 (i.e. the only case
+        // to have no `or`).
+        if !things.is_empty() {
+            things.push_str(" or ");
+        }
+        things.push_str(missing.last().unwrap());
+
+        config.shell().warn(&format!(
+            "manifest has no {things}.\n\
+             See http://doc.crates.io/manifest.html#package-metadata for more info.",
+            things = things
+        ))?
+    }
+    Ok(())
+}
+
+// check that the package dependencies are safe to deploy.
+fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
+    for dep in pkg.dependencies() {
+        if dep.source_id().is_path() && !dep.specified_req() {
+            bail!(
+                "all path dependencies must have a version specified \
+                 when packaging.\ndependency `{}` does not specify \
+                 a version.",
+                dep.name_in_toml()
+            )
+        }
+    }
+    Ok(())
+}
+
+// Check if the package source is in a *git* DVCS repository. If *git*, and
+// the source is *dirty* (e.g. has uncommited changes) and not `allow_dirty`
+// then `bail!` with an informative message. Otherwise return the sha1 hash of
+// the current *HEAD* commit, or `None` if *dirty*.
+fn check_repo_state(
+    p: &Package,
+    src_files: &[PathBuf],
+    config: &Config,
+    allow_dirty: bool
+) -> CargoResult<Option<String>> {
+    if let Ok(repo) = git2::Repository::discover(p.root()) {
+        if let Some(workdir) = repo.workdir() {
+            debug!("found a git repo at {:?}", workdir);
+            let path = p.manifest_path();
+            let path = path.strip_prefix(workdir).unwrap_or(path);
+            if let Ok(status) = repo.status_file(path) {
+                // Only treat this as "the" repository when Cargo.toml is not
+                // git-ignored by it.
+                if (status & git2::Status::IGNORED).is_empty() {
+                    debug!(
+                        "found (git) Cargo.toml at {:?} in workdir {:?}",
+                        path, workdir
+                    );
+                    return git(p, src_files, &repo, allow_dirty);
+                }
+            }
+            config.shell().verbose(|shell| {
+                shell.warn(format!(
+                    "No (git) Cargo.toml found at `{}` in workdir `{}`",
+                    path.display(), workdir.display()
+                ))
+            })?;
+        }
+    } else {
+        config.shell().verbose(|shell| {
+            shell.warn(format!("No (git) VCS found for `{}`", p.root().display()))
+        })?;
+    }
+
+    // No VCS with a checked in Cargo.toml found. so we don't know if the
+    // directory is dirty or not, so we have to assume that it's clean.
+    return Ok(None);
+
+    // Inner helper: `p` is known to live inside `repo`. Returns the HEAD
+    // sha1 when no listed source file is modified; bails (unless
+    // `allow_dirty`) otherwise, returning `None` when dirty is allowed.
+    fn git(
+        p: &Package,
+        src_files: &[PathBuf],
+        repo: &git2::Repository,
+        allow_dirty: bool
+    ) -> CargoResult<Option<String>> {
+        let workdir = repo.workdir().unwrap();
+        let dirty = src_files
+            .iter()
+            .filter(|file| {
+                let relative = file.strip_prefix(workdir).unwrap();
+                if let Ok(status) = repo.status_file(relative) {
+                    status != git2::Status::CURRENT
+                } else {
+                    // Files whose status cannot be read are treated as clean.
+                    false
+                }
+            })
+            .map(|path| {
+                // Report paths relative to the package root for readability.
+                path.strip_prefix(p.root())
+                    .unwrap_or(path)
+                    .display()
+                    .to_string()
+            })
+            .collect::<Vec<_>>();
+        if dirty.is_empty() {
+            let rev_obj = repo.revparse_single("HEAD")?;
+            Ok(Some(rev_obj.id().to_string()))
+        } else {
+            if !allow_dirty {
+                bail!(
+                    "{} files in the working directory contain changes that were \
+                     not yet committed into git:\n\n{}\n\n\
+                     to proceed despite this, pass the `--allow-dirty` flag",
+                    dirty.len(),
+                    dirty.join("\n")
+                )
+            }
+            Ok(None)
+        }
+    }
+}
+
+// Check for and `bail!` if a source file matches ROOT/VCS_INFO_FILE, since
+// this is now a cargo reserved file name, and we don't want to allow
+// forgery.
+fn check_vcs_file_collision(pkg: &Package, src_files: &[PathBuf]) -> CargoResult<()> {
+    let root = pkg.root();
+    let vcs_info_path = Path::new(VCS_INFO_FILE);
+    let collision = src_files.iter().find(|&p| {
+        util::without_prefix(&p, root).unwrap() == vcs_info_path
+    });
+    if collision.is_some() {
+        bail!("Invalid inclusion of reserved file name \
+               {} in package source", VCS_INFO_FILE);
+    }
+    Ok(())
+}
+
+fn tar(
+    ws: &Workspace,
+    src_files: &[PathBuf],
+    vcs_info: Option<&serde_json::Value>,
+    dst: &File,
+    filename: &str
+) -> CargoResult<()> {
+    // Prepare the encoder and its header
+    let filename = Path::new(filename);
+    let encoder = GzBuilder::new()
+        .filename(util::path2bytes(filename)?)
+        .write(dst, Compression::best());
+
+    // Put all package files into a compressed archive
+    let mut ar = Builder::new(encoder);
+    let pkg = ws.current()?;
+    let config = ws.config();
+    let root = pkg.root();
+    for file in src_files.iter() {
+        let relative = util::without_prefix(file, root).unwrap();
+        check_filename(relative)?;
+        let relative = relative.to_str().ok_or_else(|| {
+            format_err!("non-utf8 path in source directory: {}", relative.display())
+        })?;
+        config
+            .shell()
+            .verbose(|shell| shell.status("Archiving", &relative))?;
+        let path = format!(
+            "{}-{}{}{}",
+            pkg.name(),
+            pkg.version(),
+            path::MAIN_SEPARATOR,
+            relative
+        );
+
+        // The tar::Builder type by default will build GNU archives, but
+        // unfortunately we force it here to use UStar archives instead. The
+        // UStar format has more limitations on the length of path name that it
+        // can encode, so it's not quite as nice to use.
+        //
+        // Older cargos, however, had a bug where GNU archives were interpreted
+        // as UStar archives. This bug means that if we publish a GNU archive
+        // which has fully filled out metadata it'll be corrupt when unpacked by
+        // older cargos.
+        //
+        // Hopefully in the future after enough cargos have been running around
+        // with the bugfixed tar-rs library we'll be able to switch this over to
+        // GNU archives, but for now we'll just say that you can't encode paths
+        // in archives that are *too* long.
+        //
+        // For an instance of this in the wild, use the tar-rs 0.3.3 library to
+        // unpack the selectors 0.4.0 crate on crates.io. Either that or take a
+        // look at rust-lang/cargo#2326
+        let mut header = Header::new_ustar();
+        header
+            .set_path(&path)
+            .chain_err(|| format!("failed to add to archive: `{}`", relative))?;
+        let mut file = File::open(file)
+            .chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?;
+        let metadata = file.metadata()
+            .chain_err(|| format!("could not learn metadata for: `{}`", relative))?;
+        header.set_metadata(&metadata);
+
+        if relative == "Cargo.toml" {
+            let orig = Path::new(&path).with_file_name("Cargo.toml.orig");
+            header.set_path(&orig)?;
+            header.set_cksum();
+            ar.append(&header, &mut file)
+                .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
+
+            let mut header = Header::new_ustar();
+            let toml = pkg.to_registry_toml(ws.config())?;
+            header.set_path(&path)?;
+            header.set_entry_type(EntryType::file());
+            header.set_mode(0o644);
+            header.set_size(toml.len() as u64);
+            header.set_cksum();
+            ar.append(&header, toml.as_bytes())
+                .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
+        } else {
+            header.set_cksum();
+            ar.append(&header, &mut file)
+                .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
+        }
+    }
+
+    if let Some(ref json) = vcs_info {
+        let filename: PathBuf = Path::new(VCS_INFO_FILE).into();
+        debug_assert!(check_filename(&filename).is_ok());
+        let fnd = filename.display();
+        config
+            .shell()
+            .verbose(|shell| shell.status("Archiving", &fnd))?;
+        let path = format!(
+            "{}-{}{}{}",
+            pkg.name(),
+            pkg.version(),
+            path::MAIN_SEPARATOR,
+            fnd
+        );
+        let mut header = Header::new_ustar();
+        header.set_path(&path).chain_err(|| {
+            format!("failed to add to archive: `{}`", fnd)
+        })?;
+        let json = format!("{}\n", serde_json::to_string_pretty(json)?);
+        let mut header = Header::new_ustar();
+        header.set_path(&path)?;
+        header.set_entry_type(EntryType::file());
+        header.set_mode(0o644);
+        header.set_size(json.len() as u64);
+        header.set_cksum();
+        ar.append(&header, json.as_bytes()).chain_err(|| {
+            internal(format!("could not archive source file `{}`", fnd))
+        })?;
+    }
+
+    if include_lockfile(pkg) {
+        let toml = paths::read(&ws.root().join("Cargo.lock"))?;
+        let path = format!(
+            "{}-{}{}Cargo.lock",
+            pkg.name(),
+            pkg.version(),
+            path::MAIN_SEPARATOR
+        );
+        let mut header = Header::new_ustar();
+        header.set_path(&path)?;
+        header.set_entry_type(EntryType::file());
+        header.set_mode(0o644);
+        header.set_size(toml.len() as u64);
+        header.set_cksum();
+        ar.append(&header, toml.as_bytes())
+            .chain_err(|| internal("could not archive source file `Cargo.lock`"))?;
+    }
+
+    let encoder = ar.into_inner()?;
+    encoder.finish()?;
+    Ok(())
+}
+
+// Unpacks the freshly-built tarball next to it, compiles it in an
+// ephemeral workspace to prove it builds standalone, and verifies that
+// build.rs did not modify any packaged source file.
+fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult<()> {
+    let config = ws.config();
+    let pkg = ws.current()?;
+
+    config.shell().status("Verifying", pkg)?;
+
+    // Unpack into `target/package/name-version`, replacing any prior attempt.
+    let f = GzDecoder::new(tar.file());
+    let dst = tar.parent()
+        .join(&format!("{}-{}", pkg.name(), pkg.version()));
+    if dst.exists() {
+        paths::remove_dir_all(&dst)?;
+    }
+    let mut archive = Archive::new(f);
+    archive.unpack(dst.parent().unwrap())?;
+
+    // Manufacture an ephemeral workspace to ensure that even if the top-level
+    // package has a workspace we can still build our new crate.
+    let id = SourceId::for_path(&dst)?;
+    let mut src = PathSource::new(&dst, &id, ws.config());
+    let new_pkg = src.root_package()?;
+    // Snapshot the most-recently-modified file before compiling so that
+    // build-script modifications can be detected afterwards.
+    let pkg_fingerprint = src.last_modified_file(&new_pkg)?;
+    let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
+
+    let exec: Arc<Executor> = Arc::new(DefaultExecutor);
+    ops::compile_ws(
+        &ws,
+        None,
+        &ops::CompileOptions {
+            config,
+            build_config: BuildConfig::new(config, opts.jobs, &opts.target, CompileMode::Build)?,
+            features: Vec::new(),
+            no_default_features: false,
+            all_features: false,
+            spec: ops::Packages::Packages(Vec::new()),
+            filter: ops::CompileFilter::Default {
+                required_features_filterable: true,
+            },
+            target_rustdoc_args: None,
+            target_rustc_args: None,
+            local_rustdoc_args: None,
+            export_dir: None,
+        },
+        &exec,
+    )?;
+
+    // Check that build.rs didn't modify any files in the src directory.
+    let ws_fingerprint = src.last_modified_file(ws.current()?)?;
+    if pkg_fingerprint != ws_fingerprint {
+        let (_, path) = ws_fingerprint;
+        bail!(
+            "Source directory was modified by build.rs during cargo publish. \
+             Build scripts should not modify anything outside of OUT_DIR. \
+             Modified file: {}\n\n\
+             To proceed despite this, pass the `--no-verify` flag.",
+            path.display()
+        )
+    }
+
+    Ok(())
+}
+
+// It can often be the case that files of a particular name on one platform
+// can't actually be created on another platform. For example files with colons
+// in the name are allowed on Unix but not on Windows.
+//
+// To help out in situations like this, issue about weird filenames when
+// packaging as a "heads up" that something may not work on other platforms.
+fn check_filename(file: &Path) -> CargoResult<()> {
+    let name = match file.file_name() {
+        Some(name) => name,
+        None => return Ok(()),
+    };
+    let name = match name.to_str() {
+        Some(name) => name,
+        None => bail!(
+            "path does not have a unicode filename which may not unpack \
+             on all platforms: {}",
+            file.display()
+        ),
+    };
+    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
+    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
+        bail!(
+            "cannot package a filename with a special character `{}`: {}",
+            c,
+            file.display()
+        )
+    }
+    Ok(())
+}
diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs
new file mode 100644 (file)
index 0000000..1d55f15
--- /dev/null
@@ -0,0 +1,16 @@
+use core::{PackageIdSpec, Workspace};
+use ops;
+use util::CargoResult;
+
+pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult<PackageIdSpec> {
+    let resolve = match ops::load_pkg_lockfile(ws)? {
+        Some(resolve) => resolve,
+        None => bail!("a Cargo.lock must exist for this command"),
+    };
+
+    let pkgid = match spec {
+        Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,
+        None => ws.current()?.package_id(),
+    };
+    Ok(PackageIdSpec::from_package_id(pkgid))
+}
diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs
new file mode 100644 (file)
index 0000000..d3f9d3e
--- /dev/null
@@ -0,0 +1,199 @@
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use core::{EitherManifest, Package, PackageId, SourceId};
+use util::{self, Config};
+use util::errors::{CargoError, CargoResult};
+use util::important_paths::find_project_manifest_exact;
+use util::toml::read_manifest;
+
+pub fn read_package(
+    path: &Path,
+    source_id: &SourceId,
+    config: &Config,
+) -> CargoResult<(Package, Vec<PathBuf>)> {
+    trace!(
+        "read_package; path={}; source-id={}",
+        path.display(),
+        source_id
+    );
+    let (manifest, nested) = read_manifest(path, source_id, config)?;
+    let manifest = match manifest {
+        EitherManifest::Real(manifest) => manifest,
+        EitherManifest::Virtual(..) => bail!(
+            "found a virtual manifest at `{}` instead of a package \
+             manifest",
+            path.display()
+        ),
+    };
+
+    Ok((Package::new(manifest, path), nested))
+}
+
+/// Discovers and reads every package at or below `path`, pruning hidden
+/// directories, git submodules, and `target` directories along the way.
+pub fn read_packages(
+    path: &Path,
+    source_id: &SourceId,
+    config: &Config,
+) -> CargoResult<Vec<Package>> {
+    let mut all_packages = HashMap::new();
+    // Paths already processed, to avoid reading a manifest twice.
+    let mut visited = HashSet::<PathBuf>::new();
+    // Manifest-read failures; only surfaced if nothing at all was found.
+    let mut errors = Vec::<CargoError>::new();
+
+    trace!(
+        "looking for root package: {}, source_id={}",
+        path.display(),
+        source_id
+    );
+
+    walk(path, &mut |dir| {
+        trace!("looking for child package: {}", dir.display());
+
+        // Don't recurse into hidden/dot directories unless we're at the toplevel
+        if dir != path {
+            let name = dir.file_name().and_then(|s| s.to_str());
+            if name.map(|s| s.starts_with('.')) == Some(true) {
+                return Ok(false);
+            }
+
+            // Don't automatically discover packages across git submodules
+            if fs::metadata(&dir.join(".git")).is_ok() {
+                return Ok(false);
+            }
+        }
+
+        // Don't ever look at target directories
+        if dir.file_name().and_then(|s| s.to_str()) == Some("target")
+            && has_manifest(dir.parent().unwrap())
+        {
+            return Ok(false);
+        }
+
+        if has_manifest(dir) {
+            read_nested_packages(
+                dir,
+                &mut all_packages,
+                source_id,
+                config,
+                &mut visited,
+                &mut errors,
+            )?;
+        }
+        Ok(true)
+    })?;
+
+    if all_packages.is_empty() {
+        // Prefer reporting the most recent manifest error over a generic
+        // "not found" message.
+        match errors.pop() {
+            Some(err) => Err(err),
+            None => Err(format_err!(
+                "Could not find Cargo.toml in `{}`",
+                path.display()
+            )),
+        }
+    } else {
+        Ok(all_packages.into_iter().map(|(_, v)| v).collect())
+    }
+}
+
+fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {
+    if !callback(path)? {
+        trace!("not processing {}", path.display());
+        return Ok(());
+    }
+
+    // Ignore any permission denied errors because temporary directories
+    // can often have some weird permissions on them.
+    let dirs = match fs::read_dir(path) {
+        Ok(dirs) => dirs,
+        Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),
+        Err(e) => {
+            let cx = format!("failed to read directory `{}`", path.display());
+            let e = CargoError::from(e);
+            return Err(e.context(cx).into());
+        }
+    };
+    for dir in dirs {
+        let dir = dir?;
+        if dir.file_type()?.is_dir() {
+            walk(&dir.path(), callback)?;
+        }
+    }
+    Ok(())
+}
+
+fn has_manifest(path: &Path) -> bool {
+    find_project_manifest_exact(path, "Cargo.toml").is_ok()
+}
+
+/// Reads the package at `path` into `all_packages` and recurses into the
+/// nested manifest paths it declares (unless the source is a registry).
+/// Malformed manifests are recorded in `errors` and skipped.
+fn read_nested_packages(
+    path: &Path,
+    all_packages: &mut HashMap<PackageId, Package>,
+    source_id: &SourceId,
+    config: &Config,
+    visited: &mut HashSet<PathBuf>,
+    errors: &mut Vec<CargoError>,
+) -> CargoResult<()> {
+    // `insert` returns false when the path was already present.
+    if !visited.insert(path.to_path_buf()) {
+        return Ok(());
+    }
+
+    let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
+
+    let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) {
+        Err(err) => {
+            // Ignore malformed manifests found on git repositories
+            //
+            // git source try to find and read all manifests from the repository
+            // but since it's not possible to exclude folders from this search
+            // it's safer to ignore malformed manifests to avoid
+            //
+            // TODO: Add a way to exclude folders?
+            info!(
+                "skipping malformed package found at `{}`",
+                path.to_string_lossy()
+            );
+            errors.push(err.into());
+            return Ok(());
+        }
+        Ok(tuple) => tuple,
+    };
+
+    // Virtual (workspace-only) manifests contribute no package of their own.
+    let manifest = match manifest {
+        EitherManifest::Real(manifest) => manifest,
+        EitherManifest::Virtual(..) => return Ok(()),
+    };
+    let pkg = Package::new(manifest, &manifest_path);
+
+    let pkg_id = pkg.package_id().clone();
+    use std::collections::hash_map::Entry;
+    match all_packages.entry(pkg_id) {
+        Entry::Vacant(v) => {
+            v.insert(pkg);
+        }
+        Entry::Occupied(_) => {
+            // First package found for an id wins; duplicates are only logged.
+            info!(
+                "skipping nested package `{}` found at `{}`",
+                pkg.name(),
+                path.to_string_lossy()
+            );
+        }
+    }
+
+    // Registry sources are not allowed to have `path=` dependencies because
+    // they're all translated to actual registry dependencies.
+    //
+    // We normalize the path here ensure that we don't infinitely walk around
+    // looking for crates. By normalizing we ensure that we visit this crate at
+    // most once.
+    //
+    // TODO: filesystem/symlink implications?
+    if !source_id.is_registry() {
+        for p in nested.iter() {
+            let path = util::normalize_path(&path.join(p));
+            read_nested_packages(&path, all_packages, source_id, config, visited, errors)?;
+        }
+    }
+
+    Ok(())
+}
diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs
new file mode 100644 (file)
index 0000000..08336d9
--- /dev/null
@@ -0,0 +1,103 @@
+use std::iter;
+use std::path::Path;
+
+use ops;
+use util::{self, CargoResult, ProcessError};
+use core::{TargetKind, Workspace, nightly_features_allowed};
+
+/// Compiles and then executes the single selected binary (or example),
+/// returning the process error (if any) rather than propagating it.
+pub fn run(
+    ws: &Workspace,
+    options: &ops::CompileOptions,
+    args: &[String],
+) -> CargoResult<Option<ProcessError>> {
+    let config = ws.config();
+
+    // We compute the `bins` here *just for diagnosis*.  The actual set of
+    // packages to be run is determined by the `ops::compile` call below.
+    let packages = options.spec.get_packages(ws)?;
+    let bins: Vec<_> = packages
+        .into_iter()
+        .flat_map(|pkg| {
+            // Pair each runnable target with its owning package; with no
+            // explicit filter only `[[bin]]` targets qualify.
+            iter::repeat(pkg).zip(pkg.manifest().targets().iter().filter(|target| {
+                !target.is_lib() && !target.is_custom_build() && if !options.filter.is_specific() {
+                    target.is_bin()
+                } else {
+                    options.filter.target_run(target)
+                }
+            }))
+        }).collect();
+
+    if bins.is_empty() {
+        if !options.filter.is_specific() {
+            bail!("a bin target must be available for `cargo run`")
+        } else {
+            // this will be verified in cargo_compile
+        }
+    }
+
+    if bins.len() == 1 {
+        let target = bins[0].1;
+        if let TargetKind::ExampleLib(..) = target.kind() {
+            bail!(
+                "example target `{}` is a library and cannot be executed",
+                target.name()
+            )
+        }
+    }
+
+    if bins.len() > 1 {
+        if !options.filter.is_specific() {
+            let names: Vec<&str> = bins
+                .into_iter()
+                .map(|(_pkg, target)| target.name())
+                .collect();
+            if nightly_features_allowed() {
+                bail!(
+                    "`cargo run` could not determine which binary to run. \
+                     Use the `--bin` option to specify a binary, \
+                     or (on nightly) the `default-run` manifest key.\n\
+                     available binaries: {}",
+                    names.join(", ")
+                )
+            } else {
+                bail!(
+                    "`cargo run` requires that a package only have one \
+                     executable; use the `--bin` option to specify which one \
+                     to run\navailable binaries: {}",
+                    names.join(", ")
+                )
+            }
+        } else {
+            bail!(
+                "`cargo run` can run at most one executable, but \
+                 multiple were specified"
+            )
+        }
+    }
+
+    let compile = ops::compile(ws, options)?;
+    assert_eq!(compile.binaries.len(), 1);
+    let exe = &compile.binaries[0];
+    // Prefer a short path relative to the cwd for display/execution; a bare
+    // filename gets a `./` prefix so the OS resolves it from the cwd.
+    let exe = match util::without_prefix(exe, config.cwd()) {
+        Some(path) if path.file_name() == Some(path.as_os_str()) => {
+            Path::new(".").join(path).to_path_buf()
+        }
+        Some(path) => path.to_path_buf(),
+        None => exe.to_path_buf(),
+    };
+    let pkg = bins[0].0;
+    let mut process = compile.target_process(exe, pkg)?;
+    process.args(args).cwd(config.cwd());
+
+    config.shell().status("Running", process.to_string())?;
+
+    // `exec_replace` replaces the current process where the platform allows;
+    // a failed execution is reported as `Ok(Some(_))`, not `Err`.
+    let result = process.exec_replace();
+
+    match result {
+        Ok(()) => Ok(None),
+        Err(e) => {
+            let err = e.downcast::<ProcessError>()?;
+            Ok(Some(err))
+        }
+    }
+}
diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs
new file mode 100644 (file)
index 0000000..fe410cc
--- /dev/null
@@ -0,0 +1,204 @@
+use std::ffi::OsString;
+
+use ops;
+use core::compiler::{Compilation, Doctest};
+use util::{self, CargoTestError, ProcessError, Test};
+use util::errors::CargoResult;
+use core::Workspace;
+
+/// Options controlling how `cargo test` / `cargo bench` compile and run
+/// test executables.
+pub struct TestOptions<'a> {
+    /// Compilation settings forwarded to `ops::compile`.
+    pub compile_opts: ops::CompileOptions<'a>,
+    /// Build the test executables but do not execute them.
+    pub no_run: bool,
+    /// Keep running remaining tests after a failure instead of stopping at
+    /// the first failing binary.
+    pub no_fail_fast: bool,
+}
+
+/// Compiles and runs the unit/integration tests and then the doc tests of a
+/// workspace.
+///
+/// Returns `Ok(None)` when everything passed (or `no_run` was requested) and
+/// `Ok(Some(CargoTestError))` describing the failing tests otherwise; a hard
+/// `Err` is reserved for compilation or process-spawning problems.
+pub fn run_tests(
+    ws: &Workspace,
+    options: &TestOptions,
+    test_args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
+    let compilation = compile_tests(ws, options)?;
+
+    if options.no_run {
+        return Ok(None);
+    }
+    let (test, mut errors) = run_unit_tests(options, test_args, &compilation)?;
+
+    // If we have an error and want to fail fast, return
+    if !errors.is_empty() && !options.no_fail_fast {
+        return Ok(Some(CargoTestError::new(test, errors)));
+    }
+
+    // Doc tests run after unit tests; if any doc test failed, report the
+    // doc-test descriptor, otherwise keep the unit-test one.
+    let (doctest, docerrors) = run_doc_tests(options, test_args, &compilation)?;
+    let test = if docerrors.is_empty() { test } else { doctest };
+    errors.extend(docerrors);
+    if errors.is_empty() {
+        Ok(None)
+    } else {
+        Ok(Some(CargoTestError::new(test, errors)))
+    }
+}
+
+/// Compiles and runs benchmarks.
+///
+/// Works like `run_tests` except that `--bench` is appended to the arguments
+/// passed to each test binary and doc tests are not executed.
+pub fn run_benches(
+    ws: &Workspace,
+    options: &TestOptions,
+    args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
+    let mut args = args.to_vec();
+    args.push("--bench".to_string());
+    let compilation = compile_tests(ws, options)?;
+
+    if options.no_run {
+        return Ok(None);
+    }
+    let (test, errors) = run_unit_tests(options, &args, &compilation)?;
+    match errors.len() {
+        0 => Ok(None),
+        _ => Ok(Some(CargoTestError::new(test, errors))),
+    }
+}
+
+/// Compiles the workspace's test executables and sorts them (by package id,
+/// target kind, and name) so they are run in a deterministic order.
+fn compile_tests<'a>(
+    ws: &Workspace<'a>,
+    options: &TestOptions<'a>,
+) -> CargoResult<Compilation<'a>> {
+    let mut compilation = ops::compile(ws, &options.compile_opts)?;
+    compilation
+        .tests
+        .sort_by(|a, b| (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2)));
+    Ok(compilation)
+}
+
+/// Run the unit and integration tests of a package.
+///
+/// Executes each compiled test binary in turn, passing `test_args` through.
+/// Returns the descriptor of what was run plus any process errors collected:
+/// when exactly one binary failed, the specific `Test::UnitTest` (kind, test
+/// name, package) is reported; otherwise `Test::Multiple` is used.
+fn run_unit_tests(
+    options: &TestOptions,
+    test_args: &[String],
+    compilation: &Compilation,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
+    let config = options.compile_opts.config;
+    let cwd = options.compile_opts.config.cwd();
+
+    let mut errors = Vec::new();
+
+    for &(ref pkg, ref kind, ref test, ref exe) in &compilation.tests {
+        // Show a path relative to the cwd when possible for nicer output.
+        let to_display = match util::without_prefix(exe, cwd) {
+            Some(path) => path,
+            None => &**exe,
+        };
+        let mut cmd = compilation.target_process(exe, pkg)?;
+        cmd.args(test_args);
+        config
+            .shell()
+            .concise(|shell| shell.status("Running", to_display.display().to_string()))?;
+        config
+            .shell()
+            .verbose(|shell| shell.status("Running", cmd.to_string()))?;
+
+        let result = cmd.exec();
+
+        match result {
+            Err(e) => {
+                // Only `ProcessError`s are treated as test failures; anything
+                // else propagates as a hard error via `?`.
+                let e = e.downcast::<ProcessError>()?;
+                errors.push((kind.clone(), test.clone(), pkg.name().to_string(), e));
+                // Fail fast unless `--no-fail-fast` was given.
+                if !options.no_fail_fast {
+                    break;
+                }
+            }
+            Ok(()) => {}
+        }
+    }
+
+    if errors.len() == 1 {
+        let (kind, name, pkg_name, e) = errors.pop().unwrap();
+        Ok((
+            Test::UnitTest {
+                kind,
+                name,
+                pkg_name,
+            },
+            vec![e],
+        ))
+    } else {
+        Ok((
+            Test::Multiple,
+            errors.into_iter().map(|(_, _, _, e)| e).collect(),
+        ))
+    }
+}
+
+/// Runs documentation tests by invoking `rustdoc --test` once per doc-test
+/// target, forwarding dependency search paths, `--cfg` flags, `--extern`
+/// crates, and any extra rustdoc flags from the compilation.
+fn run_doc_tests(
+    options: &TestOptions,
+    test_args: &[String],
+    compilation: &Compilation,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
+    let mut errors = Vec::new();
+    let config = options.compile_opts.config;
+
+    // We don't build/run doctests if target != host (cross-compilation),
+    // since the produced doctest executables couldn't run on the host.
+    if compilation.host != compilation.target {
+        return Ok((Test::Doc, errors));
+    }
+
+    for doctest_info in &compilation.to_doc_test {
+        let Doctest {
+            package,
+            target,
+            deps,
+        } = doctest_info;
+        config.shell().status("Doc-tests", target.name())?;
+        let mut p = compilation.rustdoc_process(package, target)?;
+        p.arg("--test")
+            .arg(target.src_path().path())
+            .arg("--crate-name")
+            .arg(&target.crate_name());
+
+        // Search path for target dependencies.
+        for &rust_dep in &[&compilation.deps_output] {
+            let mut arg = OsString::from("dependency=");
+            arg.push(rust_dep);
+            p.arg("-L").arg(arg);
+        }
+
+        // Search paths for native (non-Rust) dependencies.
+        for native_dep in compilation.native_dirs.iter() {
+            p.arg("-L").arg(native_dep);
+        }
+
+        // Search path for host dependencies (proc-macros, build scripts).
+        for &host_rust_dep in &[&compilation.host_deps_output] {
+            let mut arg = OsString::from("dependency=");
+            arg.push(host_rust_dep);
+            p.arg("-L").arg(arg);
+        }
+
+        // User-provided test arguments are forwarded via `--test-args`.
+        for arg in test_args {
+            p.arg("--test-args").arg(arg);
+        }
+
+        if let Some(cfgs) = compilation.cfgs.get(package.package_id()) {
+            for cfg in cfgs.iter() {
+                p.arg("--cfg").arg(cfg);
+            }
+        }
+
+        // Explicitly link each dependency by its extern crate name.
+        for &(ref extern_crate_name, ref lib) in deps.iter() {
+            let mut arg = OsString::from(extern_crate_name);
+            arg.push("=");
+            arg.push(lib);
+            p.arg("--extern").arg(&arg);
+        }
+
+        if let Some(flags) = compilation.rustdocflags.get(package.package_id()) {
+            p.args(flags);
+        }
+
+        config
+            .shell()
+            .verbose(|shell| shell.status("Running", p.to_string()))?;
+        if let Err(e) = p.exec() {
+            let e = e.downcast::<ProcessError>()?;
+            errors.push(e);
+            // Stop at the first failure unless `--no-fail-fast` was given.
+            if !options.no_fail_fast {
+                return Ok((Test::Doc, errors));
+            }
+        }
+    }
+    Ok((Test::Doc, errors))
+}
diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs
new file mode 100644 (file)
index 0000000..2c1a9cf
--- /dev/null
@@ -0,0 +1,623 @@
+use std::collections::{HashMap, HashSet, BTreeSet};
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::{self, Command, ExitStatus};
+use std::str;
+
+use failure::{Error, ResultExt};
+use git2;
+use rustfix::diagnostics::Diagnostic;
+use rustfix::{self, CodeFix};
+use serde_json;
+
+use core::Workspace;
+use ops::{self, CompileOptions};
+use util::errors::CargoResult;
+use util::{LockServer, LockServerClient, existing_vcs_repo};
+use util::diagnostic_server::{Message, RustfixDiagnosticServer};
+use util::paths;
+
+// Environment variables used to pass state from the top-level `cargo fix`
+// invocation to the recursive cargo invocations that act as a rustc wrapper
+// (see `fix_maybe_exec_rustc`).
+
+// Address of the lock server used to serialize fixes across processes.
+const FIX_ENV: &str = "__CARGO_FIX_PLZ";
+// When set, keep/apply fixes even if rustc fails on the original code.
+const BROKEN_CODE_ENV: &str = "__CARGO_FIX_BROKEN_CODE";
+// Target edition for `--prepare-for` (compatibility lint) mode.
+const PREPARE_FOR_ENV: &str = "__CARGO_FIX_PREPARE_FOR";
+// Set to "1" when migrating to the next edition (`--edition`).
+const EDITION_ENV: &str = "__CARGO_FIX_EDITION";
+
+// Set to "1" when idiom lints were requested (`--edition-idioms`).
+const IDIOMS_ENV: &str = "__CARGO_FIX_IDIOMS";
+
+/// Options for the `cargo fix` operation.
+pub struct FixOptions<'a> {
+    /// Migrate source code to the next edition.
+    pub edition: bool,
+    /// Apply compatibility lints preparing for the named edition.
+    pub prepare_for: Option<&'a str>,
+    /// Also apply edition idiom lints.
+    pub idioms: bool,
+    /// Compilation settings forwarded to `ops::compile`.
+    pub compile_opts: CompileOptions<'a>,
+    /// Skip the dirty-working-directory VCS check.
+    pub allow_dirty: bool,
+    /// Skip the VCS presence check entirely.
+    pub allow_no_vcs: bool,
+    /// Skip the staged-files VCS check.
+    pub allow_staged: bool,
+    /// Attempt fixes even when the code currently fails to compile.
+    pub broken_code: bool,
+}
+
+/// Entry point for `cargo fix`: verifies version control is in a safe state,
+/// then recompiles the workspace with cargo configured as the rustc wrapper
+/// so each crate's diagnostics can be turned into rustfix edits.
+///
+/// Options are communicated to the wrapper subprocesses through the
+/// `__CARGO_FIX_*` environment variables declared above.
+pub fn fix(ws: &Workspace, opts: &mut FixOptions) -> CargoResult<()> {
+    check_version_control(opts)?;
+
+    // Spin up our lock server which our subprocesses will use to synchronize
+    // fixes.
+    let lock_server = LockServer::new()?;
+    opts.compile_opts.build_config.extra_rustc_env.push((
+        FIX_ENV.to_string(),
+        lock_server.addr().to_string(),
+    ));
+    let _started = lock_server.start()?;
+
+    // Everything must be rebuilt so every crate gets a wrapper invocation.
+    opts.compile_opts.build_config.force_rebuild = true;
+
+    if opts.broken_code {
+        let key = BROKEN_CODE_ENV.to_string();
+        opts.compile_opts.build_config.extra_rustc_env.push((key, "1".to_string()));
+    }
+
+    // `--edition` and `--prepare-for` are mutually exclusive modes.
+    if opts.edition {
+        let key = EDITION_ENV.to_string();
+        opts.compile_opts.build_config.extra_rustc_env.push((key, "1".to_string()));
+    } else if let Some(edition) = opts.prepare_for {
+        opts.compile_opts.build_config.extra_rustc_env.push((
+            PREPARE_FOR_ENV.to_string(),
+            edition.to_string(),
+        ));
+    }
+    if opts.idioms {
+        opts.compile_opts.build_config.extra_rustc_env.push((
+            IDIOMS_ENV.to_string(),
+            "1".to_string(),
+        ));
+    }
+    // Run cargo itself as the rustc wrapper and collect rustfix diagnostics
+    // over the diagnostic server.
+    opts.compile_opts.build_config.cargo_as_rustc_wrapper = true;
+    *opts.compile_opts.build_config.rustfix_diagnostic_server.borrow_mut() =
+        Some(RustfixDiagnosticServer::new()?);
+
+    ops::compile(ws, &opts.compile_opts)?;
+    Ok(())
+}
+
+/// Refuses to run `cargo fix` unless the package is under version control
+/// with a clean working directory, since fixes rewrite files in place.
+///
+/// The checks can be individually bypassed with `--allow-no-vcs`,
+/// `--allow-dirty`, and `--allow-staged`.
+fn check_version_control(opts: &FixOptions) -> CargoResult<()> {
+    if opts.allow_no_vcs {
+        return Ok(())
+    }
+    let config = opts.compile_opts.config;
+    if !existing_vcs_repo(config.cwd(), config.cwd()) {
+        bail!("no VCS found for this package and `cargo fix` can potentially \
+               perform destructive changes; if you'd like to suppress this \
+               error pass `--allow-no-vcs`")
+    }
+
+    if opts.allow_dirty && opts.allow_staged {
+        return Ok(())
+    }
+
+    // Collect any modified files, split into staged (index) changes and
+    // dirty (working-tree) changes, honoring the corresponding overrides.
+    let mut dirty_files = Vec::new();
+    let mut staged_files = Vec::new();
+    if let Ok(repo) = git2::Repository::discover(config.cwd()) {
+        let mut repo_opts = git2::StatusOptions::new();
+        repo_opts.include_ignored(false);
+        for status in repo.statuses(Some(&mut repo_opts))?.iter() {
+            if let Some(path) = status.path() {
+                match status.status() {
+                    git2::Status::CURRENT => (),
+                    git2::Status::INDEX_NEW |
+                    git2::Status::INDEX_MODIFIED |
+                    git2::Status::INDEX_DELETED |
+                    git2::Status::INDEX_RENAMED |
+                    git2::Status::INDEX_TYPECHANGE =>
+                        if !opts.allow_staged {
+                            staged_files.push(path.to_string())
+                        },
+                    _ =>
+                        if !opts.allow_dirty {
+                            dirty_files.push(path.to_string())
+                        },
+                };
+            }
+
+        }
+    }
+
+    if dirty_files.is_empty() && staged_files.is_empty() {
+        return Ok(())
+    }
+
+    // Build a human-readable listing of the offending files for the error.
+    let mut files_list = String::new();
+    for file in dirty_files {
+        files_list.push_str("  * ");
+        files_list.push_str(&file);
+        files_list.push_str(" (dirty)\n");
+    }
+    for file in staged_files {
+        files_list.push_str("  * ");
+        files_list.push_str(&file);
+        files_list.push_str(" (staged)\n");
+    }
+
+    bail!("the working directory of this package has uncommitted changes, and \
+           `cargo fix` can potentially perform destructive changes; if you'd \
+           like to suppress this error pass `--allow-dirty`, `--allow-staged`, \
+           or commit the changes to these files:\n\
+           \n\
+           {}\n\
+          ", files_list);
+}
+
+/// Called early in every cargo invocation: if the `__CARGO_FIX_PLZ` variable
+/// is set we're running as a rustc wrapper inside `cargo fix`, so apply
+/// rustfix suggestions and then run (or re-run) the real rustc.
+///
+/// Returns `Ok(false)` when not in wrapper mode (normal cargo execution
+/// continues), `Ok(true)` when fixes were committed and no final rustc run is
+/// needed; otherwise this function does not return — it execs rustc via
+/// `exit_with`.
+pub fn fix_maybe_exec_rustc() -> CargoResult<bool> {
+    let lock_addr = match env::var(FIX_ENV) {
+        Ok(s) => s,
+        Err(_) => return Ok(false),
+    };
+
+    let args = FixArgs::get();
+    trace!("cargo-fix as rustc got file {:?}", args.file);
+    let rustc = env::var_os("RUSTC").expect("failed to find RUSTC env var");
+
+    // Our goal is to fix only the crates that the end user is interested in.
+    // That's very likely to only mean the crates in the workspace the user is
+    // working on, not random crates.io crates.
+    //
+    // To that end we only actually try to fix things if it looks like we're
+    // compiling a Rust file and it *doesn't* have an absolute filename. That's
+    // not the best heuristic but matches what Cargo does today at least.
+    let mut fixes = FixedCrate::default();
+    if let Some(path) = &args.file {
+        if args.primary_package {
+            trace!("start rustfixing {:?}", path);
+            fixes = rustfix_crate(&lock_addr, rustc.as_ref(), path, &args)?;
+        }
+    }
+
+    // Ok now we have our final goal of testing out the changes that we applied.
+    // If these changes went awry and actually started to cause the crate to
+    // *stop* compiling then we want to back them out and continue to print
+    // warnings to the user.
+    //
+    // If we didn't actually make any changes then we can immediately exec the
+    // new rustc, and otherwise we capture the output to hide it in the scenario
+    // that we have to back it all out.
+    if !fixes.files.is_empty() {
+        let mut cmd = Command::new(&rustc);
+        args.apply(&mut cmd);
+        cmd.arg("--error-format=json");
+        let output = cmd.output().context("failed to spawn rustc")?;
+
+        if output.status.success() {
+            // Report how many fixes were applied to each file.
+            for (path, file) in fixes.files.iter() {
+                Message::Fixing {
+                    file: path.clone(),
+                    fixes: file.fixes_applied,
+                }.post()?;
+            }
+        }
+
+        // If we succeeded then we'll want to commit to the changes we made, if
+        // any. If stderr is empty then there's no need for the final exec at
+        // the end, we just bail out here.
+        if output.status.success() && output.stderr.is_empty() {
+            return Ok(true);
+        }
+
+        // Otherwise if our rustc just failed then that means that we broke the
+        // user's code with our changes. Back out everything and fall through
+        // below to recompile again.
+        if !output.status.success() {
+            if env::var_os(BROKEN_CODE_ENV).is_none() {
+                for (path, file) in fixes.files.iter() {
+                    fs::write(path, &file.original_code)
+                        .with_context(|_| format!("failed to write file `{}`", path))?;
+                }
+            }
+            log_failed_fix(&output.stderr)?;
+        }
+    }
+
+    // Final (or only) rustc run over the current state of the code.
+    let mut cmd = Command::new(&rustc);
+    args.apply(&mut cmd);
+    exit_with(cmd.status().context("failed to spawn rustc")?);
+}
+
+/// Accumulated fix state for one crate, keyed by source file path.
+#[derive(Default)]
+struct FixedCrate {
+    files: HashMap<String, FixedFile>,
+}
+
+/// Fix state for a single source file.
+struct FixedFile {
+    /// Messages from suggestions that failed to apply (reported at the end).
+    errors_applying_fixes: Vec<String>,
+    /// Number of suggestions successfully applied so far.
+    fixes_applied: u32,
+    /// The file's contents before any fixes, used to restore it if the
+    /// applied fixes break compilation.
+    original_code: String,
+}
+
+/// Fixes the crate whose root source file is `filename` by iteratively
+/// running `rustc` and applying the suggestions it emits via rustfix.
+///
+/// Takes a lock (keyed on `filename`) from the `cargo fix` lock server for
+/// the duration, so concurrent wrapper invocations don't fix concurrently.
+fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs)
+    -> Result<FixedCrate, Error>
+{
+    args.verify_not_preparing_for_enabled_edition()?;
+
+    // First up we want to make sure that each crate is only checked by one
+    // process at a time. If two invocations concurrently check a crate then
+    // it's likely to corrupt it.
+    //
+    // Currently we do this by assigning the name on our lock to the first
+    // argument that looks like a Rust file.
+    let _lock = LockServerClient::lock(&lock_addr.parse()?, filename)?;
+
+    // Next up this is a bit suspicious, but we *iteratively* execute rustc and
+    // collect suggestions to feed to rustfix. Once we hit our limit of times to
+    // execute rustc or we appear to be reaching a fixed point we stop running
+    // rustc.
+    //
+    // This is currently done to handle code like:
+    //
+    //      ::foo::<::Bar>();
+    //
+    // where there are two fixes to happen here: `crate::foo::<crate::Bar>()`.
+    // The spans for these two suggestions are overlapping and its difficult in
+    // the compiler to *not* have overlapping spans here. As a result, a naive
+    // implementation would feed the two compiler suggestions for the above fix
+    // into `rustfix`, but one would be rejected because it overlaps with the
+    // other.
+    //
+    // In this case though, both suggestions are valid and can be automatically
+    // applied! To handle this case we execute rustc multiple times, collecting
+    // fixes each time we do so. Along the way we discard any suggestions that
+    // failed to apply, assuming that they can be fixed the next time we run
+    // rustc.
+    //
+    // Naturally we want a few protections in place here though to avoid looping
+    // forever or otherwise losing data. To that end we have a few termination
+    // conditions:
+    //
+    // * Do this whole process a fixed number of times. In theory we probably
+    //   need an infinite number of times to apply fixes, but we're not gonna
+    //   sit around waiting for that.
+    // * If it looks like a fix genuinely can't be applied we need to bail out.
+    //   Detect this when a fix fails to get applied *and* no suggestions
+    //   successfully applied to the same file. In that case looks like we
+    //   definitely can't make progress, so bail out.
+    let mut fixes = FixedCrate::default();
+    let mut last_fix_counts = HashMap::new();
+    // Iteration cap, overridable for testing via CARGO_FIX_MAX_RETRIES.
+    let iterations = env::var("CARGO_FIX_MAX_RETRIES")
+        .ok()
+        .and_then(|n| n.parse().ok())
+        .unwrap_or(4);
+    for _ in 0..iterations {
+        // Snapshot per-file fix counts so we can detect progress afterwards.
+        last_fix_counts.clear();
+        for (path, file) in fixes.files.iter_mut() {
+            last_fix_counts.insert(path.clone(), file.fixes_applied);
+            file.errors_applying_fixes.clear(); // we'll generate new errors below
+        }
+        rustfix_and_fix(&mut fixes, rustc, filename, args)?;
+        let mut progress_yet_to_be_made = false;
+        for (path, file) in fixes.files.iter_mut() {
+            if file.errors_applying_fixes.is_empty() {
+                continue
+            }
+            // If anything was successfully fixed *and* there's at least one
+            // error, then assume the error was spurious and we'll try again on
+            // the next iteration.
+            if file.fixes_applied != *last_fix_counts.get(path).unwrap_or(&0) {
+                progress_yet_to_be_made = true;
+            }
+        }
+        if !progress_yet_to_be_made {
+            break
+        }
+    }
+
+    // Any errors still remaining at this point need to be reported as probably
+    // bugs in Cargo and/or rustfix.
+    for (path, file) in fixes.files.iter_mut() {
+        for error in file.errors_applying_fixes.drain(..) {
+            Message::ReplaceFailed {
+                file: path.clone(),
+                message: error,
+            }.post()?;
+        }
+    }
+
+    Ok(fixes)
+}
+
+/// Execute `rustc` to apply one round of suggestions to the crate in question.
+///
+/// This will fill in the `fixes` map with original code, suggestions applied,
+/// and any errors encountered while fixing files.
+fn rustfix_and_fix(fixes: &mut FixedCrate, rustc: &Path, filename: &Path, args: &FixArgs)
+    -> Result<(), Error>
+{
+    // If not empty, filter by these lints
+    //
+    // TODO: Implement a way to specify this
+    let only = HashSet::new();
+
+    // Run rustc with JSON diagnostics so we can machine-parse suggestions.
+    let mut cmd = Command::new(rustc);
+    cmd.arg("--error-format=json");
+    args.apply(&mut cmd);
+    let output = cmd.output()
+        .with_context(|_| format!("failed to execute `{}`", rustc.display()))?;
+
+    // If rustc didn't succeed for whatever reasons then we're very likely to be
+    // looking at otherwise broken code. Let's not make things accidentally
+    // worse by applying fixes where a bug could cause *more* broken code.
+    // Instead, punt upwards which will reexec rustc over the original code,
+    // displaying pretty versions of the diagnostics we just read out.
+    if !output.status.success() && env::var_os(BROKEN_CODE_ENV).is_none() {
+        debug!(
+            "rustfixing `{:?}` failed, rustc exited with {:?}",
+            filename,
+            output.status.code()
+        );
+        return Ok(());
+    }
+
+    // With __CARGO_FIX_YOLO set, apply every suggestion; otherwise only the
+    // ones rustc marks as machine-applicable.
+    let fix_mode = env::var_os("__CARGO_FIX_YOLO")
+        .map(|_| rustfix::Filter::Everything)
+        .unwrap_or(rustfix::Filter::MachineApplicableOnly);
+
+    // Sift through the output of the compiler to look for JSON messages
+    // indicating fixes that we can apply.
+    let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as utf-8")?;
+
+    let suggestions = stderr.lines()
+        .filter(|x| !x.is_empty())
+        .inspect(|y| trace!("line: {}", y))
+
+        // Parse each line of stderr ignoring errors as they may not all be json
+        .filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok())
+
+        // From each diagnostic try to extract suggestions from rustc
+        .filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode));
+
+    // Collect suggestions by file so we can apply them one at a time later.
+    let mut file_map = HashMap::new();
+    let mut num_suggestion = 0;
+    for suggestion in suggestions {
+        trace!("suggestion");
+        // Make sure we've got a file associated with this suggestion and all
+        // snippets point to the same location. Right now it's not clear what
+        // we would do with multiple locations.
+        let (file_name, range) = match suggestion.snippets.get(0) {
+            Some(s) => (s.file_name.clone(), s.line_range),
+            None => {
+                trace!("rejecting as it has no snippets {:?}", suggestion);
+                continue;
+            }
+        };
+        if !suggestion
+            .snippets
+            .iter()
+            .all(|s| s.file_name == file_name && s.line_range == range)
+        {
+            trace!("rejecting as it spans multiple files {:?}", suggestion);
+            continue;
+        }
+
+        file_map
+            .entry(file_name)
+            .or_insert_with(Vec::new)
+            .push(suggestion);
+        num_suggestion += 1;
+    }
+
+    debug!(
+        "collected {} suggestions for `{}`",
+        num_suggestion,
+        filename.display(),
+    );
+
+    for (file, suggestions) in file_map {
+        // Attempt to read the source code for this file. If this fails then
+        // that'd be pretty surprising, so log a message and otherwise keep
+        // going.
+        let code = match paths::read(file.as_ref()) {
+            Ok(s) => s,
+            Err(e) => {
+                warn!("failed to read `{}`: {}", file, e);
+                continue;
+            }
+        };
+        let num_suggestions = suggestions.len();
+        debug!("applying {} fixes to {}", num_suggestions, file);
+
+        // If this file doesn't already exist then we just read the original
+        // code, so save it. If the file already exists then the original code
+        // doesn't need to be updated as we've just read an interim state with
+        // some fixes but perhaps not all.
+        let fixed_file = fixes.files.entry(file.clone())
+            .or_insert_with(|| {
+                FixedFile {
+                    errors_applying_fixes: Vec::new(),
+                    fixes_applied: 0,
+                    original_code: code.clone(),
+                }
+            });
+        let mut fixed = CodeFix::new(&code);
+
+        // As mentioned above in `rustfix_crate`, we don't immediately warn
+        // about suggestions that fail to apply here, and instead we save them
+        // off for later processing.
+        for suggestion in suggestions.iter().rev() {
+            match fixed.apply(suggestion) {
+                Ok(()) => fixed_file.fixes_applied += 1,
+                Err(e) => fixed_file.errors_applying_fixes.push(e.to_string()),
+            }
+        }
+        // Write the (possibly partially) fixed code back to disk.
+        let new_code = fixed.finish()?;
+        fs::write(&file, new_code)
+            .with_context(|_| format!("failed to write file `{}`", file))?;
+    }
+
+    Ok(())
+}
+
+/// Exits the current process, propagating the child's exit code.
+///
+/// On Unix a signal-terminated child has no exit code; report the signal and
+/// exit with 2. If no code is available at all, exit with 3.
+fn exit_with(status: ExitStatus) -> ! {
+    #[cfg(unix)]
+    {
+        use std::os::unix::prelude::*;
+        if let Some(signal) = status.signal() {
+            eprintln!("child failed with signal `{}`", signal);
+            process::exit(2);
+        }
+    }
+    process::exit(status.code().unwrap_or(3));
+}
+
+/// Reports that applied fixes broke the build: parses the JSON diagnostics
+/// from rustc's stderr to collect the affected file names, recovers the crate
+/// name from this process's own `--crate-name` argument, and posts a
+/// `FixFailed` message back to the diagnostic server.
+fn log_failed_fix(stderr: &[u8]) -> Result<(), Error> {
+    let stderr = str::from_utf8(stderr).context("failed to parse rustc stderr as utf-8")?;
+
+    // Lines that aren't valid JSON diagnostics are silently skipped.
+    let diagnostics = stderr
+        .lines()
+        .filter(|x| !x.is_empty())
+        .filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok());
+    let mut files = BTreeSet::new();
+    for diagnostic in diagnostics {
+        for span in diagnostic.spans.into_iter() {
+            files.insert(span.file_name);
+        }
+    }
+    // Scan our own argv for `--crate-name <name>`: the value follows the flag.
+    let mut krate = None;
+    let mut prev_dash_dash_krate_name = false;
+    for arg in env::args() {
+        if prev_dash_dash_krate_name {
+            krate = Some(arg.clone());
+        }
+
+        if arg == "--crate-name" {
+            prev_dash_dash_krate_name = true;
+        } else {
+            prev_dash_dash_krate_name = false;
+        }
+    }
+
+    let files = files.into_iter().collect();
+    Message::FixFailed { files, krate }.post()?;
+
+    Ok(())
+}
+
+/// The rustc command line and fix configuration, as reconstructed from this
+/// process's arguments and the `__CARGO_FIX_*` environment variables.
+#[derive(Default)]
+struct FixArgs {
+    /// The `.rs` file being compiled, if one was found among the arguments.
+    file: Option<PathBuf>,
+    /// Which edition (if any) we are preparing the code for.
+    prepare_for_edition: PrepareFor,
+    /// Whether edition idiom lints were requested.
+    idioms: bool,
+    /// The edition enabled via `--edition=`, if any.
+    enabled_edition: Option<String>,
+    /// All remaining rustc arguments, passed through untouched.
+    other: Vec<OsString>,
+    /// Whether this crate is a primary (workspace) package.
+    primary_package: bool,
+}
+
+/// Which edition `cargo fix` is preparing the code for.
+enum PrepareFor {
+    /// Prepare for the edition after the currently enabled one.
+    Next,
+    /// Prepare for a specific, named edition.
+    Edition(String),
+    /// Not preparing for any edition.
+    None,
+}
+
+impl Default for PrepareFor {
+    fn default() -> PrepareFor {
+        PrepareFor::None
+    }
+}
+
+impl FixArgs {
+    /// Builds a `FixArgs` from this process's own command line (everything
+    /// after argv[0]) and the `__CARGO_FIX_*` environment variables.
+    fn get() -> FixArgs {
+        let mut ret = FixArgs::default();
+        for arg in env::args_os().skip(1) {
+            let path = PathBuf::from(arg);
+            // The first existing `.rs` argument is taken as the file to fix.
+            if path.extension().and_then(|s| s.to_str()) == Some("rs") {
+                if path.exists() {
+                    ret.file = Some(path);
+                    continue
+                }
+            }
+            // `--edition=XXXX` is captured separately so `apply` can re-emit
+            // it (and so we can compute the next edition).
+            if let Some(s) = path.to_str() {
+                let prefix = "--edition=";
+                if s.starts_with(prefix) {
+                    ret.enabled_edition = Some(s[prefix.len()..].to_string());
+                    continue
+                }
+            }
+            ret.other.push(path.into());
+        }
+        if let Ok(s) = env::var(PREPARE_FOR_ENV) {
+            ret.prepare_for_edition = PrepareFor::Edition(s);
+        } else if env::var(EDITION_ENV).is_ok() {
+            ret.prepare_for_edition = PrepareFor::Next;
+        }
+        ret.idioms = env::var(IDIOMS_ENV).is_ok();
+        ret.primary_package = env::var("CARGO_PRIMARY_PACKAGE").is_ok();
+        ret
+    }
+
+    /// Reapplies the captured arguments to a rustc `Command`, capping lints
+    /// at warnings and adding edition/idiom/compatibility lint flags.
+    fn apply(&self, cmd: &mut Command) {
+        if let Some(path) = &self.file {
+            cmd.arg(path);
+        }
+        cmd.args(&self.other)
+            .arg("--cap-lints=warn");
+        if let Some(edition) = &self.enabled_edition {
+            cmd.arg("--edition").arg(edition);
+            // Idiom lints only apply to primary packages on the 2018 edition.
+            if self.idioms && self.primary_package {
+                if edition == "2018" { cmd.arg("-Wrust-2018-idioms"); }
+            }
+        }
+        if self.primary_package {
+            if let Some(edition) = self.prepare_for_edition_resolve() {
+                cmd.arg("-W").arg(format!("rust-{}-compatibility", edition));
+            }
+        }
+    }
+
+    /// Verify that we're not both preparing for an enabled edition and enabling
+    /// the edition.
+    ///
+    /// This indicates that `cargo fix --prepare-for` is being executed out of
+    /// order with enabling the edition itself, meaning that we wouldn't
+    /// actually be able to fix anything! If it looks like this is happening
+    /// then yield an error to the user, indicating that this is happening.
+    fn verify_not_preparing_for_enabled_edition(&self) -> CargoResult<()> {
+        let edition = match self.prepare_for_edition_resolve() {
+            Some(s) => s,
+            None => return Ok(()),
+        };
+        let enabled = match &self.enabled_edition {
+            Some(s) => s,
+            None => return Ok(()),
+        };
+        if edition != enabled {
+            return Ok(())
+        }
+        let path = match &self.file {
+            Some(s) => s,
+            None => return Ok(()),
+        };
+
+        Message::EditionAlreadyEnabled {
+            file: path.display().to_string(),
+            edition: edition.to_string(),
+        }.post()?;
+
+        process::exit(1);
+    }
+
+    /// Resolves the edition we are preparing for, if any: a named edition is
+    /// returned as-is, `Next` is computed from the enabled edition.
+    fn prepare_for_edition_resolve(&self) -> Option<&str> {
+        match &self.prepare_for_edition {
+            PrepareFor::Edition(s) => Some(s),
+            PrepareFor::Next => Some(self.next_edition()),
+            PrepareFor::None => None,
+        }
+    }
+
+    /// Returns the edition after the currently enabled one.
+    fn next_edition(&self) -> &str {
+        match self.enabled_edition.as_ref().map(|s| &**s) {
+            // 2015 -> 2018,
+            None | Some("2015") => "2018",
+
+            // This'll probably be wrong in 2020, but that's future Cargo's
+            // problem. Eventually though we'll just add more editions here as
+            // necessary.
+            _ => "2018",
+        }
+    }
+}
diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs
new file mode 100644 (file)
index 0000000..5547a75
--- /dev/null
@@ -0,0 +1,171 @@
+use std::io::prelude::*;
+
+use toml;
+
+use core::resolver::WorkspaceResolve;
+use core::{resolver, Resolve, Workspace};
+use util::errors::{CargoResult, CargoResultExt};
+use util::toml as cargo_toml;
+use util::Filesystem;
+
+/// Loads the workspace's `Cargo.lock`, returning `Ok(None)` when the file
+/// does not exist.
+///
+/// The lockfile is parsed as TOML into an `EncodableResolve` and then
+/// converted into a full `Resolve`; both read and parse failures carry the
+/// lockfile's path for context.
+pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
+    if !ws.root().join("Cargo.lock").exists() {
+        return Ok(None);
+    }
+
+    let root = Filesystem::new(ws.root().to_path_buf());
+    let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?;
+
+    let mut s = String::new();
+    f.read_to_string(&mut s)
+        .chain_err(|| format!("failed to read file: {}", f.path().display()))?;
+
+    // The closure lets one `chain_err` cover every step of parsing.
+    let resolve =
+        (|| -> CargoResult<Option<Resolve>> {
+            let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
+            let v: resolver::EncodableResolve = resolve.try_into()?;
+            Ok(Some(v.into_resolve(ws)?))
+        })().chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
+    Ok(resolve)
+}
+
+/// Writes `resolve` out as the workspace's `Cargo.lock`.
+///
+/// The contents are rendered manually (via `emit_package`) to control
+/// formatting, leading `#` comment lines from any existing lockfile are
+/// preserved, and the file is only rewritten when the contents actually
+/// change. Errors when an update is needed but lockfile updates are
+/// disallowed (`--locked`, `--frozen`, or offline).
+pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
+    // Load the original lockfile if it exists.
+    let ws_root = Filesystem::new(ws.root().to_path_buf());
+    let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
+    let orig = orig.and_then(|mut f| {
+        let mut s = String::new();
+        f.read_to_string(&mut s)?;
+        Ok(s)
+    });
+
+    let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap();
+
+    let mut out = String::new();
+
+    // Preserve the top comments in the lockfile
+    // This is in preparation for marking it as generated
+    // https://github.com/rust-lang/cargo/issues/6180
+    if let Ok(orig) = &orig {
+        for line in orig.lines().take_while(|line| line.starts_with("#")) {
+            out.push_str(line);
+            out.push_str("\n");
+        }
+    }
+
+    let deps = toml["package"].as_array().unwrap();
+    for dep in deps.iter() {
+        let dep = dep.as_table().unwrap();
+
+        out.push_str("[[package]]\n");
+        emit_package(dep, &mut out);
+    }
+
+    if let Some(patch) = toml.get("patch") {
+        let list = patch["unused"].as_array().unwrap();
+        for entry in list {
+            out.push_str("[[patch.unused]]\n");
+            emit_package(entry.as_table().unwrap(), &mut out);
+            out.push_str("\n");
+        }
+    }
+
+    if let Some(meta) = toml.get("metadata") {
+        out.push_str("[metadata]\n");
+        out.push_str(&meta.to_string());
+    }
+
+    // If the lockfile contents haven't changed, don't rewrite it. This is
+    // helpful on read-only filesystems.
+    if let Ok(orig) = orig {
+        if are_equal_lockfiles(orig, &out, ws) {
+            return Ok(());
+        }
+    }
+
+    // An update is required; refuse to proceed when lockfile updates are
+    // not allowed, naming the flag responsible in the error.
+    if !ws.config().lock_update_allowed() {
+        if ws.config().cli_unstable().offline {
+            bail!("can't update in the offline mode");
+        }
+
+        let flag = if ws.config().network_allowed() {
+            "--locked"
+        } else {
+            "--frozen"
+        };
+        bail!(
+            "the lock file {} needs to be updated but {} was passed to \
+             prevent this",
+            ws.root().to_path_buf().join("Cargo.lock").display(),
+            flag
+        );
+    }
+
+    // Ok, if that didn't work just write it out
+    ws_root
+        .open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
+        .and_then(|mut f| {
+            // Truncate first so stale trailing bytes can't survive a
+            // shorter rewrite.
+            f.file().set_len(0)?;
+            f.write_all(out.as_bytes())?;
+            Ok(())
+        })
+        .chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;
+    Ok(())
+}
+
+/// Returns `true` when the on-disk lockfile (`orig`) and the freshly
+/// generated contents (`current`) are equivalent, so no rewrite is needed.
+fn are_equal_lockfiles(mut orig: String, current: &str, ws: &Workspace) -> bool {
+    // Normalize Windows line endings so the comparison is content-based.
+    if has_crlf_line_endings(&orig) {
+        orig = orig.replace("\r\n", "\n");
+    }
+
+    // If we want to try and avoid updating the lockfile, parse both and
+    // compare them; since this is somewhat expensive, don't do it in the
+    // common case where we can update lockfiles.
+    if !ws.config().lock_update_allowed() {
+        let res: CargoResult<bool> = (|| {
+            let old: resolver::EncodableResolve = toml::from_str(&orig)?;
+            let new: resolver::EncodableResolve = toml::from_str(current)?;
+            Ok(old.into_resolve(ws)? == new.into_resolve(ws)?)
+        })();
+        if let Ok(true) = res {
+            return true;
+        }
+    }
+
+    // Fall back to a plain textual comparison.
+    current == orig
+}
+
+/// Heuristically detects CRLF line endings by inspecting only the first line.
+fn has_crlf_line_endings(s: &str) -> bool {
+    // Only check the first line.
+    if let Some(lf) = s.find('\n') {
+        s[..lf].ends_with('\r')
+    } else {
+        false
+    }
+}
+
+/// Appends one package table to `out` with keys in a fixed order:
+/// `name`, `version`, optional `source`, then either a non-empty
+/// `dependencies` array or a `replace` key. A trailing blank line is only
+/// emitted in the `dependencies`/`replace` cases.
+fn emit_package(dep: &toml::value::Table, out: &mut String) {
+    out.push_str(&format!("name = {}\n", &dep["name"]));
+    out.push_str(&format!("version = {}\n", &dep["version"]));
+
+    if dep.contains_key("source") {
+        out.push_str(&format!("source = {}\n", &dep["source"]));
+    }
+
+    if let Some(s) = dep.get("dependencies") {
+        let slice = s.as_array().unwrap();
+
+        // An empty dependency list is omitted entirely.
+        if !slice.is_empty() {
+            out.push_str("dependencies = [\n");
+
+            for child in slice.iter() {
+                out.push_str(&format!(" {},\n", child));
+            }
+
+            out.push_str("]\n");
+        }
+        out.push_str("\n");
+    } else if dep.contains_key("replace") {
+        out.push_str(&format!("replace = {}\n\n", &dep["replace"]));
+    }
+}
diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs
new file mode 100644 (file)
index 0000000..3b653b0
--- /dev/null
@@ -0,0 +1,43 @@
+// Public surface of `cargo::ops`: re-exports of each operation's entry
+// points, followed by the private submodule declarations themselves.
+pub use self::cargo_clean::{clean, CleanOptions};
+pub use self::cargo_compile::{compile, compile_with_exec, compile_ws, CompileOptions};
+pub use self::cargo_compile::{CompileFilter, FilterRule, Packages};
+pub use self::cargo_read_manifest::{read_package, read_packages};
+pub use self::cargo_run::run;
+pub use self::cargo_install::{install, install_list, uninstall};
+pub use self::cargo_new::{init, new, NewOptions, VersionControl};
+pub use self::cargo_doc::{doc, DocOptions};
+pub use self::cargo_generate_lockfile::generate_lockfile;
+pub use self::cargo_generate_lockfile::update_lockfile;
+pub use self::cargo_generate_lockfile::UpdateOptions;
+pub use self::lockfile::{load_pkg_lockfile, write_pkg_lockfile};
+pub use self::cargo_test::{run_benches, run_tests, TestOptions};
+pub use self::cargo_package::{package, PackageOpts};
+pub use self::registry::{publish, registry_configuration, RegistryConfig};
+pub use self::registry::{http_handle, needs_custom_http_transport, registry_login, search};
+pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
+pub use self::registry::{configure_http_handle, http_handle_and_timeout};
+pub use self::registry::HttpTimeout;
+pub use self::cargo_fetch::{fetch, FetchOptions};
+pub use self::cargo_pkgid::pkgid;
+pub use self::resolve::{add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws,
+                        resolve_ws_precisely, resolve_ws_with_method};
+pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};
+pub use self::fix::{fix, FixOptions, fix_maybe_exec_rustc};
+
+mod cargo_clean;
+mod cargo_compile;
+mod cargo_doc;
+mod cargo_fetch;
+mod cargo_generate_lockfile;
+mod cargo_install;
+mod cargo_new;
+mod cargo_output_metadata;
+mod cargo_package;
+mod cargo_pkgid;
+mod cargo_read_manifest;
+mod cargo_run;
+mod cargo_test;
+mod lockfile;
+mod registry;
+mod resolve;
+mod fix;
diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs
new file mode 100644 (file)
index 0000000..3932abd
--- /dev/null
@@ -0,0 +1,709 @@
+use std::collections::BTreeMap;
+use std::fs::{self, File};
+use std::iter::repeat;
+use std::str;
+use std::time::Duration;
+use std::{cmp, env};
+
+use log::Level;
+use curl::easy::{Easy, SslOpt, InfoType};
+use git2;
+use registry::{NewCrate, NewCrateDependency, Registry};
+
+use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
+
+use core::dependency::Kind;
+use core::manifest::ManifestMetadata;
+use core::source::Source;
+use core::{Package, SourceId, Workspace};
+use ops;
+use sources::{RegistrySource, SourceConfigMap};
+use util::config::{self, Config};
+use util::errors::{CargoResult, CargoResultExt};
+use util::important_paths::find_root_manifest_for_wd;
+use util::paths;
+use util::ToUrl;
+use version;
+
+/// Registry settings loaded from Cargo configuration.
+pub struct RegistryConfig {
+    /// Index URL for the registry, if one is configured.
+    pub index: Option<String>,
+    /// Authentication token for the registry, if one is configured.
+    pub token: Option<String>,
+}
+
+/// Options controlling `cargo publish`.
+pub struct PublishOpts<'cfg> {
+    pub config: &'cfg Config,
+    /// Token overriding any configured one.
+    pub token: Option<String>,
+    /// Index URL overriding any configured one.
+    pub index: Option<String>,
+    /// Whether to verify the package by building it before upload.
+    pub verify: bool,
+    /// Allow publishing with uncommitted VCS changes.
+    pub allow_dirty: bool,
+    pub jobs: Option<u32>,
+    pub target: Option<String>,
+    /// Perform all checks and packaging but skip the actual upload.
+    pub dry_run: bool,
+    /// Alternative registry to publish to (defaults to crates.io).
+    pub registry: Option<String>,
+}
+
+/// Packages the current workspace member and uploads it to a registry.
+pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
+    let pkg = ws.current()?;
+
+    // `publish = [...]` in the manifest restricts which registries the crate
+    // may go to; publishing is refused unless the requested registry is in
+    // that list (no registry at all is always refused here).
+    if let Some(ref allowed_registries) = *pkg.publish() {
+        if !match opts.registry {
+            Some(ref registry) => allowed_registries.contains(registry),
+            None => false,
+        } {
+            bail!(
+                "some crates cannot be published.\n\
+                 `{}` is marked as unpublishable",
+                pkg.name()
+            );
+        }
+    }
+
+    if !pkg.manifest().patch().is_empty() {
+        bail!("published crates cannot contain [patch] sections");
+    }
+
+    let (mut registry, reg_id) = registry(
+        opts.config,
+        opts.token.clone(),
+        opts.index.clone(),
+        opts.registry.clone(),
+    )?;
+    verify_dependencies(pkg, &reg_id)?;
+
+    // Prepare a tarball, with a non-suppressible warning if metadata
+    // is missing since this is being put online.
+    let tarball = ops::package(
+        ws,
+        &ops::PackageOpts {
+            config: opts.config,
+            verify: opts.verify,
+            list: false,
+            check_metadata: true,
+            allow_dirty: opts.allow_dirty,
+            target: opts.target.clone(),
+            jobs: opts.jobs,
+            registry: opts.registry.clone(),
+        },
+    )?.unwrap();
+
+    // Upload said tarball to the specified destination
+    opts.config
+        .shell()
+        .status("Uploading", pkg.package_id().to_string())?;
+    transmit(
+        opts.config,
+        pkg,
+        tarball.file(),
+        &mut registry,
+        &reg_id,
+        opts.dry_run,
+    )?;
+
+    Ok(())
+}
+
+/// Checks that every dependency of `pkg` can be expressed on the target
+/// registry: path dependencies need an explicit version, and dependencies
+/// from other sources are rejected (other registries only when publishing
+/// to crates.io; git/other sources always).
+fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> {
+    for dep in pkg.dependencies().iter() {
+        if dep.source_id().is_path() {
+            // Path deps are stripped on publish, so a version req is the
+            // only way the registry can resolve them.
+            if !dep.specified_req() {
+                bail!(
+                    "all path dependencies must have a version specified \
+                     when publishing.\ndependency `{}` does not specify \
+                     a version",
+                    dep.package_name()
+                )
+            }
+        } else if dep.source_id() != registry_src {
+            if dep.source_id().is_registry() {
+                // Block requests to send to a registry if it is not an alternative
+                // registry
+                if !registry_src.is_alt_registry() {
+                    bail!("crates cannot be published to crates.io with dependencies sourced from other\n\
+                           registries either publish `{}` on crates.io or pull it into this repository\n\
+                           and specify it with a path and version\n\
+                           (crate `{}` is pulled from {})",
+                          dep.package_name(),
+                          dep.package_name(),
+                          dep.source_id());
+                }
+            } else {
+                bail!(
+                    "crates cannot be published to crates.io with dependencies sourced from \
+                     a repository\neither publish `{}` as its own crate on crates.io and \
+                     specify a crates.io version as a dependency or pull it into this \
+                     repository and specify it with a path and version\n(crate `{}` has \
+                     repository path `{}`)",
+                    dep.package_name(),
+                    dep.package_name(),
+                    dep.source_id()
+                );
+            }
+        }
+    }
+    Ok(())
+}
+
+/// Builds the `NewCrate` metadata for `pkg` and uploads it together with the
+/// tarball to `registry`; in `dry_run` mode everything up to the upload is
+/// performed and then a warning is emitted instead. Warnings returned by the
+/// registry (invalid categories/badges) are surfaced on the shell.
+fn transmit(
+    config: &Config,
+    pkg: &Package,
+    tarball: &File,
+    registry: &mut Registry,
+    registry_id: &SourceId,
+    dry_run: bool,
+) -> CargoResult<()> {
+    let deps = pkg.dependencies()
+        .iter()
+        .map(|dep| {
+            // If the dependency is from a different registry, then include the
+            // registry in the dependency.
+            let dep_registry_id = match dep.registry_id() {
+                Some(id) => id,
+                None => bail!("dependency missing registry ID"),
+            };
+            let dep_registry = if dep_registry_id != registry_id {
+                Some(dep_registry_id.url().to_string())
+            } else {
+                None
+            };
+
+            Ok(NewCrateDependency {
+                optional: dep.is_optional(),
+                default_features: dep.uses_default_features(),
+                name: dep.package_name().to_string(),
+                features: dep.features().iter().map(|s| s.to_string()).collect(),
+                version_req: dep.version_req().to_string(),
+                target: dep.platform().map(|s| s.to_string()),
+                kind: match dep.kind() {
+                    Kind::Normal => "normal",
+                    Kind::Build => "build",
+                    Kind::Development => "dev",
+                }.to_string(),
+                registry: dep_registry,
+                explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()),
+            })
+        })
+        .collect::<CargoResult<Vec<NewCrateDependency>>>()?;
+    let manifest = pkg.manifest();
+    let ManifestMetadata {
+        ref authors,
+        ref description,
+        ref homepage,
+        ref documentation,
+        ref keywords,
+        ref readme,
+        ref repository,
+        ref license,
+        ref license_file,
+        ref categories,
+        ref badges,
+        ref links,
+    } = *manifest.metadata();
+    // The readme is inlined into the upload; the license file only needs to
+    // exist (its contents ship inside the tarball).
+    let readme_content = match *readme {
+        Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
+        None => None,
+    };
+    if let Some(ref file) = *license_file {
+        if fs::metadata(&pkg.root().join(file)).is_err() {
+            bail!("the license file `{}` does not exist", file)
+        }
+    }
+
+    // Do not upload if performing a dry run
+    if dry_run {
+        config.shell().warn("aborting upload due to dry run")?;
+        return Ok(());
+    }
+
+    let summary = pkg.summary();
+    let string_features = summary
+        .features()
+        .iter()
+        .map(|(feat, values)| {
+            (
+                feat.to_string(),
+                values.iter().map(|fv| fv.to_string(&summary)).collect(),
+            )
+        })
+        .collect::<BTreeMap<String, Vec<String>>>();
+
+    let publish = registry.publish(
+        &NewCrate {
+            name: pkg.name().to_string(),
+            vers: pkg.version().to_string(),
+            deps,
+            features: string_features,
+            authors: authors.clone(),
+            description: description.clone(),
+            homepage: homepage.clone(),
+            documentation: documentation.clone(),
+            keywords: keywords.clone(),
+            categories: categories.clone(),
+            readme: readme_content,
+            readme_file: readme.clone(),
+            repository: repository.clone(),
+            license: license.clone(),
+            license_file: license_file.clone(),
+            badges: badges.clone(),
+            links: links.clone(),
+        },
+        tarball,
+    );
+
+    match publish {
+        Ok(warnings) => {
+            if !warnings.invalid_categories.is_empty() {
+                let msg = format!(
+                    "\
+                     the following are not valid category slugs and were \
+                     ignored: {}. Please see https://crates.io/category_slugs \
+                     for the list of all category slugs. \
+                     ",
+                    warnings.invalid_categories.join(", ")
+                );
+                config.shell().warn(&msg)?;
+            }
+
+            if !warnings.invalid_badges.is_empty() {
+                let msg = format!(
+                    "\
+                     the following are not valid badges and were ignored: {}. \
+                     Either the badge type specified is unknown or a required \
+                     attribute is missing. Please see \
+                     http://doc.crates.io/manifest.html#package-metadata \
+                     for valid badge types and their required attributes.",
+                    warnings.invalid_badges.join(", ")
+                );
+                config.shell().warn(&msg)?;
+            }
+
+            Ok(())
+        }
+        Err(e) => Err(e),
+    }
+}
+
+/// Reads the index URL and token for `registry` from configuration; with no
+/// registry named, the default `registry.index`/`registry.token` keys are
+/// consulted instead.
+pub fn registry_configuration(
+    config: &Config,
+    registry: Option<String>,
+) -> CargoResult<RegistryConfig> {
+    let (index, token) = match registry {
+        Some(registry) => (
+            Some(config.get_registry_index(&registry)?.to_string()),
+            config
+                .get_string(&format!("registries.{}.token", registry))?
+                .map(|p| p.val),
+        ),
+        None => {
+            // Fall back to the default registry's index and token.
+            (
+                config.get_string("registry.index")?.map(|p| p.val),
+                config.get_string("registry.token")?.map(|p| p.val),
+            )
+        }
+    };
+
+    Ok(RegistryConfig { index, token })
+}
+
+/// Constructs a `Registry` client plus its `SourceId`. Explicit `token` and
+/// `index` arguments take precedence over configured values; the API host is
+/// discovered by updating the registry source and reading its config.
+pub fn registry(
+    config: &Config,
+    token: Option<String>,
+    index: Option<String>,
+    registry: Option<String>,
+) -> CargoResult<(Registry, SourceId)> {
+    // Parse all configuration options
+    let RegistryConfig {
+        token: token_config,
+        index: index_config,
+    } = registry_configuration(config, registry.clone())?;
+    let token = token.or(token_config);
+    let sid = get_source_id(config, index_config.or(index), registry)?;
+    let api_host = {
+        let mut src = RegistrySource::remote(&sid, config);
+        // NOTE(review): `src.config()` and `.api` are unwrapped — presumably
+        // a just-updated registry always carries an API endpoint; confirm.
+        src.update()
+            .chain_err(|| format!("failed to update {}", sid))?;
+        (src.config()?).unwrap().api.unwrap()
+    };
+    let handle = http_handle(config)?;
+    Ok((Registry::new_handle(api_host, token, handle), sid))
+}
+
+/// Create a new HTTP handle with appropriate global configuration for cargo.
+///
+/// Convenience wrapper around `http_handle_and_timeout` that also applies
+/// the timeout settings to the handle before returning it.
+pub fn http_handle(config: &Config) -> CargoResult<Easy> {
+    let (mut handle, timeout) = http_handle_and_timeout(config)?;
+    timeout.configure(&mut handle)?;
+    Ok(handle)
+}
+
+/// Creates a configured curl `Easy` handle and returns it together with the
+/// timeout settings (not yet applied to the handle). Fails when network
+/// access is disallowed (`--frozen` / offline mode).
+pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> {
+    if config.frozen() {
+        bail!(
+            "attempting to make an HTTP request, but --frozen was \
+             specified"
+        )
+    }
+    if !config.network_allowed() {
+        bail!("can't make HTTP request in the offline mode")
+    }
+
+    // The timeout option for libcurl by default times out the entire transfer,
+    // but we probably don't want this. Instead we only set timeouts for the
+    // connect phase as well as a "low speed" timeout so if we don't receive
+    // many bytes in a large-ish period of time then we time out.
+    let mut handle = Easy::new();
+    let timeout = configure_http_handle(config, &mut handle)?;
+    Ok((handle, timeout))
+}
+
+/// Returns `true` when any HTTP-related configuration (proxy, non-default
+/// timeout, CA info, revocation checking, user agent) is set, meaning the
+/// default transport can't be used as-is.
+pub fn needs_custom_http_transport(config: &Config) -> CargoResult<bool> {
+    let proxy_exists = http_proxy_exists(config)?;
+    let timeout = HttpTimeout::new(config)?.is_non_default();
+    let cainfo = config.get_path("http.cainfo")?;
+    let check_revoke = config.get_bool("http.check-revoke")?;
+    let user_agent = config.get_string("http.user-agent")?;
+
+    Ok(proxy_exists
+        || timeout
+        || cainfo.is_some()
+        || check_revoke.is_some()
+        || user_agent.is_some())
+}
+
+/// Configure a libcurl http handle with the defaults options for Cargo
+///
+/// Applies proxy, CA info, revocation-check, and user-agent settings from
+/// config, and — when `http.debug` is set — wires libcurl's verbose output
+/// into the `log` crate. Returns the timeout settings for the caller to
+/// apply.
+pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<HttpTimeout> {
+    if let Some(proxy) = http_proxy(config)? {
+        handle.proxy(&proxy)?;
+    }
+    if let Some(cainfo) = config.get_path("http.cainfo")? {
+        handle.cainfo(&cainfo.val)?;
+    }
+    if let Some(check) = config.get_bool("http.check-revoke")? {
+        handle.ssl_options(SslOpt::new().no_revoke(!check.val))?;
+    }
+    if let Some(user_agent) = config.get_string("http.user-agent")? {
+        handle.useragent(&user_agent.val)?;
+    } else {
+        handle.useragent(&version().to_string())?;
+    }
+
+    if let Some(true) = config.get::<Option<bool>>("http.debug")? {
+        handle.verbose(true)?;
+        handle.debug_function(|kind, data| {
+            // Headers and informational text log at Debug; raw payload data
+            // at Trace; SSL data and anything unknown is skipped entirely.
+            let (prefix, level) = match kind {
+                InfoType::Text => ("*", Level::Debug),
+                InfoType::HeaderIn => ("<", Level::Debug),
+                InfoType::HeaderOut => (">", Level::Debug),
+                InfoType::DataIn => ("{", Level::Trace),
+                InfoType::DataOut => ("}", Level::Trace),
+                InfoType::SslDataIn |
+                InfoType::SslDataOut => return,
+                _ => return,
+            };
+            match str::from_utf8(data) {
+                Ok(s) => {
+                    for line in s.lines() {
+                        log!(level, "http-debug: {} {}", prefix, line);
+                    }
+                }
+                Err(_) => {
+                    log!(level, "http-debug: {} ({} bytes of data)", prefix, data.len());
+                }
+            }
+        })?;
+    }
+
+    HttpTimeout::new(config)
+}
+
+/// HTTP timeout settings for a curl handle; must be applied via
+/// [`HttpTimeout::configure`].
+#[must_use]
+pub struct HttpTimeout {
+    /// Connect timeout and low-speed time window.
+    pub dur: Duration,
+    /// Minimum transfer speed (bytes/sec) below which the request times out.
+    pub low_speed_limit: u32,
+}
+
+impl HttpTimeout {
+    /// Reads timeout settings from config: `http.low-speed-limit`
+    /// (default 10 bytes/sec) and `http.timeout` or the `HTTP_TIMEOUT`
+    /// environment variable (default 30 seconds).
+    pub fn new(config: &Config) -> CargoResult<HttpTimeout> {
+        let low_speed_limit = config.get::<Option<u32>>("http.low-speed-limit")?
+            .unwrap_or(10);
+        let seconds = config.get::<Option<u64>>("http.timeout")?
+            .or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
+            .unwrap_or(30);
+        Ok(HttpTimeout {
+            dur: Duration::new(seconds, 0),
+            low_speed_limit,
+        })
+    }
+
+    /// `true` when either setting differs from the built-in defaults above.
+    fn is_non_default(&self) -> bool {
+        self.dur != Duration::new(30, 0) || self.low_speed_limit != 10
+    }
+
+    /// Applies these settings to a curl handle.
+    pub fn configure(&self, handle: &mut Easy) -> CargoResult<()> {
+        // The timeout option for libcurl by default times out the entire
+        // transfer, but we probably don't want this. Instead we only set
+        // timeouts for the connect phase as well as a "low speed" timeout so
+        // if we don't receive many bytes in a large-ish period of time then we
+        // time out.
+        handle.connect_timeout(self.dur)?;
+        handle.low_speed_time(self.dur)?;
+        handle.low_speed_limit(self.low_speed_limit)?;
+        Ok(())
+    }
+}
+
+/// Find an explicit HTTP proxy if one is available.
+///
+/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified
+/// via environment variables are picked up by libcurl.
+fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
+    if let Some(s) = config.get_string("http.proxy")? {
+        return Ok(Some(s.val));
+    }
+    // Git's global configuration is consulted second; failures to open it
+    // are treated as "no proxy configured".
+    if let Ok(cfg) = git2::Config::open_default() {
+        if let Ok(s) = cfg.get_str("http.proxy") {
+            return Ok(Some(s.to_string()));
+        }
+    }
+    Ok(None)
+}
+
+/// Determine if an http proxy exists.
+///
+/// Checks the following for existence, in order:
+///
+/// * cargo's `http.proxy`
+/// * git's `http.proxy`
+/// * `http_proxy` env var
+/// * `HTTP_PROXY` env var
+/// * `https_proxy` env var
+/// * `HTTPS_PROXY` env var
+fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
+    if http_proxy(config)?.is_some() {
+        Ok(true)
+    } else {
+        // Only existence matters here; the env var values themselves are
+        // consumed by libcurl.
+        Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"]
+            .iter()
+            .any(|v| env::var(v).is_ok()))
+    }
+}
+
+/// Saves `token` for `registry` in the credentials store, skipping the
+/// write when the stored token is already identical.
+pub fn registry_login(config: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
+    let RegistryConfig {
+        token: old_token, ..
+    } = registry_configuration(config, registry.clone())?;
+
+    if let Some(old_token) = old_token {
+        if old_token == token {
+            return Ok(());
+        }
+    }
+
+    config::save_credentials(config, token, registry)
+}
+
+/// Options controlling `cargo owner`.
+pub struct OwnersOptions {
+    /// Crate to operate on; defaults to the current workspace package.
+    pub krate: Option<String>,
+    pub token: Option<String>,
+    pub index: Option<String>,
+    /// Logins to invite as owners.
+    pub to_add: Option<Vec<String>>,
+    /// Logins to remove from the owner list.
+    pub to_remove: Option<Vec<String>>,
+    /// Whether to print the current owner list.
+    pub list: bool,
+    pub registry: Option<String>,
+}
+
+/// Adds, removes, and/or lists owners of a crate on the registry according
+/// to `opts`; the crate name defaults to the current workspace package.
+pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
+    let name = match opts.krate {
+        Some(ref name) => name.clone(),
+        None => {
+            let manifest_path = find_root_manifest_for_wd(config.cwd())?;
+            let ws = Workspace::new(&manifest_path, config)?;
+            ws.current()?.package_id().name().to_string()
+        }
+    };
+
+    let (mut registry, _) = registry(
+        config,
+        opts.token.clone(),
+        opts.index.clone(),
+        opts.registry.clone(),
+    )?;
+
+    if let Some(ref v) = opts.to_add {
+        let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+        let msg = registry
+            .add_owners(&name, &v)
+            .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?;
+
+        config.shell().status("Owner", msg)?;
+    }
+
+    if let Some(ref v) = opts.to_remove {
+        let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
+        config
+            .shell()
+            .status("Owner", format!("removing {:?} from crate {}", v, name))?;
+        registry
+            .remove_owners(&name, &v)
+            .chain_err(|| format!("failed to remove owners from crate {}", name))?;
+    }
+
+    if opts.list {
+        let owners = registry
+            .list_owners(&name)
+            .chain_err(|| format!("failed to list owners of crate {}", name))?;
+        for owner in owners.iter() {
+            // Print "login (name <email>)" with whichever parts are known.
+            print!("{}", owner.login);
+            match (owner.name.as_ref(), owner.email.as_ref()) {
+                (Some(name), Some(email)) => println!(" ({} <{}>)", name, email),
+                (Some(s), None) | (None, Some(s)) => println!(" ({})", s),
+                (None, None) => println!(),
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Yanks (or un-yanks, when `undo` is set) a specific `version` of a crate
+/// on the registry. The crate name defaults to the current workspace
+/// package; the version is mandatory.
+pub fn yank(
+    config: &Config,
+    krate: Option<String>,
+    version: Option<String>,
+    token: Option<String>,
+    index: Option<String>,
+    undo: bool,
+    reg: Option<String>,
+) -> CargoResult<()> {
+    let name = match krate {
+        Some(name) => name,
+        None => {
+            let manifest_path = find_root_manifest_for_wd(config.cwd())?;
+            let ws = Workspace::new(&manifest_path, config)?;
+            ws.current()?.package_id().name().to_string()
+        }
+    };
+    let version = match version {
+        Some(v) => v,
+        None => bail!("a version must be specified to yank"),
+    };
+
+    let (mut registry, _) = registry(config, token, index, reg)?;
+
+    if undo {
+        config
+            .shell()
+            .status("Unyank", format!("{}:{}", name, version))?;
+        registry
+            .unyank(&name, &version)
+            .chain_err(|| "failed to undo a yank")?;
+    } else {
+        config
+            .shell()
+            .status("Yank", format!("{}:{}", name, version))?;
+        registry
+            .yank(&name, &version)
+            .chain_err(|| "failed to yank")?;
+    }
+
+    Ok(())
+}
+
+/// Determines the registry `SourceId`, in precedence order: a named
+/// alternative registry, then an explicit index URL, then crates.io
+/// (honoring any source replacement configured for it).
+fn get_source_id(
+    config: &Config,
+    index: Option<String>,
+    reg: Option<String>,
+) -> CargoResult<SourceId> {
+    match (reg, index) {
+        (Some(r), _) => SourceId::alt_registry(config, &r),
+        (_, Some(i)) => SourceId::for_registry(&i.to_url()?),
+        _ => {
+            let map = SourceConfigMap::new(config)?;
+            let src = map.load(&SourceId::crates_io(config)?)?;
+            Ok(src.replaced_source_id().clone())
+        }
+    }
+}
+
+/// Queries the registry for crates matching `query` and prints up to
+/// `limit` results as `name = "version"` lines, each followed by an
+/// aligned, truncated one-line description when available.
+pub fn search(
+    query: &str,
+    config: &Config,
+    index: Option<String>,
+    limit: u32,
+    reg: Option<String>,
+) -> CargoResult<()> {
+    fn truncate_with_ellipsis(s: &str, max_width: usize) -> String {
+        // We should truncate at grapheme-boundary and compute character-widths,
+        // yet the dependencies on unicode-segmentation and unicode-width are
+        // not worth it.
+        let mut chars = s.chars();
+        let mut prefix = (&mut chars).take(max_width - 1).collect::<String>();
+        if chars.next().is_some() {
+            prefix.push('…');
+        }
+        prefix
+    }
+
+    let sid = get_source_id(config, index, reg)?;
+
+    // Reading the registry config may require a first-time update of the
+    // registry source.
+    let mut regsrc = RegistrySource::remote(&sid, config);
+    let cfg = match regsrc.config() {
+        Ok(c) => c,
+        Err(_) => {
+            regsrc
+                .update()
+                .chain_err(|| format!("failed to update {}", &sid))?;
+            regsrc.config()?
+        }
+    };
+
+    let api_host = cfg.unwrap().api.unwrap();
+    let handle = http_handle(config)?;
+    let mut registry = Registry::new_handle(api_host, None, handle);
+    let (crates, total_crates) = registry
+        .search(query, limit)
+        .chain_err(|| "failed to retrieve search results from the registry")?;
+
+    let names = crates
+        .iter()
+        .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version))
+        .collect::<Vec<String>>();
+
+    // Descriptions start 4 columns past the longest name; the available
+    // width shrinks accordingly but never below 80 columns.
+    let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default();
+
+    let description_length = cmp::max(80, 128 - description_margin);
+
+    let descriptions = crates.iter().map(|krate| {
+        krate
+            .description
+            .as_ref()
+            .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length))
+    });
+
+    for (name, description) in names.into_iter().zip(descriptions) {
+        let line = match description {
+            Some(desc) => {
+                let space = repeat(' ')
+                    .take(description_margin - name.len())
+                    .collect::<String>();
+                name + &space + "# " + &desc
+            }
+            None => name,
+        };
+        println!("{}", line);
+    }
+
+    // The registry caps results at 100; point users at the right follow-up.
+    let search_max_limit = 100;
+    if total_crates > limit && limit < search_max_limit {
+        println!(
+            "... and {} crates more (use --limit N to see more)",
+            total_crates - limit
+        );
+    } else if total_crates > limit && limit >= search_max_limit {
+        println!(
+            "... and {} crates more (go to http://crates.io/search?q={} to see more)",
+            total_crates - limit,
+            percent_encode(query.as_bytes(), QUERY_ENCODE_SET)
+        );
+    }
+
+    Ok(())
+}
diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs
new file mode 100644 (file)
index 0000000..da01a77
--- /dev/null
@@ -0,0 +1,570 @@
+use std::collections::HashSet;
+
+use core::registry::PackageRegistry;
+use core::resolver::{self, Method, Resolve};
+use core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
+use ops;
+use sources::PathSource;
+use util::errors::{CargoResult, CargoResultExt};
+use util::profile;
+
+/// Resolve all dependencies for the workspace using the previous
+/// lockfile as a guide if present.
+///
+/// This function will also write the result of resolution as a new
+/// lockfile.
+pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> {
+    let mut registry = PackageRegistry::new(ws.config())?;
+    let resolve = resolve_with_registry(ws, &mut registry, true)?;
+    let packages = get_resolved_packages(&resolve, registry)?;
+    Ok((packages, resolve))
+}
+
+/// Resolves dependencies for some packages of the workspace,
+/// taking into account `paths` overrides and activated features.
+pub fn resolve_ws_precisely<'a>(
+    ws: &Workspace<'a>,
+    source: Option<Box<Source + 'a>>,
+    features: &[String],
+    all_features: bool,
+    no_default_features: bool,
+    specs: &[PackageIdSpec],
+) -> CargoResult<(PackageSet<'a>, Resolve)> {
+    let features = Method::split_features(features);
+    let method = if all_features {
+        Method::Everything
+    } else {
+        Method::Required {
+            dev_deps: true,
+            features: &features,
+            all_features: false,
+            uses_default_features: !no_default_features,
+        }
+    };
+    resolve_ws_with_method(ws, source, method, specs)
+}
+
+/// Resolves the workspace's dependency graph with the given `method`
+/// controlling feature/dev-dependency selection, optionally preloading
+/// `source` into the registry before resolution starts.
+pub fn resolve_ws_with_method<'a>(
+    ws: &Workspace<'a>,
+    source: Option<Box<Source + 'a>>,
+    method: Method,
+    specs: &[PackageIdSpec],
+) -> CargoResult<(PackageSet<'a>, Resolve)> {
+    let mut registry = PackageRegistry::new(ws.config())?;
+    if let Some(source) = source {
+        registry.add_preloaded(source);
+    }
+    let mut add_patches = true;
+
+    let resolve = if ws.require_optional_deps() {
+        // First, resolve the root_package's *listed* dependencies, as well as
+        // downloading and updating all remotes and such.
+        let resolve = resolve_with_registry(ws, &mut registry, false)?;
+        // `[patch]` entries were already registered by the first resolve
+        // above (it passes `register_patches = true`), so the second pass
+        // below must not register them again.
+        add_patches = false;
+
+        // Second, resolve with precisely what we're doing. Filter out
+        // transitive dependencies if necessary, specify features, handle
+        // overrides, etc.
+        let _p = profile::start("resolving w/ overrides...");
+
+        add_overrides(&mut registry, ws)?;
+
+        // Warn about `[replace]` entries that did not match anything in
+        // the freshly computed resolve graph.
+        for &(ref replace_spec, ref dep) in ws.root_replace() {
+            if !resolve
+                .iter()
+                .any(|r| replace_spec.matches(r) && !dep.matches_id(r))
+            {
+                ws.config()
+                    .shell()
+                    .warn(format!("package replacement is not used: {}", replace_spec))?
+            }
+        }
+
+        Some(resolve)
+    } else {
+        // Optional deps not required: use the existing lockfile (if any)
+        // as the "previous" resolve instead of a first resolution pass.
+        ops::load_pkg_lockfile(ws)?
+    };
+
+    let resolved_with_overrides = ops::resolve_with_previous(
+        &mut registry,
+        ws,
+        method,
+        resolve.as_ref(),
+        None,
+        specs,
+        add_patches,
+        true,
+    )?;
+
+    let packages = get_resolved_packages(&resolved_with_overrides, registry)?;
+
+    Ok((packages, resolved_with_overrides))
+}
+
+/// Runs a full `Method::Everything` resolution against `registry`,
+/// seeded with the existing lockfile (if any), and writes the result
+/// back out as the workspace's lockfile.
+fn resolve_with_registry<'cfg>(
+    ws: &Workspace<'cfg>,
+    registry: &mut PackageRegistry<'cfg>,
+    warn: bool,
+) -> CargoResult<Resolve> {
+    let prev = ops::load_pkg_lockfile(ws)?;
+    let resolve = resolve_with_previous(
+        registry,
+        ws,
+        Method::Everything,
+        prev.as_ref(),
+        None,
+        &[],
+        true,
+        warn,
+    )?;
+
+    // Don't persist a lockfile for ephemeral workspaces.
+    if !ws.is_ephemeral() {
+        ops::write_pkg_lockfile(ws, &resolve)?;
+    }
+    Ok(resolve)
+}
+
+/// Resolve all dependencies for a package using an optional previous instance
+/// of resolve to guide the resolution process.
+///
+/// This also takes an optional hash set, `to_avoid`, which is a list of package
+/// ids that should be avoided when consulting the previous instance of resolve
+/// (often used in pairings with updates).
+///
+/// The previous resolve normally comes from a lockfile. This function does not
+/// read or write lockfiles from the filesystem.
+pub fn resolve_with_previous<'a, 'cfg>(
+    registry: &mut PackageRegistry<'cfg>,
+    ws: &Workspace<'cfg>,
+    method: Method,
+    previous: Option<&'a Resolve>,
+    to_avoid: Option<&HashSet<&'a PackageId>>,
+    specs: &[PackageIdSpec],
+    register_patches: bool,
+    warn: bool,
+) -> CargoResult<Resolve> {
+    // Here we place an artificial limitation that all non-registry sources
+    // cannot be locked at more than one revision. This means that if a git
+    // repository provides more than one package, they must all be updated in
+    // step when any of them are updated.
+    //
+    // TODO: This seems like a hokey reason to single out the registry as being
+    //       different
+    let mut to_avoid_sources = HashSet::new();
+    if let Some(to_avoid) = to_avoid {
+        to_avoid_sources.extend(
+            to_avoid
+                .iter()
+                .map(|p| p.source_id())
+                .filter(|s| !s.is_registry()),
+        );
+    }
+
+    // A previously-resolved package is "kept" (eligible to stay locked)
+    // unless it, or its non-registry source, was listed in `to_avoid`.
+    let keep = |p: &&'a PackageId| {
+        !to_avoid_sources.contains(&p.source_id()) && match to_avoid {
+            Some(set) => !set.contains(p),
+            None => true,
+        }
+    };
+
+    // In the case where a previous instance of resolve is available, we
+    // want to lock as many packages as possible to the previous version
+    // without disturbing the graph structure.
+    let mut try_to_use = HashSet::new();
+    if let Some(r) = previous {
+        trace!("previous: {:?}", r);
+        register_previous_locks(ws, registry, r, &keep);
+
+        // Everything in the previous lock file we want to keep is prioritized
+        // in dependency selection if it comes up, aka we want to have
+        // conservative updates.
+        try_to_use.extend(r.iter().filter(keep).inspect(|id| {
+            debug!("attempting to prefer {}", id);
+        }));
+    }
+
+    // Register `[patch]` entries, re-locking each patch dependency to the
+    // exact id the previous resolve used whenever that id is still kept.
+    if register_patches {
+        for (url, patches) in ws.root_patch() {
+            let previous = match previous {
+                Some(r) => r,
+                None => {
+                    registry.patch(url, patches)?;
+                    continue;
+                }
+            };
+            let patches = patches
+                .iter()
+                .map(|dep| {
+                    let unused = previous.unused_patches();
+                    let candidates = previous.iter().chain(unused);
+                    match candidates.filter(keep).find(|id| dep.matches_id(id)) {
+                        Some(id) => {
+                            let mut dep = dep.clone();
+                            dep.lock_to(id);
+                            dep
+                        }
+                        None => dep.clone(),
+                    }
+                })
+                .collect::<Vec<_>>();
+            registry.patch(url, &patches)?;
+        }
+
+        registry.lock_patches();
+    }
+
+    // Make sure each workspace member's own source is known to the registry.
+    for member in ws.members() {
+        registry.add_sources(&[member.package_id().source_id().clone()])?;
+    }
+
+    // Build the list of (summary, method) pairs the resolver starts from.
+    let mut summaries = Vec::new();
+    if ws.config().cli_unstable().package_features {
+        let mut members = Vec::new();
+        match method {
+            Method::Everything => members.extend(ws.members()),
+            Method::Required {
+                features,
+                all_features,
+                uses_default_features,
+                ..
+            } => {
+                if specs.len() > 1 && !features.is_empty() {
+                    bail!("cannot specify features for more than one package");
+                }
+                members.extend(
+                    ws.members()
+                        .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))),
+                );
+                // Edge case: running `cargo build -p foo`, where `foo` is not a member
+                // of current workspace. Add all packages from workspace to get `foo`
+                // into the resolution graph.
+                if members.is_empty() {
+                    if !(features.is_empty() && !all_features && uses_default_features) {
+                        bail!("cannot specify features for packages outside of workspace");
+                    }
+                    members.extend(ws.members());
+                }
+            }
+        }
+        for member in members {
+            let summary = registry.lock(member.summary().clone());
+            summaries.push((summary, method))
+        }
+    } else {
+        for member in ws.members() {
+            let method_to_resolve = match method {
+                // When everything for a workspace we want to be sure to resolve all
+                // members in the workspace, so propagate the `Method::Everything`.
+                Method::Everything => Method::Everything,
+
+                // If we're not resolving everything though then we're constructing the
+                // exact crate graph we're going to build. Here we don't necessarily
+                // want to keep around all workspace crates as they may not all be
+                // built/tested.
+                //
+                // Additionally, the `method` specified represents command line
+                // flags, which really only matters for the current package
+                // (determined by the cwd). If other packages are specified (via
+                // `-p`) then the command line flags like features don't apply to
+                // them.
+                //
+                // As a result, if this `member` is the current member of the
+                // workspace, then we use `method` specified. Otherwise we use a
+                // base method with no features specified but using default features
+                // for any other packages specified with `-p`.
+                Method::Required {
+                    dev_deps,
+                    all_features,
+                    ..
+                } => {
+                    let base = Method::Required {
+                        dev_deps,
+                        features: &[],
+                        all_features,
+                        uses_default_features: true,
+                    };
+                    let member_id = member.package_id();
+                    match ws.current_opt() {
+                        Some(current) if member_id == current.package_id() => method,
+                        _ => {
+                            if specs.iter().any(|spec| spec.matches(member_id)) {
+                                base
+                            } else {
+                                continue;
+                            }
+                        }
+                    }
+                }
+            };
+
+            let summary = registry.lock(member.summary().clone());
+            summaries.push((summary, method_to_resolve));
+        }
+    };
+
+    let root_replace = ws.root_replace();
+
+    // Re-lock `[replace]` replacements to the exact replacement id used by
+    // the previous resolve whenever a kept match is found.
+    let replace = match previous {
+        Some(r) => root_replace
+            .iter()
+            .map(|&(ref spec, ref dep)| {
+                for (key, val) in r.replacements().iter() {
+                    if spec.matches(key) && dep.matches_id(val) && keep(&val) {
+                        let mut dep = dep.clone();
+                        dep.lock_to(val);
+                        return (spec.clone(), dep);
+                    }
+                }
+                (spec.clone(), dep.clone())
+            })
+            .collect::<Vec<_>>(),
+        None => root_replace.to_vec(),
+    };
+
+    ws.preload(registry);
+    let mut resolved = resolver::resolve(
+        &summaries,
+        &replace,
+        registry,
+        &try_to_use,
+        Some(ws.config()),
+        warn,
+    )?;
+    resolved.register_used_patches(registry.patches());
+    if let Some(previous) = previous {
+        resolved.merge_from(previous)?;
+    }
+    Ok(resolved)
+}
+
+/// Read the `paths` configuration variable to discover all path overrides that
+/// have been configured.
+pub fn add_overrides<'a>(
+    registry: &mut PackageRegistry<'a>,
+    ws: &Workspace<'a>,
+) -> CargoResult<()> {
+    let paths = match ws.config().get_list("paths")? {
+        Some(list) => list,
+        None => return Ok(()),
+    };
+
+    let paths = paths.val.iter().map(|&(ref s, ref p)| {
+        // The path listed next to the string is the config file in which the
+        // key was located, so we want to pop off the `.cargo/config` component
+        // to get the directory containing the `.cargo` folder.
+        (p.parent().unwrap().parent().unwrap().join(s), p)
+    });
+
+    for (path, definition) in paths {
+        let id = SourceId::for_path(&path)?;
+        let mut source = PathSource::new_recursive(&path, &id, ws.config());
+        source.update().chain_err(|| {
+            format!(
+                "failed to update path override `{}` \
+                 (defined in `{}`)",
+                path.display(),
+                definition.display()
+            )
+        })?;
+        registry.add_override(Box::new(source));
+    }
+    Ok(())
+}
+
+pub fn get_resolved_packages<'a>(
+    resolve: &Resolve,
+    registry: PackageRegistry<'a>,
+) -> CargoResult<PackageSet<'a>> {
+    let ids: Vec<PackageId> = resolve.iter().cloned().collect();
+    registry.get(&ids)
+}
+
+/// In this function we're responsible for informing the `registry` of all
+/// locked dependencies from the previous lock file we had, `resolve`.
+///
+/// This gets particularly tricky for a couple of reasons. The first is that we
+/// want all updates to be conservative, so we actually want to take the
+/// `resolve` into account (and avoid unnecessary registry updates and such).
+/// the second, however, is that we want to be resilient to updates of
+/// manifests. For example if a dependency is added or a version is changed we
+/// want to make sure that we properly re-resolve (conservatively) instead of
+/// providing an opaque error.
+///
+/// The logic here is somewhat subtle but there should be more comments below to
+/// help out, and otherwise feel free to ask on IRC if there's questions!
+///
+/// Note that this function, at the time of this writing, is basically the
+/// entire fix for #4127
+fn register_previous_locks<'a>(
+    ws: &Workspace,
+    registry: &mut PackageRegistry,
+    resolve: &'a Resolve,
+    keep: &Fn(&&'a PackageId) -> bool,
+) {
+    // Map a path `SourceId` to the `Package` at that path, if its manifest
+    // loads; non-path sources (and unreadable manifests) yield `None`.
+    let path_pkg = |id: &SourceId| {
+        if !id.is_path() {
+            return None;
+        }
+        if let Ok(path) = id.url().to_file_path() {
+            if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) {
+                return Some(pkg);
+            }
+        }
+        None
+    };
+
+    // Ok so we've been passed in a `keep` function which basically says "if I
+    // return true then this package wasn't listed for an update on the command
+    // line". AKA if we run `cargo update -p foo` then `keep(bar)` will return
+    // `true`, whereas `keep(foo)` will return `true` (roughly).
+    //
+    // This isn't actually quite what we want, however. Instead we want to
+    // further refine this `keep` function with *all transitive dependencies* of
+    // the packages we're not keeping. For example consider a case like this:
+    //
+    // * There's a crate `log`
+    // * There's a crate `serde` which depends on `log`
+    //
+    // Let's say we then run `cargo update -p serde`. This may *also* want to
+    // update the `log` dependency as our newer version of `serde` may have a
+    // new minimum version required for `log`. Now this isn't always guaranteed
+    // to work. What'll happen here is we *won't* lock the `log` dependency nor
+    // the `log` crate itself, but we will inform the registry "please prefer
+    // this version of `log`". That way if our newer version of serde works with
+    // the older version of `log`, we conservatively won't update `log`. If,
+    // however, nothing else in the dependency graph depends on `log` and the
+    // newer version of `serde` requires a new version of `log` it'll get pulled
+    // in (as we didn't accidentally lock it to an old version).
+    //
+    // Additionally here we process all path dependencies listed in the previous
+    // resolve. They can not only have their dependencies change but also
+    // the versions of the package change as well. If this ends up happening
+    // then we want to make sure we don't lock a package id node that doesn't
+    // actually exist. Note that we don't do transitive visits of all the
+    // package's dependencies here as that'll be covered below to poison those
+    // if they changed.
+    let mut avoid_locking = HashSet::new();
+    for node in resolve.iter() {
+        if !keep(&node) {
+            // Not kept: unlock this node and everything reachable from it.
+            add_deps(resolve, node, &mut avoid_locking);
+        } else if let Some(pkg) = path_pkg(node.source_id()) {
+            // A path package whose on-disk id no longer matches the lock
+            // file (e.g. its version changed) must not stay locked.
+            if pkg.package_id() != node {
+                avoid_locking.insert(node);
+            }
+        }
+    }
+
+    // Ok but the above loop isn't the entire story! Updates to the dependency
+    // graph can come from two locations, the `cargo update` command or
+    // manifests themselves. For example a manifest on the filesystem may
+    // have been updated to have an updated version requirement on `serde`. In
+    // this case both `keep(serde)` and `keep(log)` return `true` (the `keep`
+    // that's an argument to this function). We, however, don't want to keep
+    // either of those! Otherwise we'll get obscure resolve errors about locked
+    // versions.
+    //
+    // To solve this problem we iterate over all packages with path sources
+    // (aka ones with manifests that are changing) and take a look at all of
+    // their dependencies. If any dependency does not match something in the
+    // previous lock file, then we're guaranteed that the main resolver will
+    // update the source of this dependency no matter what. Knowing this we
+    // poison all packages from the same source, forcing them all to get
+    // updated.
+    //
+    // This may seem like a heavy hammer, and it is! It means that if you change
+    // anything from crates.io then all of crates.io becomes unlocked. Note,
+    // however, that we still want conservative updates. This currently happens
+    // because the first candidate the resolver picks is the previously locked
+    // version, and only if that fails to activate to we move on and try
+    // a different version. (giving the guise of conservative updates)
+    //
+    // For example let's say we had `serde = "0.1"` written in our lock file.
+    // When we later edit this to `serde = "0.1.3"` we don't want to lock serde
+    // at its old version, 0.1.1. Instead we want to allow it to update to
+    // `0.1.3` and update its own dependencies (like above). To do this *all
+    // crates from crates.io* are not locked (aka added to `avoid_locking`).
+    // For dependencies like `log` their previous version in the lock file will
+    // come up first before newer version, if newer version are available.
+    //
+    // `path_deps` is a worklist of path packages to scan, starting from the
+    // workspace members and growing as path dependencies are discovered.
+    let mut path_deps = ws.members().cloned().collect::<Vec<_>>();
+    let mut visited = HashSet::new();
+    while let Some(member) = path_deps.pop() {
+        if !visited.insert(member.package_id().clone()) {
+            continue;
+        }
+        for dep in member.dependencies() {
+            // If this dependency didn't match anything special then we may want
+            // to poison the source as it may have been added. If this path
+            // dependencies is *not* a workspace member, however, and it's an
+            // optional/non-transitive dependency then it won't be necessarily
+            // be in our lock file. If this shows up then we avoid poisoning
+            // this source as otherwise we'd repeatedly update the registry.
+            //
+            // TODO: this breaks adding an optional dependency in a
+            //       non-workspace member and then simultaneously editing the
+            //       dependency on that crate to enable the feature. For now
+            //       this bug is better than the always updating registry
+            //       though...
+            if !ws
+                .members()
+                .any(|pkg| pkg.package_id() == member.package_id())
+                && (dep.is_optional() || !dep.is_transitive())
+            {
+                continue;
+            }
+
+            // If this is a path dependency then try to push it onto our
+            // worklist
+            if let Some(pkg) = path_pkg(dep.source_id()) {
+                path_deps.push(pkg);
+                continue;
+            }
+
+            // If we match *anything* in the dependency graph then we consider
+            // ourselves A-OK and assume that we'll resolve to that.
+            if resolve.iter().any(|id| dep.matches_ignoring_source(id)) {
+                continue;
+            }
+
+            // Ok if nothing matches, then we poison the source of this
+            // dependencies and the previous lock file.
+            debug!(
+                "poisoning {} because {} looks like it changed {}",
+                dep.source_id(),
+                member.package_id(),
+                dep.package_name()
+            );
+            for id in resolve
+                .iter()
+                .filter(|id| id.source_id() == dep.source_id())
+            {
+                add_deps(resolve, id, &mut avoid_locking);
+            }
+        }
+    }
+
+    // Alright now that we've got our new, fresh, shiny, and refined `keep`
+    // function let's put it to action. Take a look at the previous lockfile,
+    // filter everything by this callback, and then shove everything else into
+    // the registry as a locked dependency.
+    let keep = |id: &&'a PackageId| keep(id) && !avoid_locking.contains(id);
+
+    for node in resolve.iter().filter(keep) {
+        let deps = resolve
+            .deps_not_replaced(node)
+            .filter(keep)
+            .cloned()
+            .collect();
+        registry.register_lock(node.clone(), deps);
+    }
+
+    /// recursively add `node` and all its transitive dependencies to `set`
+    fn add_deps<'a>(resolve: &'a Resolve, node: &'a PackageId, set: &mut HashSet<&'a PackageId>) {
+        if !set.insert(node) {
+            return;
+        }
+        debug!("ignoring any lock pointing directly at {}", node);
+        for dep in resolve.deps_not_replaced(node) {
+            add_deps(resolve, dep, set);
+        }
+    }
+}
diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs
new file mode 100644 (file)
index 0000000..ca52bc4
--- /dev/null
@@ -0,0 +1,244 @@
+//! Implementation of configuration for various sources
+//!
+//! This module will parse the various `source.*` TOML configuration keys into a
+//! structure usable by Cargo itself. Currently this is primarily used to map
+//! sources to one another via the `replace-with` key in `.cargo/config`.
+
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+
+use url::Url;
+
+use core::{GitReference, Source, SourceId};
+use sources::{ReplacedSource, CRATES_IO_REGISTRY};
+use util::{Config, ToUrl};
+use util::config::ConfigValue;
+use util::errors::{CargoResult, CargoResultExt};
+
+/// Map of all configured sources (the `[source.*]` tables plus the
+/// built-in crates.io entry), indexed both by name and by `SourceId`,
+/// used to resolve `replace-with` source replacement.
+#[derive(Clone)]
+pub struct SourceConfigMap<'cfg> {
+    // Source name -> its parsed configuration.
+    cfgs: HashMap<String, SourceConfig>,
+    // Reverse index: source id -> the name it was configured under.
+    id2name: HashMap<SourceId, String>,
+    config: &'cfg Config,
+}
+
+/// Configuration for a particular source, found in TOML looking like:
+///
+/// ```toml
+/// [source.crates-io]
+/// registry = 'https://github.com/rust-lang/crates.io-index'
+/// replace-with = 'foo'    # optional
+/// ```
+#[derive(Clone)]
+struct SourceConfig {
+    // id this source corresponds to, inferred from the various defined keys in
+    // the configuration
+    id: SourceId,
+
+    // Name of the source that this source should be replaced with. This field
+    // is a tuple of (name, path) where path is where this configuration key was
+    // defined (the literal `.cargo/config` file). `None` means this source
+    // is used directly, with no `replace-with` redirection.
+    replace_with: Option<(String, PathBuf)>,
+}
+
+impl<'cfg> SourceConfigMap<'cfg> {
+    /// Build the map from every `[source.*]` table in `config`, layered
+    /// on top of the default crates.io entry from `empty`.
+    pub fn new(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
+        let mut base = SourceConfigMap::empty(config)?;
+        if let Some(table) = config.get_table("source")? {
+            for (key, value) in table.val.iter() {
+                base.add_config(key, value)?;
+            }
+        }
+        Ok(base)
+    }
+
+    /// A map containing only the built-in `crates-io` source.
+    pub fn empty(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
+        let mut base = SourceConfigMap {
+            cfgs: HashMap::new(),
+            id2name: HashMap::new(),
+            config,
+        };
+        base.add(
+            CRATES_IO_REGISTRY,
+            SourceConfig {
+                id: SourceId::crates_io(config)?,
+                replace_with: None,
+            },
+        );
+        Ok(base)
+    }
+
+    /// The `Config` this map was constructed from.
+    pub fn config(&self) -> &'cfg Config {
+        self.config
+    }
+
+    /// Load the source for `id`, following any chain of `replace-with`
+    /// redirections configured for it and validating that the final
+    /// replacement is usable (checksum support, lockfile presence).
+    pub fn load(&self, id: &SourceId) -> CargoResult<Box<Source + 'cfg>> {
+        debug!("loading: {}", id);
+        // Sources with no configuration entry are loaded directly.
+        let mut name = match self.id2name.get(id) {
+            Some(name) => name,
+            None => return Ok(id.load(self.config)?),
+        };
+        let mut path = Path::new("/");
+        let orig_name = name;
+        let new_id;
+        // Walk the `replace-with` chain until a terminal source is found.
+        //
+        // NOTE(review): the cycle check below only fires when the chain
+        // returns to the *original* name; a cycle among later names
+        // (a -> b -> c -> b) would loop forever. Consider tracking every
+        // visited name instead.
+        loop {
+            let cfg = match self.cfgs.get(name) {
+                Some(cfg) => cfg,
+                None => bail!(
+                    "could not find a configured source with the \
+                     name `{}` when attempting to lookup `{}` \
+                     (configuration in `{}`)",
+                    name,
+                    orig_name,
+                    path.display()
+                ),
+            };
+            match cfg.replace_with {
+                Some((ref s, ref p)) => {
+                    name = s;
+                    path = p;
+                }
+                None if *id == cfg.id => return Ok(id.load(self.config)?),
+                None => {
+                    new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string()));
+                    break;
+                }
+            }
+            debug!("following pointer to {}", name);
+            if name == orig_name {
+                bail!(
+                    "detected a cycle of `replace-with` sources, the source \
+                     `{}` is eventually replaced with itself \
+                     (configuration in `{}`)",
+                    name,
+                    path.display()
+                )
+            }
+        }
+        let new_src = new_id.load(self.config)?;
+        let old_src = id.load(self.config)?;
+        // A replacement without checksums can't produce a lock file
+        // compatible with the checksum-bearing original.
+        if !new_src.supports_checksums() && old_src.supports_checksums() {
+            bail!(
+                "\
+cannot replace `{orig}` with `{name}`, the source `{orig}` supports \
+checksums, but `{name}` does not
+
+a lock file compatible with `{orig}` cannot be generated in this situation
+",
+                orig = orig_name,
+                name = name
+            );
+        }
+
+        if old_src.requires_precise() && id.precise().is_none() {
+            bail!(
+                "\
+the source {orig} requires a lock file to be present first before it can be
+used against vendored source code
+
+remove the source replacement configuration, generate a lock file, and then
+restore the source replacement configuration to continue the build
+",
+                orig = orig_name
+            );
+        }
+
+        Ok(Box::new(ReplacedSource::new(id, &new_id, new_src)))
+    }
+
+    /// Register a named source config in both lookup maps.
+    fn add(&mut self, name: &str, cfg: SourceConfig) {
+        self.id2name.insert(cfg.id.clone(), name.to_string());
+        self.cfgs.insert(name.to_string(), cfg);
+    }
+
+    /// Parse one `[source.<name>]` table into a `SourceConfig` and
+    /// register it; exactly one of `registry`, `local-registry`,
+    /// `directory` or `git` must define the source's URL.
+    fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> {
+        let (table, _path) = cfg.table(&format!("source.{}", name))?;
+        let mut srcs = Vec::new();
+        if let Some(val) = table.get("registry") {
+            let url = url(val, &format!("source.{}.registry", name))?;
+            srcs.push(SourceId::for_registry(&url)?);
+        }
+        if let Some(val) = table.get("local-registry") {
+            let (s, path) = val.string(&format!("source.{}.local-registry", name))?;
+            // The value is relative to the directory containing `.cargo/config`.
+            let mut path = path.to_path_buf();
+            path.pop();
+            path.pop();
+            path.push(s);
+            srcs.push(SourceId::for_local_registry(&path)?);
+        }
+        if let Some(val) = table.get("directory") {
+            let (s, path) = val.string(&format!("source.{}.directory", name))?;
+            let mut path = path.to_path_buf();
+            path.pop();
+            path.pop();
+            path.push(s);
+            srcs.push(SourceId::for_directory(&path)?);
+        }
+        if let Some(val) = table.get("git") {
+            let url = url(val, &format!("source.{}.git", name))?;
+            // NOTE(review): `try` became a reserved keyword in the 2018
+            // edition; rename this closure before migrating editions.
+            let try = |s: &str| {
+                let val = match table.get(s) {
+                    Some(s) => s,
+                    None => return Ok(None),
+                };
+                let key = format!("source.{}.{}", name, s);
+                val.string(&key).map(Some)
+            };
+            // Precedence: branch, then tag, then rev, defaulting to `master`.
+            let reference = match try("branch")? {
+                Some(b) => GitReference::Branch(b.0.to_string()),
+                None => match try("tag")? {
+                    Some(b) => GitReference::Tag(b.0.to_string()),
+                    None => match try("rev")? {
+                        Some(b) => GitReference::Rev(b.0.to_string()),
+                        None => GitReference::Branch("master".to_string()),
+                    },
+                },
+            };
+            srcs.push(SourceId::for_git(&url, reference)?);
+        }
+        if name == "crates-io" && srcs.is_empty() {
+            srcs.push(SourceId::crates_io(self.config)?);
+        }
+
+        let mut srcs = srcs.into_iter();
+        let src = srcs.next().ok_or_else(|| {
+            format_err!(
+                "no source URL specified for `source.{}`, need \
+                 either `registry` or `local-registry` defined",
+                name
+            )
+        })?;
+        if srcs.next().is_some() {
+            bail!("more than one source URL specified for `source.{}`", name)
+        }
+
+        let mut replace_with = None;
+        if let Some(val) = table.get("replace-with") {
+            let (s, path) = val.string(&format!("source.{}.replace-with", name))?;
+            replace_with = Some((s.to_string(), path.to_path_buf()));
+        }
+
+        self.add(
+            name,
+            SourceConfig {
+                id: src,
+                replace_with,
+            },
+        );
+
+        return Ok(());
+
+        /// Read a string config value as a URL, with a descriptive error
+        /// naming the offending key and config file on failure.
+        fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
+            let (url, path) = cfg.string(key)?;
+            let url = url.to_url().chain_err(|| {
+                format!(
+                    "configuration key `{}` specified an invalid \
+                     URL (in {})",
+                    key,
+                    path.display()
+                )
+            })?;
+            Ok(url)
+        }
+    }
+}
diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs
new file mode 100644 (file)
index 0000000..0076b75
--- /dev/null
@@ -0,0 +1,219 @@
+use std::collections::HashMap;
+use std::fmt::{self, Debug, Formatter};
+use std::fs::File;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+
+use hex;
+
+use serde_json;
+
+use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
+use core::source::MaybePackage;
+use sources::PathSource;
+use util::{Config, Sha256};
+use util::errors::{CargoResult, CargoResultExt};
+use util::paths;
+
+/// A source backed by a local directory of unpacked crates (e.g. a vendor
+/// directory). Each subdirectory containing a `Cargo.toml` is treated as one
+/// package; its `.cargo-checksum.json` supplies checksums for verification.
+pub struct DirectorySource<'cfg> {
+    // Id identifying this directory source to the rest of cargo.
+    source_id: SourceId,
+    // Filesystem root scanned for packages by `update()`.
+    root: PathBuf,
+    // Packages discovered by `update()`, keyed by package id, paired with
+    // the checksum metadata loaded from each `.cargo-checksum.json`.
+    packages: HashMap<PackageId, (Package, Checksum)>,
+    config: &'cfg Config,
+}
+
+/// Deserialized form of a crate's `.cargo-checksum.json` file.
+#[derive(Deserialize)]
+struct Checksum {
+    // Checksum of the whole package, if known.
+    package: Option<String>,
+    // Per-file checksums: relative path -> sha256 hex digest (see `verify`).
+    files: HashMap<String, String>,
+}
+
+impl<'cfg> DirectorySource<'cfg> {
+    pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
+        DirectorySource {
+            source_id: id.clone(),
+            root: path.to_path_buf(),
+            config,
+            packages: HashMap::new(),
+        }
+    }
+}
+
+impl<'cfg> Debug for DirectorySource<'cfg> {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "DirectorySource {{ root: {:?} }}", self.root)
+    }
+}
+
+impl<'cfg> Source for DirectorySource<'cfg> {
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        let packages = self.packages.values().map(|p| &p.0);
+        let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
+        for summary in matches.map(|pkg| pkg.summary().clone()) {
+            f(summary);
+        }
+        Ok(())
+    }
+
+    fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        let packages = self.packages.values().map(|p| &p.0);
+        for summary in packages.map(|pkg| pkg.summary().clone()) {
+            f(summary);
+        }
+        Ok(())
+    }
+
+    fn supports_checksums(&self) -> bool {
+        true
+    }
+
+    fn requires_precise(&self) -> bool {
+        true
+    }
+
+    fn source_id(&self) -> &SourceId {
+        &self.source_id
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        self.packages.clear();
+        let entries = self.root.read_dir().chain_err(|| {
+            format!(
+                "failed to read root of directory source: {}",
+                self.root.display()
+            )
+        })?;
+
+        for entry in entries {
+            let entry = entry?;
+            let path = entry.path();
+
+            // Ignore hidden/dot directories as they typically don't contain
+            // crates and otherwise may conflict with a VCS
+            // (rust-lang/cargo#3414).
+            if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
+                if s.starts_with('.') {
+                    continue;
+                }
+            }
+
+            // Vendor directories are often checked into a VCS, but throughout
+            // the lifetime of a vendor dir crates are often added and deleted.
+            // Some VCS implementations don't always fully delete the directory
+            // when a dir is removed from a different checkout. Sometimes a
+            // mostly-empty dir is left behind.
+            //
+            // Additionally vendor directories are sometimes accompanied with
+            // readme files and other auxiliary information not too interesting
+            // to Cargo.
+            //
+            // To help handle all this we only try processing folders with a
+            // `Cargo.toml` in them. This has the upside of being pretty
+            // flexible with the contents of vendor directories but has the
+            // downside of accidentally misconfigured vendor directories
+            // silently returning less crates.
+            if !path.join("Cargo.toml").exists() {
+                continue;
+            }
+
+            let mut src = PathSource::new(&path, &self.source_id, self.config);
+            src.update()?;
+            let pkg = src.root_package()?;
+
+            let cksum_file = path.join(".cargo-checksum.json");
+            let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
+                format!(
+                    "failed to load checksum `.cargo-checksum.json` \
+                     of {} v{}",
+                    pkg.package_id().name(),
+                    pkg.package_id().version()
+                )
+            })?;
+            let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
+                format!(
+                    "failed to decode `.cargo-checksum.json` of \
+                     {} v{}",
+                    pkg.package_id().name(),
+                    pkg.package_id().version()
+                )
+            })?;
+
+            let mut manifest = pkg.manifest().clone();
+            let mut summary = manifest.summary().clone();
+            if let Some(ref package) = cksum.package {
+                summary = summary.set_checksum(package.clone());
+            }
+            manifest.set_summary(summary);
+            let pkg = Package::new(manifest, pkg.manifest_path());
+            self.packages.insert(pkg.package_id().clone(), (pkg, cksum));
+        }
+
+        Ok(())
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+        self.packages
+            .get(id)
+            .map(|p| &p.0)
+            .cloned()
+            .map(MaybePackage::Ready)
+            .ok_or_else(|| format_err!("failed to find package with id: {}", id))
+    }
+
+    fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+        panic!("no downloads to do")
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        Ok(pkg.package_id().version().to_string())
+    }
+
+    fn verify(&self, id: &PackageId) -> CargoResult<()> {
+        let (pkg, cksum) = match self.packages.get(id) {
+            Some(&(ref pkg, ref cksum)) => (pkg, cksum),
+            None => bail!("failed to find entry for `{}` in directory source", id),
+        };
+
+        let mut buf = [0; 16 * 1024];
+        for (file, cksum) in cksum.files.iter() {
+            let mut h = Sha256::new();
+            let file = pkg.root().join(file);
+
+            (|| -> CargoResult<()> {
+                let mut f = File::open(&file)?;
+                loop {
+                    match f.read(&mut buf)? {
+                        0 => return Ok(()),
+                        n => h.update(&buf[..n]),
+                    }
+                }
+            })()
+                .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
+
+            let actual = hex::encode(h.finish());
+            if &*actual != cksum {
+                bail!(
+                    "\
+                     the listed checksum of `{}` has changed:\n\
+                     expected: {}\n\
+                     actual:   {}\n\
+                     \n\
+                     directory sources are not intended to be edited, if \
+                     modifications are required then it is recommended \
+                     that [replace] is used with a forked copy of the \
+                     source\
+                     ",
+                    file.display(),
+                    cksum,
+                    actual
+                );
+            }
+        }
+
+        Ok(())
+    }
+
+    fn describe(&self) -> String {
+        format!("directory source `{}`", self.root.display())
+    }
+}
diff --git a/src/cargo/sources/git/mod.rs b/src/cargo/sources/git/mod.rs
new file mode 100644 (file)
index 0000000..0b43786
--- /dev/null
@@ -0,0 +1,4 @@
+pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote, GitRevision};
+pub use self::source::{canonicalize_url, GitSource};
+mod utils;
+mod source;
diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs
new file mode 100644 (file)
index 0000000..4a89959
--- /dev/null
@@ -0,0 +1,293 @@
+use std::fmt::{self, Debug, Formatter};
+
+use url::Url;
+
+use core::source::{Source, SourceId, MaybePackage};
+use core::GitReference;
+use core::{Dependency, Package, PackageId, Summary};
+use util::Config;
+use util::errors::CargoResult;
+use util::hex::short_hash;
+use sources::PathSource;
+use sources::git::utils::{GitRemote, GitRevision};
+
+/// A `Source` for a git repository: resolves a `GitReference` to a concrete
+/// revision, materializes a checkout, and delegates package discovery to a
+/// `PathSource` over that checkout.
+pub struct GitSource<'cfg> {
+    remote: GitRemote,
+    // Branch/tag/rev requested; becomes `Rev` when the source id carries a
+    // `precise` revision (e.g. from a lock file).
+    reference: GitReference,
+    source_id: SourceId,
+    // Populated by `update()`; all queries/downloads go through it.
+    path_source: Option<PathSource<'cfg>>,
+    // Resolved revision, populated by `update()`.
+    rev: Option<GitRevision>,
+    // Directory name for this repo under the git cache
+    // (`<last-path-segment>-<hash>`, see `ident()`).
+    ident: String,
+    config: &'cfg Config,
+}
+
+impl<'cfg> GitSource<'cfg> {
+    /// Creates a source for `source_id`, which must be a git id (asserts
+    /// otherwise). A `precise` revision on the id takes priority over the
+    /// configured branch/tag reference.
+    pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
+        assert!(source_id.is_git(), "id is not git, id={}", source_id);
+
+        let remote = GitRemote::new(source_id.url());
+        let ident = ident(source_id.url())?;
+
+        let reference = match source_id.precise() {
+            Some(s) => GitReference::Rev(s.to_string()),
+            None => source_id.git_reference().unwrap().clone(),
+        };
+
+        let source = GitSource {
+            remote,
+            reference,
+            source_id: source_id.clone(),
+            path_source: None,
+            rev: None,
+            ident,
+            config,
+        };
+
+        Ok(source)
+    }
+
+    /// The remote repository URL.
+    pub fn url(&self) -> &Url {
+        self.remote.url()
+    }
+
+    /// Reads all packages from the checkout, running `update()` first if the
+    /// checkout hasn't been materialized yet.
+    pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
+        if self.path_source.is_none() {
+            self.update()?;
+        }
+        self.path_source.as_mut().unwrap().read_packages()
+    }
+}
+
+fn ident(url: &Url) -> CargoResult<String> {
+    let url = canonicalize_url(url)?;
+    let ident = url.path_segments()
+        .and_then(|mut s| s.next_back())
+        .unwrap_or("");
+
+    let ident = if ident == "" { "_empty" } else { ident };
+
+    Ok(format!("{}-{}", ident, short_hash(&url)))
+}
+
+// Some hacks and heuristics for making equivalent URLs hash the same
+pub fn canonicalize_url(url: &Url) -> CargoResult<Url> {
+    let mut url = url.clone();
+
+    // cannot-be-a-base-urls are not supported
+    // eg. github.com:rust-lang-nursery/rustfmt.git
+    if url.cannot_be_a_base() {
+        bail!(
+            "invalid url `{}`: cannot-be-a-base-URLs are not supported",
+            url
+        )
+    }
+
+    // Strip a trailing slash
+    if url.path().ends_with('/') {
+        url.path_segments_mut().unwrap().pop_if_empty();
+    }
+
+    // HACKHACK: For GitHub URL's specifically just lowercase
+    // everything.  GitHub treats both the same, but they hash
+    // differently, and we're gonna be hashing them. This wants a more
+    // general solution, and also we're almost certainly not using the
+    // same case conversion rules that GitHub does. (#84)
+    if url.host_str() == Some("github.com") {
+        url.set_scheme("https").unwrap();
+        let path = url.path().to_lowercase();
+        url.set_path(&path);
+    }
+
+    // Repos generally can be accessed with or w/o '.git'
+    let needs_chopping = url.path().ends_with(".git");
+    if needs_chopping {
+        let last = {
+            let last = url.path_segments().unwrap().next_back().unwrap();
+            last[..last.len() - 4].to_owned()
+        };
+        url.path_segments_mut().unwrap().pop().push(&last);
+    }
+
+    Ok(url)
+}
+
+impl<'cfg> Debug for GitSource<'cfg> {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "git repo at {}", self.remote.url())?;
+
+        match self.reference.pretty_ref() {
+            Some(s) => write!(f, " ({})", s),
+            None => Ok(()),
+        }
+    }
+}
+
+impl<'cfg> Source for GitSource<'cfg> {
+    // Queries delegate to the `PathSource` over the checkout, so `update()`
+    // must have run first.
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        let src = self.path_source
+            .as_mut()
+            .expect("BUG: update() must be called before query()");
+        src.query(dep, f)
+    }
+
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        let src = self.path_source
+            .as_mut()
+            .expect("BUG: update() must be called before query()");
+        src.fuzzy_query(dep, f)
+    }
+
+    // Git sources have no registry-provided checksums.
+    fn supports_checksums(&self) -> bool {
+        false
+    }
+
+    // Lock files must pin git sources to an exact revision.
+    fn requires_precise(&self) -> bool {
+        true
+    }
+
+    fn source_id(&self) -> &SourceId {
+        &self.source_id
+    }
+
+    /// Resolves the configured reference to a revision (updating the local
+    /// database from the remote unless pinned or offline), materializes a
+    /// checkout of it, and points `path_source` at the checkout.
+    fn update(&mut self) -> CargoResult<()> {
+        // Take the cache-wide lock while we mutate db/checkouts below.
+        let lock =
+            self.config
+                .git_path()
+                .open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
+
+        let db_path = lock.parent().join("db").join(&self.ident);
+
+        if self.config.cli_unstable().offline && !db_path.exists() {
+            bail!(
+                "can't checkout from '{}': you are in the offline mode (-Z offline)",
+                self.remote.url()
+            );
+        }
+
+        // Resolve our reference to an actual revision, and check if the
+        // database already has that revision. If it does, we just load a
+        // database pinned at that revision, and if we don't we issue an update
+        // to try to find the revision.
+        let actual_rev = self.remote.rev_for(&db_path, &self.reference);
+        let should_update = actual_rev.is_err() || self.source_id.precise().is_none();
+
+        let (db, actual_rev) = if should_update && !self.config.cli_unstable().offline {
+            self.config.shell().status(
+                "Updating",
+                format!("git repository `{}`", self.remote.url()),
+            )?;
+
+            trace!("updating git source `{:?}`", self.remote);
+
+            self.remote
+                .checkout(&db_path, &self.reference, self.config)?
+        } else {
+            (self.remote.db_at(&db_path)?, actual_rev.unwrap())
+        };
+
+        // Don’t use the full hash,
+        // to contribute less to reaching the path length limit on Windows:
+        // https://github.com/servo/servo/pull/14397
+        let short_id = db.to_short_id(&actual_rev).unwrap();
+
+        let checkout_path = lock.parent()
+            .join("checkouts")
+            .join(&self.ident)
+            .join(short_id.as_str());
+
+        // Copy the database to the checkout location. After this we could drop
+        // the lock on the database as we no longer needed it, but we leave it
+        // in scope so the destructors here won't tamper with too much.
+        // Checkout is immutable, so we don't need to protect it with a lock once
+        // it is created.
+        db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
+
+        // Pin the source id to the revision we actually resolved so the
+        // nested `PathSource` reports precise packages.
+        let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
+        let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config);
+
+        self.path_source = Some(path_source);
+        self.rev = Some(actual_rev);
+        self.path_source.as_mut().unwrap().update()
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+        trace!(
+            "getting packages for package id `{}` from `{:?}`",
+            id,
+            self.remote
+        );
+        self.path_source
+            .as_mut()
+            .expect("BUG: update() must be called before get()")
+            .download(id)
+    }
+
+    fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+        panic!("no download should have started")
+    }
+
+    // The resolved revision uniquely fingerprints the checkout contents.
+    fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
+        Ok(self.rev.as_ref().unwrap().to_string())
+    }
+
+    fn describe(&self) -> String {
+        format!("git repository {}", self.source_id)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    // Unit tests for `ident` and the URL-canonicalization rules it relies on
+    // (trailing slash, GitHub case folding, `.git` suffix, scheme-agnostic
+    // hashing, and rejection of cannot-be-a-base URLs).
+    use url::Url;
+    use super::ident;
+    use util::ToUrl;
+
+    #[test]
+    pub fn test_url_to_path_ident_with_path() {
+        let ident = ident(&url("https://github.com/carlhuda/cargo")).unwrap();
+        assert!(ident.starts_with("cargo-"));
+    }
+
+    #[test]
+    pub fn test_url_to_path_ident_without_path() {
+        let ident = ident(&url("https://github.com")).unwrap();
+        assert!(ident.starts_with("_empty-"));
+    }
+
+    #[test]
+    fn test_canonicalize_idents_by_stripping_trailing_url_slash() {
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/")).unwrap();
+        let ident2 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        assert_eq!(ident1, ident2);
+    }
+
+    #[test]
+    fn test_canonicalize_idents_by_lowercasing_github_urls() {
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        let ident2 = ident(&url("https://github.com/pistondevelopers/piston")).unwrap();
+        assert_eq!(ident1, ident2);
+    }
+
+    #[test]
+    fn test_canonicalize_idents_by_stripping_dot_git() {
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git")).unwrap();
+        assert_eq!(ident1, ident2);
+    }
+
+    #[test]
+    fn test_canonicalize_idents_different_protocols() {
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        let ident2 = ident(&url("git://github.com/PistonDevelopers/piston")).unwrap();
+        assert_eq!(ident1, ident2);
+    }
+
+    #[test]
+    fn test_canonicalize_cannot_be_a_base_urls() {
+        assert!(ident(&url("github.com:PistonDevelopers/piston")).is_err());
+        assert!(ident(&url("google.com:PistonDevelopers/piston")).is_err());
+    }
+
+    // Test helper: parse a URL string, panicking on invalid input.
+    fn url(s: &str) -> Url {
+        s.to_url().unwrap()
+    }
+}
diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs
new file mode 100644 (file)
index 0000000..66301c1
--- /dev/null
@@ -0,0 +1,901 @@
+use std::env;
+use std::fmt;
+use std::fs::{self, File};
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use curl::easy::{Easy, List};
+use git2::{self, ObjectType};
+use serde::ser;
+use url::Url;
+
+use core::GitReference;
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+use util::paths;
+use util::process_builder::process;
+use util::{internal, network, Config, Progress, ToUrl};
+
+/// A resolved git object id (commit hash), wrapping `git2::Oid`.
+#[derive(PartialEq, Clone, Debug)]
+pub struct GitRevision(git2::Oid);
+
+impl ser::Serialize for GitRevision {
+    // Serialize as the `Display` (hex string) form rather than raw bytes.
+    fn serialize<S: ser::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+        serialize_str(self, s)
+    }
+}
+
+fn serialize_str<T, S>(t: &T, s: S) -> Result<S::Ok, S::Error>
+where
+    T: fmt::Display,
+    S: ser::Serializer,
+{
+    s.collect_str(t)
+}
+
+impl fmt::Display for GitRevision {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.0, f)
+    }
+}
+
+/// Shortened (abbreviated but unambiguous) form of a git object id, as
+/// produced by `git2`'s `short_id`.
+pub struct GitShortID(git2::Buf);
+
+impl GitShortID {
+    /// Returns the short id as a string.
+    ///
+    /// Panics if the buffer is not valid UTF-8 (hex ids always are).
+    pub fn as_str(&self) -> &str {
+        self.0.as_str().unwrap()
+    }
+}
+
+/// `GitRemote` represents a remote repository. It gets cloned into a local
+/// `GitDatabase`.
+#[derive(PartialEq, Clone, Debug, Serialize)]
+pub struct GitRemote {
+    #[serde(serialize_with = "serialize_str")]
+    url: Url,
+}
+
+/// `GitDatabase` is a local clone of a remote repository's database. Multiple
+/// `GitCheckouts` can be cloned from this `GitDatabase`.
+#[derive(Serialize)]
+pub struct GitDatabase {
+    remote: GitRemote,
+    path: PathBuf,
+    #[serde(skip_serializing)]
+    repo: git2::Repository,
+}
+
+/// `GitCheckout` is a local checkout of a particular revision. Calling
+/// `clone_into` with a reference will resolve the reference into a revision,
+/// and return a `CargoError` if no revision for that reference was found.
+#[derive(Serialize)]
+pub struct GitCheckout<'a> {
+    database: &'a GitDatabase,
+    location: PathBuf,
+    revision: GitRevision,
+    #[serde(skip_serializing)]
+    repo: git2::Repository,
+}
+
+// Implementations
+
+impl GitRemote {
+    pub fn new(url: &Url) -> GitRemote {
+        GitRemote { url: url.clone() }
+    }
+
+    pub fn url(&self) -> &Url {
+        &self.url
+    }
+
+    /// Resolves `reference` against the local database at `path` without
+    /// touching the network; errors if the database doesn't exist or the
+    /// reference isn't present in it.
+    pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult<GitRevision> {
+        reference.resolve(&self.db_at(path)?.repo)
+    }
+
+    /// Ensures a local database at `into` contains `reference`.
+    ///
+    /// Tries to fetch into an existing repository first; if the repository
+    /// can't be opened or the reference still doesn't resolve after the
+    /// fetch, falls back to a fresh bare clone. Returns the database and the
+    /// revision the reference resolved to.
+    pub fn checkout(
+        &self,
+        into: &Path,
+        reference: &GitReference,
+        cargo_config: &Config,
+    ) -> CargoResult<(GitDatabase, GitRevision)> {
+        let mut repo_and_rev = None;
+        if let Ok(mut repo) = git2::Repository::open(into) {
+            self.fetch_into(&mut repo, cargo_config)
+                .chain_err(|| format!("failed to fetch into {}", into.display()))?;
+            if let Ok(rev) = reference.resolve(&repo) {
+                repo_and_rev = Some((repo, rev));
+            }
+        }
+        let (repo, rev) = match repo_and_rev {
+            Some(pair) => pair,
+            None => {
+                let repo = self.clone_into(into, cargo_config)
+                    .chain_err(|| format!("failed to clone into: {}", into.display()))?;
+                let rev = reference.resolve(&repo)?;
+                (repo, rev)
+            }
+        };
+
+        Ok((
+            GitDatabase {
+                remote: self.clone(),
+                path: into.to_path_buf(),
+                repo,
+            },
+            rev,
+        ))
+    }
+
+    /// Opens the existing local database at `db_path` (no fetching).
+    pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
+        let repo = git2::Repository::open(db_path)?;
+        Ok(GitDatabase {
+            remote: self.clone(),
+            path: db_path.to_path_buf(),
+            repo,
+        })
+    }
+
+    /// Fetches all branches from `self.url` into an already-open repository.
+    fn fetch_into(&self, dst: &mut git2::Repository, cargo_config: &Config) -> CargoResult<()> {
+        // Create a local anonymous remote in the repository to fetch the url
+        let refspec = "refs/heads/*:refs/heads/*";
+        fetch(dst, &self.url, refspec, cargo_config)
+    }
+
+    /// Initializes a bare repository at `dst` (wiping anything already
+    /// there) and fetches all branches into it.
+    fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult<git2::Repository> {
+        if fs::metadata(&dst).is_ok() {
+            paths::remove_dir_all(dst)?;
+        }
+        fs::create_dir_all(dst)?;
+        let mut repo = git2::Repository::init_bare(dst)?;
+        fetch(
+            &mut repo,
+            &self.url,
+            "refs/heads/*:refs/heads/*",
+            cargo_config,
+        )?;
+        Ok(repo)
+    }
+}
+
+impl GitDatabase {
+    /// Materializes a checkout of `rev` at `dest`, reusing an existing
+    /// checkout if it's already fresh, then updates submodules.
+    pub fn copy_to(
+        &self,
+        rev: GitRevision,
+        dest: &Path,
+        cargo_config: &Config,
+    ) -> CargoResult<GitCheckout> {
+        let mut checkout = None;
+        if let Ok(repo) = git2::Repository::open(dest) {
+            let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
+            if !co.is_fresh() {
+                // After a successful fetch operation do a sanity check to
+                // ensure we've got the object in our database to reset to. This
+                // can fail sometimes for corrupt repositories where the fetch
+                // operation succeeds but the object isn't actually there.
+                co.fetch(cargo_config)?;
+                if co.has_object() {
+                    co.reset(cargo_config)?;
+                    assert!(co.is_fresh());
+                    checkout = Some(co);
+                }
+            } else {
+                checkout = Some(co);
+            }
+        };
+        // No reusable checkout: clone from scratch.
+        let checkout = match checkout {
+            Some(c) => c,
+            None => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
+        };
+        checkout.update_submodules(cargo_config)?;
+        Ok(checkout)
+    }
+
+    /// Abbreviates `revision` to a short, unambiguous id.
+    pub fn to_short_id(&self, revision: &GitRevision) -> CargoResult<GitShortID> {
+        let obj = self.repo.find_object(revision.0, None)?;
+        Ok(GitShortID(obj.short_id()?))
+    }
+
+    /// Succeeds iff `reference` can be resolved in this database.
+    pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
+        self.repo.revparse_single(reference)?;
+        Ok(())
+    }
+}
+
+impl GitReference {
+    /// Resolves this reference to a concrete commit id in `repo`.
+    ///
+    /// Tags are peeled to the commit they point at; branches use the local
+    /// branch's target; `Rev` goes through `revparse_single`, unwrapping an
+    /// annotated tag to its target commit if needed.
+    fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
+        let id = match *self {
+            GitReference::Tag(ref s) => (|| -> CargoResult<git2::Oid> {
+                let refname = format!("refs/tags/{}", s);
+                let id = repo.refname_to_id(&refname)?;
+                let obj = repo.find_object(id, None)?;
+                // Peel in case the tag is annotated (points at a tag object).
+                let obj = obj.peel(ObjectType::Commit)?;
+                Ok(obj.id())
+            })()
+                .chain_err(|| format!("failed to find tag `{}`", s))?,
+            GitReference::Branch(ref s) => {
+                let b = repo.find_branch(s, git2::BranchType::Local)
+                    .chain_err(|| format!("failed to find branch `{}`", s))?;
+                b.get()
+                    .target()
+                    .ok_or_else(|| format_err!("branch `{}` did not have a target", s))?
+            }
+            GitReference::Rev(ref s) => {
+                let obj = repo.revparse_single(s)?;
+                match obj.as_tag() {
+                    Some(tag) => tag.target_id(),
+                    None => obj.id(),
+                }
+            }
+        };
+        Ok(GitRevision(id))
+    }
+}
+
+impl<'a> GitCheckout<'a> {
+    /// Wraps an already-opened checkout repository at `path`, pinned to
+    /// `revision` from `database`.
+    fn new(
+        path: &Path,
+        database: &'a GitDatabase,
+        revision: GitRevision,
+        repo: git2::Repository,
+    ) -> GitCheckout<'a> {
+        GitCheckout {
+            location: path.to_path_buf(),
+            database,
+            revision,
+            repo,
+        }
+    }
+
+    /// Creates a fresh checkout at `into` by locally cloning the database,
+    /// then resetting the work tree to `revision`. Any pre-existing
+    /// directory at `into` is removed first.
+    fn clone_into(
+        into: &Path,
+        database: &'a GitDatabase,
+        revision: GitRevision,
+        config: &Config,
+    ) -> CargoResult<GitCheckout<'a>> {
+        let dirname = into.parent().unwrap();
+        fs::create_dir_all(&dirname).chain_err(|| format!("Couldn't mkdir {}", dirname.display()))?;
+        if into.exists() {
+            paths::remove_dir_all(into)?;
+        }
+
+        // we're doing a local filesystem-to-filesystem clone so there should
+        // be no need to respect global configuration options, so pass in
+        // an empty instance of `git2::Config` below.
+        let git_config = git2::Config::new()?;
+
+        // Clone the repository, but make sure we use the "local" option in
+        // libgit2 which will attempt to use hardlinks to set up the database.
+        // This should speed up the clone operation quite a bit if it works.
+        //
+        // Note that we still use the same fetch options because while we don't
+        // need authentication information we may want progress bars and such.
+        let url = database.path.to_url()?;
+        let mut repo = None;
+        with_fetch_options(&git_config, &url, config, &mut |fopts| {
+            let mut checkout = git2::build::CheckoutBuilder::new();
+            checkout.dry_run(); // we'll do this below during a `reset`
+
+            let r = git2::build::RepoBuilder::new()
+                // use hard links and/or copy the database, we're doing a
+                // filesystem clone so this'll speed things up quite a bit.
+                .clone_local(git2::build::CloneLocal::Local)
+                .with_checkout(checkout)
+                .fetch_options(fopts)
+                // .remote_create(|repo, _name, url| repo.remote_anonymous(url))
+                .clone(url.as_str(), into)?;
+            repo = Some(r);
+            Ok(())
+        })?;
+        let repo = repo.unwrap();
+
+        let checkout = GitCheckout::new(into, database, revision, repo);
+        checkout.reset(config)?;
+        Ok(checkout)
+    }
+
+    /// A checkout is "fresh" when HEAD is the pinned revision and the
+    /// `.cargo-ok` sentinel exists (i.e. `reset` completed uninterrupted).
+    fn is_fresh(&self) -> bool {
+        match self.repo.revparse_single("HEAD") {
+            Ok(ref head) if head.id() == self.revision.0 => {
+                // See comments in reset() for why we check this
+                self.location.join(".cargo-ok").exists()
+            }
+            _ => false,
+        }
+    }
+
+    /// Re-fetches all branches from the local database into this checkout.
+    fn fetch(&mut self, cargo_config: &Config) -> CargoResult<()> {
+        info!("fetch {}", self.repo.path().display());
+        let url = self.database.path.to_url()?;
+        let refspec = "refs/heads/*:refs/heads/*";
+        fetch(&mut self.repo, &url, refspec, cargo_config)?;
+        Ok(())
+    }
+
+    /// Whether the pinned revision's object is present in this repository.
+    fn has_object(&self) -> bool {
+        self.repo.find_object(self.revision.0, None).is_ok()
+    }
+
+    /// Hard-resets the work tree to the pinned revision, using a `.cargo-ok`
+    /// sentinel file so interrupted resets are detectable by `is_fresh`.
+    fn reset(&self, config: &Config) -> CargoResult<()> {
+        // If we're interrupted while performing this reset (e.g. we die because
+        // of a signal) Cargo needs to be sure to try to check out this repo
+        // again on the next go-round.
+        //
+        // To enable this we have a dummy file in our checkout, .cargo-ok, which
+        // if present means that the repo has been successfully reset and is
+        // ready to go. Hence if we start to do a reset, we make sure this file
+        // *doesn't* exist, and then once we're done we create the file.
+        let ok_file = self.location.join(".cargo-ok");
+        let _ = paths::remove_file(&ok_file);
+        info!("reset {} to {}", self.repo.path().display(), self.revision);
+        let object = self.repo.find_object(self.revision.0, None)?;
+        reset(&self.repo, &object, config)?;
+        File::create(ok_file)?;
+        Ok(())
+    }
+
+    /// Recursively initializes and updates every submodule of the checkout.
+    fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> {
+        return update_submodules(&self.repo, cargo_config);
+
+        // Updates every direct submodule of `repo`, recursing via
+        // `update_submodule`.
+        fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> {
+            info!("update submodules for: {:?}", repo.workdir().unwrap());
+
+            for mut child in repo.submodules()? {
+                update_submodule(repo, &mut child, cargo_config).chain_err(|| {
+                    format!(
+                        "failed to update submodule `{}`",
+                        child.name().unwrap_or("")
+                    )
+                })?;
+            }
+            Ok(())
+        }
+
+        // Brings one submodule up to the commit recorded in the parent repo,
+        // cloning/fetching as needed, then recurses into it.
+        fn update_submodule(
+            parent: &git2::Repository,
+            child: &mut git2::Submodule,
+            cargo_config: &Config,
+        ) -> CargoResult<()> {
+            child.init(false)?;
+            let url = child
+                .url()
+                .ok_or_else(|| internal("non-utf8 url for submodule"))?;
+
+            // A submodule which is listed in .gitmodules but not actually
+            // checked out will not have a head id, so we should ignore it.
+            let head = match child.head_id() {
+                Some(head) => head,
+                None => return Ok(()),
+            };
+
+            // If the submodule hasn't been checked out yet, we need to
+            // clone it. If it has been checked out and the head is the same
+            // as the submodule's head, then we can skip an update and keep
+            // recursing.
+            let head_and_repo = child.open().and_then(|repo| {
+                let target = repo.head()?.target();
+                Ok((target, repo))
+            });
+            let mut repo = match head_and_repo {
+                Ok((head, repo)) => {
+                    if child.head_id() == head {
+                        return update_submodules(&repo, cargo_config);
+                    }
+                    repo
+                }
+                Err(..) => {
+                    let path = parent.workdir().unwrap().join(child.path());
+                    let _ = paths::remove_dir_all(&path);
+                    git2::Repository::init(&path)?
+                }
+            };
+
+            // Fetch data from origin and reset to the head commit
+            let refspec = "refs/heads/*:refs/heads/*";
+            let url = url.to_url()?;
+            fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| {
+                internal(format!(
+                    "failed to fetch submodule `{}` from {}",
+                    child.name().unwrap_or(""),
+                    url
+                ))
+            })?;
+
+            let obj = repo.find_object(head, None)?;
+            reset(&repo, &obj, cargo_config)?;
+            update_submodules(&repo, cargo_config)
+        }
+    }
+}
+
/// Prepare the authentication callbacks for cloning a git repository.
///
/// The main purpose of this function is to construct the "authentication
/// callback" which is used to clone a repository. This callback will attempt to
/// find the right authentication on the system (without user input) and will
/// guide libgit2 in doing so.
///
/// The callback is provided `allowed` types of credentials, and we try to do as
/// much as possible based on that:
///
/// * Prioritize SSH keys from the local ssh agent as they're likely the most
///   reliable. The username here is prioritized from the credential
///   callback, then from whatever is configured in git itself, and finally
///   we fall back to the generic user of `git`.
///
/// * If a username/password is allowed, then we fallback to git2-rs's
///   implementation of the credential helper. This is what is configured
///   with `credential.helper` in git, and is the interface for the OSX
///   keychain, for example.
///
/// * After the above two have failed, we just kinda grapple attempting to
///   return *something*.
///
/// If any form of authentication fails, libgit2 will repeatedly ask us for
/// credentials until we give it a reason to not do so. To ensure we don't
/// just sit here looping forever we keep track of authentications we've
/// attempted and we don't try the same ones again.
fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult<T>
where
    F: FnMut(&mut git2::Credentials) -> CargoResult<T>,
{
    let mut cred_helper = git2::CredentialHelper::new(url);
    cred_helper.config(cfg);

    // State shared with the credential callback below. It records what was
    // attempted during the first authentication session so that, afterwards,
    // we know whether to retry with explicit usernames and how to phrase any
    // final error message.
    let mut ssh_username_requested = false;
    let mut cred_helper_bad = None;
    let mut ssh_agent_attempts = Vec::new();
    let mut any_attempts = false;
    let mut tried_sshkey = false;

    let mut res = f(&mut |url, username, allowed| {
        any_attempts = true;
        // libgit2's "USERNAME" authentication actually means that it's just
        // asking us for a username to keep going. This is currently only really
        // used for SSH authentication and isn't really an authentication type.
        // The logic currently looks like:
        //
        //      let user = ...;
        //      if (user.is_null())
        //          user = callback(USERNAME, null, ...);
        //
        //      callback(SSH_KEY, user, ...)
        //
        // So if we're being called here then we know that (a) we're using ssh
        // authentication and (b) no username was specified in the URL that
        // we're trying to clone. We need to guess an appropriate username here,
        // but that may involve a few attempts. Unfortunately we can't switch
        // usernames during one authentication session with libgit2, so to
        // handle this we bail out of this authentication session after setting
        // the flag `ssh_username_requested`, and then we handle this below.
        if allowed.contains(git2::CredentialType::USERNAME) {
            debug_assert!(username.is_none());
            ssh_username_requested = true;
            return Err(git2::Error::from_str("gonna try usernames later"));
        }

        // An "SSH_KEY" authentication indicates that we need some sort of SSH
        // authentication. This can currently either come from the ssh-agent
        // process or from a raw in-memory SSH key. Cargo only supports using
        // ssh-agent currently.
        //
        // If we get called with this then the only way that should be possible
        // is if a username is specified in the URL itself (e.g. `username` is
        // Some), hence the unwrap() here. We try custom usernames down below.
        if allowed.contains(git2::CredentialType::SSH_KEY) && !tried_sshkey {
            // If ssh-agent authentication fails, libgit2 will keep
            // calling this callback asking for other authentication
            // methods to try. Make sure we only try ssh-agent once,
            // to avoid looping forever.
            tried_sshkey = true;
            let username = username.unwrap();
            debug_assert!(!ssh_username_requested);
            ssh_agent_attempts.push(username.to_string());
            return git2::Cred::ssh_key_from_agent(username);
        }

        // Sometimes libgit2 will ask for a username/password in plaintext. This
        // is where Cargo would have an interactive prompt if we supported it,
        // but we currently don't! Right now the only way we support fetching a
        // plaintext password is through the `credential.helper` support, so
        // fetch that here.
        if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) {
            let r = git2::Cred::credential_helper(cfg, url, username);
            cred_helper_bad = Some(r.is_err());
            return r;
        }

        // I'm... not sure what the DEFAULT kind of authentication is, but seems
        // easy to support?
        if allowed.contains(git2::CredentialType::DEFAULT) {
            return git2::Cred::default();
        }

        // Whelp, we tried our best
        Err(git2::Error::from_str("no authentication available"))
    });

    // Ok, so if it looks like we're going to be doing ssh authentication, we
    // want to try a few different usernames as one wasn't specified in the URL
    // for us to use. In order, we'll try:
    //
    // * A credential helper's username for this URL, if available.
    // * This account's username.
    // * "git"
    //
    // We have to restart the authentication session each time (due to
    // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we
    // call our callback, `f`, in a loop here.
    if ssh_username_requested {
        debug_assert!(res.is_err());
        // Candidates are pushed in reverse priority order and popped off the
        // back, so the credential helper's username is tried first.
        let mut attempts = Vec::new();
        attempts.push("git".to_string());
        if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) {
            attempts.push(s);
        }
        if let Some(ref s) = cred_helper.username {
            attempts.push(s.clone());
        }

        while let Some(s) = attempts.pop() {
            // We should get `USERNAME` first, where we just return our attempt,
            // and then after that we should get `SSH_KEY`. If the first attempt
            // fails we'll get called again, but we don't have another option so
            // we bail out.
            //
            // Note: this counter intentionally shadows the `attempts` username
            // list above; it counts callback invocations for this one username.
            let mut attempts = 0;
            res = f(&mut |_url, username, allowed| {
                if allowed.contains(git2::CredentialType::USERNAME) {
                    return git2::Cred::username(&s);
                }
                if allowed.contains(git2::CredentialType::SSH_KEY) {
                    debug_assert_eq!(Some(&s[..]), username);
                    attempts += 1;
                    if attempts == 1 {
                        ssh_agent_attempts.push(s.to_string());
                        return git2::Cred::ssh_key_from_agent(&s);
                    }
                }
                Err(git2::Error::from_str("no authentication available"))
            });

            // If we made two attempts then that means:
            //
            // 1. A username was requested, we returned `s`.
            // 2. An ssh key was requested, we returned to look up `s` in the
            //    ssh agent.
            // 3. For whatever reason that lookup failed, so we were asked again
            //    for another mode of authentication.
            //
            // Essentially, if `attempts == 2` then in theory the only error was
            // that this username failed to authenticate (e.g. no other network
            // errors happened). Otherwise something else is funny so we bail
            // out.
            if attempts != 2 {
                break;
            }
        }
    }

    if res.is_ok() || !any_attempts {
        return res.map_err(From::from);
    }

    // In the case of an authentication failure (where we tried something) then
    // we try to give a more helpful error message about precisely what we
    // tried.
    let res = res.map_err(CargoError::from).chain_err(|| {
        let mut msg = "failed to authenticate when downloading \
                       repository"
            .to_string();
        if !ssh_agent_attempts.is_empty() {
            let names = ssh_agent_attempts
                .iter()
                .map(|s| format!("`{}`", s))
                .collect::<Vec<_>>()
                .join(", ");
            msg.push_str(&format!(
                "\nattempted ssh-agent authentication, but \
                 none of the usernames {} succeeded",
                names
            ));
        }
        if let Some(failed_cred_helper) = cred_helper_bad {
            if failed_cred_helper {
                msg.push_str(
                    "\nattempted to find username/password via \
                     git's `credential.helper` support, but failed",
                );
            } else {
                msg.push_str(
                    "\nattempted to find username/password via \
                     `credential.helper`, but maybe the found \
                     credentials were incorrect",
                );
            }
        }
        msg
    })?;
    Ok(res)
}
+
+fn reset(repo: &git2::Repository, obj: &git2::Object, config: &Config) -> CargoResult<()> {
+    let mut pb = Progress::new("Checkout", config);
+    let mut opts = git2::build::CheckoutBuilder::new();
+    opts.progress(|_, cur, max| {
+        drop(pb.tick(cur, max));
+    });
+    repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?;
+    Ok(())
+}
+
/// Builds `git2::FetchOptions` wired up with Cargo's authentication
/// (`with_authentication`), shell progress reporting, and network retry
/// handling, then hands them to `cb` to perform the actual fetch.
pub fn with_fetch_options(
    git_config: &git2::Config,
    url: &Url,
    config: &Config,
    cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>,
) -> CargoResult<()> {
    let mut progress = Progress::new("Fetch", config);
    network::with_retry(config, || {
        with_authentication(url.as_str(), git_config, |f| {
            let mut rcb = git2::RemoteCallbacks::new();
            rcb.credentials(f);

            // Returning `false` from this callback tells libgit2 to cancel
            // the transfer, so a failing progress bar aborts the fetch.
            rcb.transfer_progress(|stats| {
                progress
                    .tick(stats.indexed_objects(), stats.total_objects())
                    .is_ok()
            });

            // Create a local anonymous remote in the repository to fetch the
            // url
            let mut opts = git2::FetchOptions::new();
            opts.remote_callbacks(rcb)
                .download_tags(git2::AutotagOption::All);
            cb(opts)
        })?;
        Ok(())
    })
}
+
/// Fetches `refspec` from `url` into `repo`, honoring Cargo's `--frozen` and
/// offline settings as well as the `net.git-fetch-with-cli` escape hatch.
///
/// GitHub URLs first try a cheap HTTP fast path to detect an already
/// up-to-date clone; a repository that looks corrupt is reinitialized once
/// and the fetch retried.
pub fn fetch(
    repo: &mut git2::Repository,
    url: &Url,
    refspec: &str,
    config: &Config,
) -> CargoResult<()> {
    if config.frozen() {
        bail!(
            "attempting to update a git repository, but --frozen \
             was specified"
        )
    }
    if !config.network_allowed() {
        bail!("can't update a git repository in the offline mode")
    }

    // If we're fetching from GitHub, attempt GitHub's special fast path for
    // testing if we've already got an up-to-date copy of the repository
    if url.host_str() == Some("github.com") {
        if let Ok(oid) = repo.refname_to_id("refs/remotes/origin/master") {
            let mut handle = config.http()?.borrow_mut();
            debug!("attempting GitHub fast path for {}", url);
            if github_up_to_date(&mut handle, url, &oid) {
                return Ok(());
            } else {
                debug!("fast path failed, falling back to a git fetch");
            }
        }
    }

    // We reuse repositories quite a lot, so before we go through and update the
    // repo check to see if it's a little too old and could benefit from a gc.
    // In theory this shouldn't be too too expensive compared to the network
    // request we're about to issue.
    maybe_gc_repo(repo)?;

    // Unfortunately `libgit2` is notably lacking in the realm of authentication
    // when compared to the `git` command line. As a result, allow an escape
    // hatch for users that would prefer to use `git`-the-CLI for fetching
    // repositories instead of `libgit2`-the-library. This should make more
    // flavors of authentication possible while also still giving us all the
    // speed and portability of using `libgit2`.
    if let Some(val) = config.get_bool("net.git-fetch-with-cli")? {
        if val.val {
            return fetch_with_cli(repo, url, refspec, config);
        }
    }

    debug!("doing a fetch for {}", url);
    let git_config = git2::Config::open_default()?;
    with_fetch_options(&git_config, url, config, &mut |mut opts| {
        // The `fetch` operation here may fail spuriously due to a corrupt
        // repository. It could also fail, however, for a whole slew of other
        // reasons (aka network related reasons). We want Cargo to automatically
        // recover from corrupt repositories, but we don't want Cargo to stomp
        // over other legitimate errors.
        //
        // Consequently we save off the error of the `fetch` operation and if it
        // looks like a "corrupt repo" error then we blow away the repo and try
        // again. If it looks like any other kind of error, or if we've already
        // blown away the repository, then we want to return the error as-is.
        let mut repo_reinitialized = false;
        loop {
            debug!("initiating fetch of {} from {}", refspec, url);
            let res = repo.remote_anonymous(url.as_str())?
                .fetch(&[refspec], Some(&mut opts), None);
            let err = match res {
                Ok(()) => break,
                Err(e) => e,
            };
            debug!("fetch failed: {}", err);

            // `ErrorClass::Reference` failures are treated as the signature of
            // a corrupt repository; reinitialize at most once.
            if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
                repo_reinitialized = true;
                debug!(
                    "looks like this is a corrupt repository, reinitializing \
                     and trying again"
                );
                if reinitialize(repo).is_ok() {
                    continue;
                }
            }

            return Err(err.into());
        }
        Ok(())
    })
}
+
+fn fetch_with_cli(
+    repo: &mut git2::Repository,
+    url: &Url,
+    refspec: &str,
+    config: &Config,
+) -> CargoResult<()> {
+    let mut cmd = process("git");
+    cmd.arg("fetch")
+        .arg("--tags") // fetch all tags
+        .arg("--quiet")
+        .arg(url.to_string())
+        .arg(refspec)
+        .cwd(repo.path());
+    config.shell().verbose(|s| s.status("Running", &cmd.to_string()))?;
+    cmd.exec()?;
+    Ok(())
+}
+
+/// Cargo has a bunch of long-lived git repositories in its global cache and
+/// some, like the index, are updated very frequently. Right now each update
+/// creates a new "pack file" inside the git database, and over time this can
+/// cause bad performance and bad current behavior in libgit2.
+///
+/// One pathological use case today is where libgit2 opens hundreds of file
+/// descriptors, getting us dangerously close to blowing out the OS limits of
+/// how many fds we can have open. This is detailed in #4403.
+///
+/// To try to combat this problem we attempt a `git gc` here. Note, though, that
+/// we may not even have `git` installed on the system! As a result we
+/// opportunistically try a `git gc` when the pack directory looks too big, and
+/// failing that we just blow away the repository and start over.
+fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {
+    // Here we arbitrarily declare that if you have more than 100 files in your
+    // `pack` folder that we need to do a gc.
+    let entries = match repo.path().join("objects/pack").read_dir() {
+        Ok(e) => e.count(),
+        Err(_) => {
+            debug!("skipping gc as pack dir appears gone");
+            return Ok(());
+        }
+    };
+    let max = env::var("__CARGO_PACKFILE_LIMIT")
+        .ok()
+        .and_then(|s| s.parse::<usize>().ok())
+        .unwrap_or(100);
+    if entries < max {
+        debug!("skipping gc as there's only {} pack files", entries);
+        return Ok(());
+    }
+
+    // First up, try a literal `git gc` by shelling out to git. This is pretty
+    // likely to fail though as we may not have `git` installed. Note that
+    // libgit2 doesn't currently implement the gc operation, so there's no
+    // equivalent there.
+    match Command::new("git")
+        .arg("gc")
+        .current_dir(repo.path())
+        .output()
+    {
+        Ok(out) => {
+            debug!(
+                "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
+                out.status,
+                String::from_utf8_lossy(&out.stdout),
+                String::from_utf8_lossy(&out.stderr)
+            );
+            if out.status.success() {
+                let new = git2::Repository::open(repo.path())?;
+                mem::replace(repo, new);
+                return Ok(());
+            }
+        }
+        Err(e) => debug!("git-gc failed to spawn: {}", e),
+    }
+
+    // Alright all else failed, let's start over.
+    reinitialize(repo)
+}
+
/// Blows away and recreates the git repository at `repo`'s path, leaving
/// `*repo` pointing at the freshly initialized (empty) repository.
fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {
    // Here we want to drop the current repository object pointed to by `repo`,
    // so we initialize temporary repository in a sub-folder, blow away the
    // existing git folder, and then recreate the git repo. Finally we blow away
    // the `tmp` folder we allocated.
    let path = repo.path().to_path_buf();
    debug!("reinitializing git repo at {:?}", path);
    let tmp = path.join("tmp");
    // `.path()` ends with `.git` for a checkout but not for what we treat as
    // a bare repo; remember which flavor to recreate below, before `repo` is
    // replaced.
    let bare = !repo.path().ends_with(".git");
    *repo = git2::Repository::init(&tmp)?;
    for entry in path.read_dir()? {
        let entry = entry?;
        // Skip `tmp`: it holds the repository `repo` currently points at.
        if entry.file_name().to_str() == Some("tmp") {
            continue;
        }
        let path = entry.path();
        // Best-effort removal; files and directories need different calls.
        drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
    }
    if bare {
        *repo = git2::Repository::init_bare(path)?;
    } else {
        *repo = git2::Repository::init(path)?;
    }
    paths::remove_dir_all(&tmp)?;
    Ok(())
}
+
+/// Updating the index is done pretty regularly so we want it to be as fast as
+/// possible. For registries hosted on GitHub (like the crates.io index) there's
+/// a fast path available to use [1] to tell us that there's no updates to be
+/// made.
+///
+/// This function will attempt to hit that fast path and verify that the `oid`
+/// is actually the current `master` branch of the repository. If `true` is
+/// returned then no update needs to be performed, but if `false` is returned
+/// then the standard update logic still needs to happen.
+///
+/// [1]: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference
+///
+/// Note that this function should never cause an actual failure because it's
+/// just a fast path. As a result all errors are ignored in this function and we
+/// just return a `bool`. Any real errors will be reported through the normal
+/// update path above.
+fn github_up_to_date(handle: &mut Easy, url: &Url, oid: &git2::Oid) -> bool {
+    macro_rules! try {
+        ($e:expr) => (match $e {
+            Some(e) => e,
+            None => return false,
+        })
+    }
+
+    // This expects GitHub urls in the form `github.com/user/repo` and nothing
+    // else
+    let mut pieces = try!(url.path_segments());
+    let username = try!(pieces.next());
+    let repo = try!(pieces.next());
+    if pieces.next().is_some() {
+        return false;
+    }
+
+    let url = format!(
+        "https://api.github.com/repos/{}/{}/commits/master",
+        username, repo
+    );
+    try!(handle.get(true).ok());
+    try!(handle.url(&url).ok());
+    try!(handle.useragent("cargo").ok());
+    let mut headers = List::new();
+    try!(headers.append("Accept: application/vnd.github.3.sha").ok());
+    try!(headers.append(&format!("If-None-Match: \"{}\"", oid)).ok());
+    try!(handle.http_headers(headers).ok());
+    try!(handle.perform().ok());
+
+    try!(handle.response_code().ok()) == 304
+}
diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs
new file mode 100644 (file)
index 0000000..d96a056
--- /dev/null
@@ -0,0 +1,13 @@
//! The different kinds of package sources Cargo can read crates from
//! (directory, git, path, registry, replaced), with each submodule's primary
//! types re-exported here for convenience.

pub use self::config::SourceConfigMap;
pub use self::directory::DirectorySource;
pub use self::git::GitSource;
pub use self::path::PathSource;
pub use self::registry::{RegistrySource, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
pub use self::replaced::ReplacedSource;

pub mod config;
pub mod directory;
pub mod git;
pub mod path;
pub mod registry;
pub mod replaced;
diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
new file mode 100644 (file)
index 0000000..6115b62
--- /dev/null
@@ -0,0 +1,568 @@
+use std::fmt::{self, Debug, Formatter};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use filetime::FileTime;
+use git2;
+use glob::Pattern;
+use ignore::Match;
+use ignore::gitignore::GitignoreBuilder;
+
+use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
+use core::source::MaybePackage;
+use ops;
+use util::{self, internal, CargoResult};
+use util::paths;
+use util::Config;
+
/// A package source backed by a directory on the local filesystem.
pub struct PathSource<'cfg> {
    /// Identity of this source.
    source_id: SourceId,
    /// Root directory the package(s) live under.
    path: PathBuf,
    /// Whether `packages` has been populated yet (set by an update or by
    /// `preload_with`).
    updated: bool,
    /// Packages discovered under `path`.
    packages: Vec<Package>,
    config: &'cfg Config,
    /// When true, `path` is walked recursively to discover every package
    /// underneath it rather than requiring one at `path` itself.
    recursive: bool,
}
+
+impl<'cfg> PathSource<'cfg> {
+    /// Invoked with an absolute path to a directory that contains a Cargo.toml.
+    ///
+    /// This source will only return the package at precisely the `path`
+    /// specified, and it will be an error if there's not a package at `path`.
+    pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
+        PathSource {
+            source_id: id.clone(),
+            path: path.to_path_buf(),
+            updated: false,
+            packages: Vec::new(),
+            config,
+            recursive: false,
+        }
+    }
+
+    /// Creates a new source which is walked recursively to discover packages.
+    ///
+    /// This is similar to the `new` method except that instead of requiring a
+    /// valid package to be present at `root` the folder is walked entirely to
+    /// crawl for packages.
+    ///
+    /// Note that this should be used with care and likely shouldn't be chosen
+    /// by default!
+    pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
+        PathSource {
+            recursive: true,
+            ..PathSource::new(root, id, config)
+        }
+    }
+
    /// Seeds this source with an already-loaded package and marks it updated,
    /// so no filesystem scan happens later.
    ///
    /// Only valid on a fresh, non-recursive, empty source — enforced by the
    /// asserts.
    pub fn preload_with(&mut self, pkg: Package) {
        assert!(!self.updated);
        assert!(!self.recursive);
        assert!(self.packages.is_empty());
        self.updated = true;
        self.packages.push(pkg);
    }
+
+    pub fn root_package(&mut self) -> CargoResult<Package> {
+        trace!("root_package; source={:?}", self);
+
+        self.update()?;
+
+        match self.packages.iter().find(|p| p.root() == &*self.path) {
+            Some(pkg) => Ok(pkg.clone()),
+            None => Err(internal("no package found in source")),
+        }
+    }
+
+    pub fn read_packages(&self) -> CargoResult<Vec<Package>> {
+        if self.updated {
+            Ok(self.packages.clone())
+        } else if self.recursive {
+            ops::read_packages(&self.path, &self.source_id, self.config)
+        } else {
+            let path = self.path.join("Cargo.toml");
+            let (pkg, _) = ops::read_package(&path, &self.source_id, self.config)?;
+            Ok(vec![pkg])
+        }
+    }
+
    /// List all files relevant to building this package inside this source.
    ///
    /// This function will use the appropriate methods to determine the
    /// set of files underneath this source's directory which are relevant for
    /// building `pkg`.
    ///
    /// The basic assumption of this method is that all files in the directory
    /// are relevant for building this package, but it also contains logic to
    /// use other methods like .gitignore to filter the list of files.
    ///
    /// ## Pattern matching strategy
    ///
    /// Migrating from a glob-like pattern matching (using `glob` crate) to a
    /// gitignore-like pattern matching (using `ignore` crate). The migration
    /// stages are:
    ///
    /// 1) Only warn users about the future change iff their matching rules are
    ///    affected.  (CURRENT STAGE)
    ///
    /// 2) Switch to the new strategy and update documents. Still keep warning
    ///    affected users.
    ///
    /// 3) Drop the old strategy and no more warnings.
    ///
    /// See <https://github.com/rust-lang/cargo/issues/4268> for more info.
    pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
        let root = pkg.root();
        let no_include_option = pkg.manifest().include().is_empty();

        // glob-like matching rules

        // A leading `/` in a manifest pattern is stripped before compiling it
        // as a glob.
        let glob_parse = |p: &String| {
            let pattern: &str = if p.starts_with('/') {
                &p[1..p.len()]
            } else {
                p
            };
            Pattern::new(pattern)
                .map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e))
        };

        let glob_exclude = pkg.manifest()
            .exclude()
            .iter()
            .map(|p| glob_parse(p))
            .collect::<Result<Vec<_>, _>>()?;

        let glob_include = pkg.manifest()
            .include()
            .iter()
            .map(|p| glob_parse(p))
            .collect::<Result<Vec<_>, _>>()?;

        let glob_should_package = |relative_path: &Path| -> bool {
            fn glob_match(patterns: &[Pattern], relative_path: &Path) -> bool {
                patterns
                    .iter()
                    .any(|pattern| pattern.matches_path(relative_path))
            }

            // include and exclude options are mutually exclusive.
            if no_include_option {
                !glob_match(&glob_exclude, relative_path)
            } else {
                glob_match(&glob_include, relative_path)
            }
        };

        // ignore-like matching rules

        let mut exclude_builder = GitignoreBuilder::new(root);
        for rule in pkg.manifest().exclude() {
            exclude_builder.add_line(None, rule)?;
        }
        let ignore_exclude = exclude_builder.build()?;

        let mut include_builder = GitignoreBuilder::new(root);
        for rule in pkg.manifest().include() {
            include_builder.add_line(None, rule)?;
        }
        let ignore_include = include_builder.build()?;

        let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
            // include and exclude options are mutually exclusive.
            if no_include_option {
                match ignore_exclude
                    .matched_path_or_any_parents(relative_path, /* is_dir */ false)
                {
                    Match::None => Ok(true),
                    Match::Ignore(_) => Ok(false),
                    Match::Whitelist(pattern) => Err(format_err!(
                        "exclude rules cannot start with `!`: {}",
                        pattern.original()
                    )),
                }
            } else {
                match ignore_include
                    .matched_path_or_any_parents(relative_path, /* is_dir */ false)
                {
                    Match::None => Ok(false),
                    Match::Ignore(_) => Ok(true),
                    Match::Whitelist(pattern) => Err(format_err!(
                        "include rules cannot start with `!`: {}",
                        pattern.original()
                    )),
                }
            }
        };

        // matching to paths

        // Run both strategies on each file. While the glob result decides the
        // outcome (Stage 1), any disagreement produces a warning describing
        // how the file's status will change under the new ignore-based rules.
        let mut filter = |path: &Path| -> CargoResult<bool> {
            let relative_path = util::without_prefix(path, root).unwrap();
            let glob_should_package = glob_should_package(relative_path);
            let ignore_should_package = ignore_should_package(relative_path)?;

            if glob_should_package != ignore_should_package {
                if glob_should_package {
                    if no_include_option {
                        self.config.shell().warn(format!(
                            "Pattern matching for Cargo's include/exclude fields is changing and \
                             file `{}` WILL be excluded in a future Cargo version.\n\
                             See https://github.com/rust-lang/cargo/issues/4268 for more info",
                            relative_path.display()
                        ))?;
                    } else {
                        self.config.shell().warn(format!(
                            "Pattern matching for Cargo's include/exclude fields is changing and \
                             file `{}` WILL NOT be included in a future Cargo version.\n\
                             See https://github.com/rust-lang/cargo/issues/4268 for more info",
                            relative_path.display()
                        ))?;
                    }
                } else if no_include_option {
                    self.config.shell().warn(format!(
                        "Pattern matching for Cargo's include/exclude fields is changing and \
                         file `{}` WILL NOT be excluded in a future Cargo version.\n\
                         See https://github.com/rust-lang/cargo/issues/4268 for more info",
                        relative_path.display()
                    ))?;
                } else {
                    self.config.shell().warn(format!(
                        "Pattern matching for Cargo's include/exclude fields is changing and \
                         file `{}` WILL be included in a future Cargo version.\n\
                         See https://github.com/rust-lang/cargo/issues/4268 for more info",
                        relative_path.display()
                    ))?;
                }
            }

            // Update to ignore_should_package for Stage 2
            Ok(glob_should_package)
        };

        // attempt git-prepopulate only if no `include` (rust-lang/cargo#4135)
        if no_include_option {
            if let Some(result) = self.discover_git_and_list_files(pkg, root, &mut filter) {
                return result;
            }
        }
        self.list_files_walk(pkg, &mut filter)
    }
+
    /// Probes upward from `root` for an enclosing git repository that tracks
    /// this package, and lists files through the git index when found.
    ///
    /// Returns `Some(_)` if a sibling `Cargo.toml` and git repository were
    /// found and this package's manifest is in that repository's index;
    /// otherwise `None`, and the caller should fall back on the full file
    /// walk.
    fn discover_git_and_list_files(
        &self,
        pkg: &Package,
        root: &Path,
        filter: &mut FnMut(&Path) -> CargoResult<bool>,
    ) -> Option<CargoResult<Vec<PathBuf>>> {
        // If this package is in a git repository, then we really do want to
        // query the git repository as it takes into account items such as
        // .gitignore. We're not quite sure where the git repository is,
        // however, so we do a bit of a probe.
        //
        // We walk this package's path upwards and look for a sibling
        // Cargo.toml and .git folder. If we find one then we assume that we're
        // part of that repository.
        let mut cur = root;
        loop {
            if cur.join("Cargo.toml").is_file() {
                // If we find a git repository next to this Cargo.toml, we still
                // check to see if we are indeed part of the index. If not, then
                // this is likely an unrelated git repo, so keep going.
                if let Ok(repo) = git2::Repository::open(cur) {
                    let index = match repo.index() {
                        Ok(index) => index,
                        Err(err) => return Some(Err(err.into())),
                    };
                    let path = util::without_prefix(root, cur).unwrap().join("Cargo.toml");
                    if index.get_path(&path, 0).is_some() {
                        return Some(self.list_files_git(pkg, &repo, filter));
                    }
                }
            }
            // don't cross submodule boundaries
            if cur.join(".git").is_dir() {
                break;
            }
            match cur.parent() {
                Some(parent) => cur = parent,
                None => break,
            }
        }
        None
    }
+
+    // Lists the files of `pkg` by consulting the git repository `repo` that
+    // contains it: tracked files come from the git index and untracked ones
+    // from `git status`, so `.gitignore` rules are honored. Submodules are
+    // recursed into; sibling sub-packages, `Cargo.lock` and `target` are
+    // skipped.
+    fn list_files_git(
+        &self,
+        pkg: &Package,
+        repo: &git2::Repository,
+        filter: &mut FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<Vec<PathBuf>> {
+        warn!("list_files_git {}", pkg.package_id());
+        let index = repo.index()?;
+        let root = repo.workdir()
+            .ok_or_else(|| internal("Can't list files on a bare repository."))?;
+        let pkg_path = pkg.root();
+
+        let mut ret = Vec::<PathBuf>::new();
+
+        // We use information from the git repository to guide us in traversing
+        // its tree. The primary purpose of this is to take advantage of the
+        // .gitignore and auto-ignore files that don't matter.
+        //
+        // Here we're also careful to look at both tracked and untracked files as
+        // the untracked files are often part of a build and may become relevant
+        // as part of a future commit.
+        let index_files = index.iter().map(|entry| {
+            use libgit2_sys::GIT_FILEMODE_COMMIT;
+            // A "commit" file mode in the index is how git records a
+            // submodule entry; those are handled as directories below.
+            let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
+            (join(root, &entry.path), Some(is_dir))
+        });
+        let mut opts = git2::StatusOptions::new();
+        opts.include_untracked(true);
+        if let Some(suffix) = util::without_prefix(pkg_path, root) {
+            opts.pathspec(suffix);
+        }
+        let statuses = repo.statuses(Some(&mut opts))?;
+        let untracked = statuses.iter().filter_map(|entry| match entry.status() {
+            git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
+            _ => None,
+        });
+
+        let mut subpackages_found = Vec::new();
+
+        for (file_path, is_dir) in index_files.chain(untracked) {
+            let file_path = file_path?;
+
+            // Filter out files blatantly outside this package. This is helped a
+            // bit above via the `pathspec` function call, but we need to filter
+            // the entries in the index as well.
+            if !file_path.starts_with(pkg_path) {
+                continue;
+            }
+
+            match file_path.file_name().and_then(|s| s.to_str()) {
+                // Filter out Cargo.lock and target always, we don't want to
+                // package a lock file no one will ever read and we also avoid
+                // build artifacts
+                Some("Cargo.lock") | Some("target") => continue,
+
+                // Keep track of all sub-packages found and also strip out all
+                // matches we've found so far. Note, though, that if we find
+                // our own `Cargo.toml` we keep going.
+                Some("Cargo.toml") => {
+                    let path = file_path.parent().unwrap();
+                    if path != pkg_path {
+                        warn!("subpackage found: {}", path.display());
+                        ret.retain(|p| !p.starts_with(path));
+                        subpackages_found.push(path.to_path_buf());
+                        continue;
+                    }
+                }
+
+                _ => {}
+            }
+
+            // If this file is part of any other sub-package we've found so far,
+            // skip it.
+            if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
+                continue;
+            }
+
+            if is_dir.unwrap_or_else(|| file_path.is_dir()) {
+                warn!("  found submodule {}", file_path.display());
+                let rel = util::without_prefix(&file_path, root).unwrap();
+                let rel = rel.to_str()
+                    .ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?;
+                // Git submodules are currently only named through `/` path
+                // separators, explicitly not `\` which windows uses. Who knew?
+                let rel = rel.replace(r"\", "/");
+                match repo.find_submodule(&rel).and_then(|s| s.open()) {
+                    Ok(repo) => {
+                        let files = self.list_files_git(pkg, &repo, filter)?;
+                        ret.extend(files.into_iter());
+                    }
+                    Err(..) => {
+                        // Not an openable submodule after all; fall back to
+                        // walking the directory contents on disk.
+                        PathSource::walk(&file_path, &mut ret, false, filter)?;
+                    }
+                }
+            } else if (*filter)(&file_path)? {
+                // We found a file!
+                warn!("  found {}", file_path.display());
+                ret.push(file_path);
+            }
+        }
+        return Ok(ret);
+
+        // Converts raw path bytes from git into a `PathBuf` under `path`.
+        #[cfg(unix)]
+        fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
+            use std::os::unix::prelude::*;
+            use std::ffi::OsStr;
+            Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
+        }
+        // On Windows the bytes must be valid UTF-8; error out otherwise.
+        #[cfg(windows)]
+        fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
+            use std::str;
+            match str::from_utf8(data) {
+                Ok(s) => Ok(path.join(s)),
+                Err(..) => Err(internal(
+                    "cannot process path in git with a non \
+                     unicode filename",
+                )),
+            }
+        }
+    }
+
+    fn list_files_walk(
+        &self,
+        pkg: &Package,
+        filter: &mut FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<Vec<PathBuf>> {
+        let mut ret = Vec::new();
+        PathSource::walk(pkg.root(), &mut ret, true, filter)?;
+        Ok(ret)
+    }
+
+    fn walk(
+        path: &Path,
+        ret: &mut Vec<PathBuf>,
+        is_root: bool,
+        filter: &mut FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<()> {
+        if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
+            if (*filter)(path)? {
+                ret.push(path.to_path_buf());
+            }
+            return Ok(());
+        }
+        // Don't recurse into any sub-packages that we have
+        if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
+            return Ok(());
+        }
+
+        // For package integration tests, we need to sort the paths in a deterministic order to
+        // be able to match stdout warnings in the same order.
+        //
+        // TODO: Drop collect and sort after transition period and dropping warning tests.
+        // See <https://github.com/rust-lang/cargo/issues/4268>
+        // and <https://github.com/rust-lang/cargo/pull/4270>
+        let mut entries: Vec<PathBuf> = fs::read_dir(path)?.map(|e| e.unwrap().path()).collect();
+        entries.sort_unstable_by(|a, b| a.as_os_str().cmp(b.as_os_str()));
+        for path in entries {
+            let name = path.file_name().and_then(|s| s.to_str());
+            // Skip dotfile directories
+            if name.map(|s| s.starts_with('.')) == Some(true) {
+                continue;
+            }
+            if is_root {
+                // Skip cargo artifacts
+                match name {
+                    Some("target") | Some("Cargo.lock") => continue,
+                    _ => {}
+                }
+            }
+            PathSource::walk(&path, ret, false, filter)?;
+        }
+        Ok(())
+    }
+
+    pub fn last_modified_file(&self, pkg: &Package) -> CargoResult<(FileTime, PathBuf)> {
+        if !self.updated {
+            return Err(internal("BUG: source was not updated"));
+        }
+
+        let mut max = FileTime::zero();
+        let mut max_path = PathBuf::new();
+        for file in self.list_files(pkg)? {
+            // An fs::stat error here is either because path is a
+            // broken symlink, a permissions error, or a race
+            // condition where this path was rm'ed - either way,
+            // we can ignore the error and treat the path's mtime
+            // as 0.
+            let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero());
+            if mtime > max {
+                max = mtime;
+                max_path = file;
+            }
+        }
+        trace!("last modified file {}: {}", self.path.display(), max);
+        Ok((max, max_path))
+    }
+
+    /// The root directory this path source was created for.
+    pub fn path(&self) -> &Path {
+        &self.path
+    }
+}
+
+impl<'cfg> Debug for PathSource<'cfg> {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "the paths source")
+    }
+}
+
+impl<'cfg> Source for PathSource<'cfg> {
+    /// Yields each loaded package summary matching `dep` to the callback.
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        for s in self.packages.iter().map(|p| p.summary()) {
+            if dep.matches(s) {
+                f(s.clone())
+            }
+        }
+        Ok(())
+    }
+
+    /// Fuzzy variant: yields *every* summary, ignoring the dependency filter.
+    fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        for s in self.packages.iter().map(|p| p.summary()) {
+            f(s.clone())
+        }
+        Ok(())
+    }
+
+    // Local paths have nothing to checksum.
+    fn supports_checksums(&self) -> bool {
+        false
+    }
+
+    // Nor do they support `--precise` pinning.
+    fn requires_precise(&self) -> bool {
+        false
+    }
+
+    fn source_id(&self) -> &SourceId {
+        &self.source_id
+    }
+
+    /// Reads the packages from disk on first call; later calls are no-ops.
+    fn update(&mut self) -> CargoResult<()> {
+        if !self.updated {
+            let packages = self.read_packages()?;
+            self.packages.extend(packages.into_iter());
+            self.updated = true;
+        }
+
+        Ok(())
+    }
+
+    /// "Download" is a lookup in the already-loaded package list; path
+    /// sources have nothing to fetch.
+    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+        trace!("getting packages; id={}", id);
+
+        let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
+        pkg.cloned()
+            .map(MaybePackage::Ready)
+            .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
+    }
+
+    // `download` never returns `MaybePackage::Download`, so this is unreachable.
+    fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+        panic!("no download should have started")
+    }
+
+    /// Fingerprint is the newest mtime plus the path that carries it.
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        let (max, max_path) = self.last_modified_file(pkg)?;
+        Ok(format!("{} ({})", max, max_path.display()))
+    }
+
+    fn describe(&self) -> String {
+        match self.source_id.url().to_file_path() {
+            Ok(path) => path.display().to_string(),
+            Err(_) => self.source_id.to_string(),
+        }
+    }
+}
diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs
new file mode 100644 (file)
index 0000000..e9db7cf
--- /dev/null
@@ -0,0 +1,304 @@
+use std::collections::HashMap;
+use std::path::Path;
+use std::str;
+
+use semver::Version;
+use serde_json;
+
+use core::dependency::Dependency;
+use core::{PackageId, SourceId, Summary};
+use sources::registry::RegistryData;
+use sources::registry::{RegistryPackage, INDEX_LOCK};
+use util::{internal, CargoResult, Config, Filesystem};
+
+/// Crates.io treats hyphens and underscores as interchangeable, but the
+/// index and old cargos do not. So the index must store the uncanonicalized
+/// spelling of the name so old cargos can find it.
+/// This iterator tries all possible combinations of switching
+/// hyphens and underscores to find the uncanonicalized one.
+/// As all stored inputs have the correct spelling, we start with the
+/// spelling exactly as provided.
+struct UncanonicalizedIter<'s> {
+    /// The spelling as provided by the caller.
+    input: &'s str,
+    /// Number of `-`/`_` characters in `input`.
+    num_hyphen_underscore: u32,
+    /// Bitmask of which separators to flip next; also the iteration counter.
+    hyphen_combination_num: u16,
+}
+
+impl<'s> UncanonicalizedIter<'s> {
+    fn new(input: &'s str) -> Self {
+        let num_hyphen_underscore = input.chars().filter(|&c| c == '_' || c == '-').count() as u32;
+        UncanonicalizedIter {
+            input,
+            num_hyphen_underscore,
+            hyphen_combination_num: 0,
+        }
+    }
+}
+
+impl<'s> Iterator for UncanonicalizedIter<'s> {
+    type Item = String;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        // Stop once the counter reaches 2^num_hyphen_underscore, i.e. every
+        // flip mask 0..2^n has been emitted. (With more than 16 separators
+        // this can never trigger — a nonzero u16 has at most 15 trailing
+        // zeros — so callers must bound the iterator, e.g. with `take`.)
+        if self.hyphen_combination_num > 0 && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore {
+            return None;
+        }
+
+        let ret = Some(self.input
+            .chars()
+            .scan(0u16, |s, c| {
+                // the check against 15 here's to prevent
+                // shift overflow on inputs with more than 15 hyphens
+                if (c == '_' || c == '-') && *s <= 15 {
+                    // Flip this separator iff its bit is set in the mask.
+                    let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0;
+                    let out = if (c == '_') ^ switch {
+                        '_'
+                    } else {
+                        '-'
+                    };
+                    *s += 1;
+                    Some(out)
+                } else {
+                    Some(c)
+                }
+            })
+            .collect());
+        self.hyphen_combination_num += 1;
+        ret
+    }
+}
+
+// A name without any `-`/`_` yields exactly one spelling: the original.
+#[test]
+fn no_hyphen() {
+    assert_eq!(
+        UncanonicalizedIter::new("test").collect::<Vec<_>>(),
+        vec!["test".to_string()]
+    )
+}
+
+// Two separators produce 2^2 = 4 spellings, original spelling first;
+// the order of the rest is fixed by the bitmask enumeration.
+#[test]
+fn two_hyphen() {
+    assert_eq!(
+        UncanonicalizedIter::new("te-_st").collect::<Vec<_>>(),
+        vec!["te-_st".to_string(), "te__st".to_string(), "te--st".to_string(), "te_-st".to_string()]
+    )
+}
+
+// With more than 16 separators the termination check can never fire, so the
+// iterator must be externally bounded — mirrors the `take(1024)` used by
+// `load_summaries`. This just checks it keeps producing without panicking.
+#[test]
+fn overflow_hyphen() {
+    assert_eq!(
+        UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st").take(100).count(),
+        100
+    )
+}
+
+pub struct RegistryIndex<'cfg> {
+    source_id: SourceId,
+    /// Root of the on-disk index checkout.
+    path: Filesystem,
+    /// Parsed summaries per package name; the bool records "yanked".
+    cache: HashMap<&'static str, Vec<(Summary, bool)>>,
+    /// Checksums per name and version. Keys are `&'static str` — presumably
+    /// package names are interned (cf. `pkg.name().as_str()` in `hash()`);
+    /// confirm against the `InternedString` definition.
+    hashes: HashMap<&'static str, HashMap<Version, String>>, // (name, vers) => cksum
+    config: &'cfg Config,
+    /// Whether index files must be read under a file lock.
+    locked: bool,
+}
+
+impl<'cfg> RegistryIndex<'cfg> {
+    pub fn new(
+        id: &SourceId,
+        path: &Filesystem,
+        config: &'cfg Config,
+        locked: bool,
+    ) -> RegistryIndex<'cfg> {
+        RegistryIndex {
+            source_id: id.clone(),
+            path: path.clone(),
+            cache: HashMap::new(),
+            hashes: HashMap::new(),
+            config,
+            locked,
+        }
+    }
+
+    /// Return the hash listed for a specified PackageId.
+    pub fn hash(&mut self, pkg: &PackageId, load: &mut RegistryData) -> CargoResult<String> {
+        let name = pkg.name().as_str();
+        let version = pkg.version();
+        if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) {
+            return Ok(s.clone());
+        }
+        // Ok, we're missing the key, so parse the index file to load it.
+        self.summaries(name, load)?;
+        self.hashes
+            .get(name)
+            .and_then(|v| v.get(version))
+            .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))
+            .map(|s| s.clone())
+    }
+
+    /// Parse the on-disk metadata for the package provided
+    ///
+    /// Returns a list of pairs of (summary, yanked) for the package name
+    /// specified.
+    pub fn summaries(
+        &mut self,
+        name: &'static str,
+        load: &mut RegistryData,
+    ) -> CargoResult<&Vec<(Summary, bool)>> {
+        if self.cache.contains_key(name) {
+            return Ok(&self.cache[name]);
+        }
+        let summaries = self.load_summaries(name, load)?;
+        self.cache.insert(name, summaries);
+        Ok(&self.cache[name])
+    }
+
+    fn load_summaries(
+        &mut self,
+        name: &str,
+        load: &mut RegistryData,
+    ) -> CargoResult<Vec<(Summary, bool)>> {
+        // Prepare the `RegistryData` which will lazily initialize internal data
+        // structures. Note that this is also importantly needed to initialize
+        // to avoid deadlocks where we acquire a lock below but the `load`
+        // function inside *also* wants to acquire a lock. See an instance of
+        // this on #5551.
+        load.prepare()?;
+        let (root, _lock) = if self.locked {
+            let lock = self
+                .path
+                .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index");
+            match lock {
+                Ok(lock) => (lock.path().parent().unwrap().to_path_buf(), Some(lock)),
+                Err(_) => return Ok(Vec::new()),
+            }
+        } else {
+            (self.path.clone().into_path_unlocked(), None)
+        };
+
+        let fs_name = name
+            .chars()
+            .flat_map(|c| c.to_lowercase())
+            .collect::<String>();
+
+        // see module comment for why this is structured the way it is
+        let raw_path = match fs_name.len() {
+            1 => format!("1/{}", fs_name),
+            2 => format!("2/{}", fs_name),
+            3 => format!("3/{}/{}", &fs_name[..1], fs_name),
+            _ => format!("{}/{}/{}", &fs_name[0..2], &fs_name[2..4], fs_name),
+        };
+        let mut ret = Vec::new();
+         for path in UncanonicalizedIter::new(&raw_path).take(1024) {
+            let mut hit_closure = false;
+            let err = load.load(&root, Path::new(&path), &mut |contents| {
+                hit_closure = true;
+                let contents = str::from_utf8(contents)
+                    .map_err(|_| format_err!("registry index file was not valid utf-8"))?;
+                ret.reserve(contents.lines().count());
+                let lines = contents.lines().map(|s| s.trim()).filter(|l| !l.is_empty());
+
+                let online = !self.config.cli_unstable().offline;
+                // Attempt forwards-compatibility on the index by ignoring
+                // everything that we ourselves don't understand, that should
+                // allow future cargo implementations to break the
+                // interpretation of each line here and older cargo will simply
+                // ignore the new lines.
+                ret.extend(lines.filter_map(|line| {
+                    let (summary, locked) = match self.parse_registry_package(line) {
+                        Ok(p) => p,
+                        Err(e) => {
+                            info!("failed to parse `{}` registry package: {}", name, e);
+                            trace!("line: {}", line);
+                            return None;
+                        }
+                    };
+                    if online || load.is_crate_downloaded(summary.package_id()) {
+                        Some((summary, locked))
+                    } else {
+                        None
+                    }
+                }));
+
+                Ok(())
+            });
+
+            // We ignore lookup failures as those are just crates which don't exist
+            // or we haven't updated the registry yet. If we actually ran the
+            // closure though then we care about those errors.
+            if hit_closure {
+                err?;
+                // Crates.io ensures that there is only one hyphen and underscore equivalent
+                // result in the index so return when we find it.
+                return Ok(ret);
+            }
+        }
+
+        Ok(ret)
+    }
+
+    /// Parse a line from the registry's index file into a Summary for a
+    /// package.
+    ///
+    /// The returned boolean is whether or not the summary has been yanked.
+    fn parse_registry_package(&mut self, line: &str) -> CargoResult<(Summary, bool)> {
+        let RegistryPackage {
+            name,
+            vers,
+            cksum,
+            deps,
+            features,
+            yanked,
+            links,
+        } = serde_json::from_str(line)?;
+        let pkgid = PackageId::new(&name, &vers, &self.source_id)?;
+        let name = pkgid.name();
+        let deps = deps
+            .into_iter()
+            .map(|dep| dep.into_dep(&self.source_id))
+            .collect::<CargoResult<Vec<_>>>()?;
+        let summary = Summary::new(pkgid, deps, &features, links, false)?;
+        let summary = summary.set_checksum(cksum.clone());
+        self.hashes
+            .entry(name.as_str())
+            .or_insert_with(HashMap::new)
+            .insert(vers, cksum);
+        Ok((summary, yanked.unwrap_or(false)))
+    }
+
+    pub fn query_inner(
+        &mut self,
+        dep: &Dependency,
+        load: &mut RegistryData,
+        f: &mut FnMut(Summary),
+    ) -> CargoResult<()> {
+        let source_id = self.source_id.clone();
+        let name = dep.package_name().as_str();
+        let summaries = self.summaries(name, load)?;
+        let summaries = summaries
+            .iter()
+            .filter(|&&(_, yanked)| dep.source_id().precise().is_some() || !yanked)
+            .map(|s| s.0.clone());
+
+        // Handle `cargo update --precise` here. If specified, our own source
+        // will have a precise version listed of the form
+        // `<pkg>=<p_req>o-><f_req>` where `<pkg>` is the name of a crate on
+        // this source, `<p_req>` is the version installed and `<f_req> is the
+        // version requested (argument to `--precise`).
+        let summaries = summaries.filter(|s| match source_id.precise() {
+            Some(p) if p.starts_with(name) && p[name.len()..].starts_with('=') => {
+                let mut vers = p[name.len() + 1..].splitn(2, "->");
+                if dep
+                    .version_req()
+                    .matches(&Version::parse(vers.next().unwrap()).unwrap())
+                {
+                    vers.next().unwrap() == s.version().to_string()
+                } else {
+                    true
+                }
+            }
+            _ => true,
+        });
+
+        for summary in summaries {
+            f(summary);
+        }
+        Ok(())
+    }
+}
diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs
new file mode 100644 (file)
index 0000000..023e955
--- /dev/null
@@ -0,0 +1,112 @@
+use std::io::SeekFrom;
+use std::io::prelude::*;
+use std::path::Path;
+
+use core::PackageId;
+use hex;
+use sources::registry::{RegistryConfig, RegistryData, MaybeLock};
+use util::paths;
+use util::{Config, Filesystem, Sha256, FileLock};
+use util::errors::{CargoResult, CargoResultExt};
+
+pub struct LocalRegistry<'cfg> {
+    /// `<root>/index` — the registry's index directory.
+    index_path: Filesystem,
+    /// Root of the local registry; `download()` opens `.crate` files here.
+    root: Filesystem,
+    /// Cargo's registry source dir for this registry; checked by
+    /// `download()` for already-unpacked crates.
+    src_path: Filesystem,
+    config: &'cfg Config,
+}
+
+impl<'cfg> LocalRegistry<'cfg> {
+    pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
+        LocalRegistry {
+            src_path: config.registry_source_path().join(name),
+            index_path: Filesystem::new(root.join("index")),
+            root: Filesystem::new(root.to_path_buf()),
+            config,
+        }
+    }
+}
+
+impl<'cfg> RegistryData for LocalRegistry<'cfg> {
+    // Nothing to set up for a local registry.
+    fn prepare(&self) -> CargoResult<()> {
+        Ok(())
+    }
+
+    fn index_path(&self) -> &Filesystem {
+        &self.index_path
+    }
+
+    /// Reads the index file at `root/path` eagerly and hands its raw bytes
+    /// to the `data` callback.
+    fn load(
+        &self,
+        root: &Path,
+        path: &Path,
+        data: &mut FnMut(&[u8]) -> CargoResult<()>,
+    ) -> CargoResult<()> {
+        data(&paths::read_bytes(&root.join(path))?)
+    }
+
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        // Local registries don't have configuration for remote APIs or anything
+        // like that
+        Ok(None)
+    }
+
+    fn update_index(&mut self) -> CargoResult<()> {
+        // Nothing to update, we just use what's on disk. Verify it actually
+        // exists though. We don't use any locks as we're just checking whether
+        // these directories exist.
+        let root = self.root.clone().into_path_unlocked();
+        if !root.is_dir() {
+            bail!("local registry path is not a directory: {}", root.display())
+        }
+        let index_path = self.index_path.clone().into_path_unlocked();
+        if !index_path.is_dir() {
+            bail!(
+                "local registry index path is not a directory: {}",
+                index_path.display()
+            )
+        }
+        Ok(())
+    }
+
+    /// "Downloads" by opening the local `.crate` file and verifying its
+    /// SHA-256 against `checksum` (skipped if it's already unpacked).
+    fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock> {
+        let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
+        let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
+
+        // If we've already got an unpacked version of this crate, then skip the
+        // checksum below as it is in theory already verified.
+        let dst = format!("{}-{}", pkg.name(), pkg.version());
+        if self.src_path.join(dst).into_path_unlocked().exists() {
+            return Ok(MaybeLock::Ready(crate_file));
+        }
+
+        self.config.shell().status("Unpacking", pkg)?;
+
+        // We don't actually need to download anything per-se, we just need to
+        // verify the checksum matches the .crate file itself.
+        let mut state = Sha256::new();
+        let mut buf = [0; 64 * 1024];
+        loop {
+            let n = crate_file
+                .read(&mut buf)
+                .chain_err(|| format!("failed to read `{}`", crate_file.path().display()))?;
+            if n == 0 {
+                break;
+            }
+            state.update(&buf[..n]);
+        }
+        if hex::encode(state.finish()) != checksum {
+            bail!("failed to verify the checksum of `{}`", pkg)
+        }
+
+        // Rewind so the caller reads the verified file from the start.
+        crate_file.seek(SeekFrom::Start(0))?;
+
+        Ok(MaybeLock::Ready(crate_file))
+    }
+
+    // Local registries never hand out `MaybeLock::Download`, so this is
+    // unreachable by construction.
+    fn finish_download(&mut self, _pkg: &PackageId, _checksum: &str, _data: &[u8])
+        -> CargoResult<FileLock>
+    {
+        panic!("this source doesn't download")
+    }
+}
diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs
new file mode 100644 (file)
index 0000000..b061716
--- /dev/null
@@ -0,0 +1,593 @@
+//! A `Source` for registry-based packages.
+//!
+//! # What's a Registry?
+//!
+//! Registries are central locations where packages can be uploaded to,
+//! discovered, and searched for. The purpose of a registry is to have a
+//! location that serves as permanent storage for versions of a crate over time.
+//!
+//! Compared to git sources, a registry provides many packages as well as many
+//! versions simultaneously. Git sources can also have commits deleted through
+//! rebasings where registries cannot have their versions deleted.
+//!
+//! # The Index of a Registry
+//!
+//! One of the major difficulties with a registry is that hosting so many
+//! packages may quickly run into performance problems when dealing with
+//! dependency graphs. It's infeasible for cargo to download the entire contents
+//! of the registry just to resolve one package's dependencies, for example. As
+//! a result, cargo needs some efficient method of querying what packages are
+//! available on a registry, what versions are available, and what the
+//! dependencies for each version are.
+//!
+//! One method of doing so would be having the registry expose an HTTP endpoint
+//! which can be queried with a list of packages and a response of their
+//! dependencies and versions is returned. This is somewhat inefficient however
+//! as we may have to hit the endpoint many times and we may have already
+//! queried for much of the data locally already (for other packages, for
+//! example). This also involves inventing a transport format between the
+//! registry and Cargo itself, so this route was not taken.
+//!
+//! Instead, Cargo communicates with registries through a git repository
+//! referred to as the Index. The Index of a registry is essentially an easily
+//! query-able version of the registry's database for a list of versions of a
+//! package as well as a list of dependencies for each version.
+//!
+//! Using git to host this index provides a number of benefits:
+//!
+//! * The entire index can be stored efficiently locally on disk. This means
+//!   that all queries of a registry can happen locally and don't need to touch
+//!   the network.
+//!
+//! * Updates of the index are quite efficient. Using git buys incremental
+//!   updates, compressed transmission, etc for free. The index must be updated
+//!   each time we need fresh information from a registry, but this is one
+//!   update of a git repository that probably hasn't changed a whole lot so
+//!   it shouldn't be too expensive.
+//!
+//!   Additionally, each modification to the index is just appending a line at
+//!   the end of a file (the exact format is described later). This means that
+//!   the commits for an index are quite small and easily applied/compressible.
+//!
+//! ## The format of the Index
+//!
+//! The index is a store for the list of versions for all packages known, so its
+//! format on disk is optimized slightly to ensure that `ls registry` doesn't
+//! produce a list of all packages ever known. The index also wants to ensure
+//! that there's not a million files which may actually end up hitting
+//! filesystem limits at some point. To this end, a few decisions were made
+//! about the format of the registry:
+//!
+//! 1. Each crate will have one file corresponding to it. Each version for a
+//!    crate will just be a line in this file.
+//! 2. There will be two tiers of directories for crate names, under which
+//!    crates corresponding to those tiers will be located.
+//!
+//! As an example, this is an example hierarchy of an index:
+//!
+//! ```notrust
+//! .
+//! ├── 3
+//! │   └── u
+//! │       └── url
+//! ├── bz
+//! │   └── ip
+//! │       └── bzip2
+//! ├── config.json
+//! ├── en
+//! │   └── co
+//! │       └── encoding
+//! └── li
+//!     ├── bg
+//!     │   └── libgit2
+//!     └── nk
+//!         └── link-config
+//! ```
+//!
+//! The root of the index contains a `config.json` file with a few entries
+//! corresponding to the registry (see `RegistryConfig` below).
+//!
+//! Otherwise, there are three numbered directories (1, 2, 3) for crates with
+//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the
+//! crate files underneath them, while the 3 directory is sharded by the first
+//! letter of the crate name.
+//!
+//! Otherwise the top-level directory contains many two-letter directory names,
+//! each of which has many sub-folders with two letters. At the end of all these
+//! are the actual crate files themselves.
+//!
+//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as
+//! efficient lookup based on the crate name itself.
+//!
+//! ## Crate files
+//!
+//! Each file in the index is the history of one crate over time. Each line in
+//! the file corresponds to one version of a crate, stored in JSON format (see
+//! the `RegistryPackage` structure below).
+//!
+//! As new versions are published, new lines are appended to this file. The only
+//! modifications to this file that should happen over time are yanks of a
+//! particular version.
+//!
+//! # Downloading Packages
+//!
+//! The purpose of the Index was to provide an efficient method to resolve the
+//! dependency graph for a package. So far we only required one network
+//! interaction to update the registry's repository (yay!). After resolution has
+//! been performed, however we need to download the contents of packages so we
+//! can read the full manifest and build the source code.
+//!
+//! To accomplish this, this source's `download` method will make an HTTP
+//! request per-package requested to download tarballs into a local cache. These
+//! tarballs will then be unpacked into a destination folder.
+//!
+//! Note that because versions uploaded to the registry are frozen forever that
+//! the HTTP download and unpacking can all be skipped if the version has
+//! already been downloaded and unpacked. This caching allows us to only
+//! download a package when absolutely necessary.
+//!
+//! # Filesystem Hierarchy
+//!
+//! Overall, the `$HOME/.cargo` looks like this when talking about the registry:
+//!
+//! ```notrust
+//! # A folder under which all registry metadata is hosted (similar to
+//! # $HOME/.cargo/git)
+//! $HOME/.cargo/registry/
+//!
+//!     # For each registry that cargo knows about (keyed by hostname + hash)
+//!     # there is a folder which is the checked out version of the index for
+//!     # the registry in this location. Note that this is done so cargo can
+//!     # support multiple registries simultaneously
+//!     index/
+//!         registry1-<hash>/
+//!         registry2-<hash>/
+//!         ...
+//!
+//!     # This folder is a cache for all downloaded tarballs from a registry.
+//!     # Once downloaded and verified, a tarball never changes.
+//!     cache/
+//!         registry1-<hash>/<pkg>-<version>.crate
+//!         ...
+//!
+//!     # Location in which all tarballs are unpacked. Each tarball is known to
+//!     # be frozen after downloading, so transitively this folder is also
+//!     # frozen once its unpacked (it's never unpacked again)
+//!     src/
+//!         registry1-<hash>/<pkg>-<version>/...
+//!         ...
+//! ```
+
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::fs::File;
+use std::path::{Path, PathBuf};
+
+use flate2::read::GzDecoder;
+use semver::Version;
+#[cfg(test)]
+use serde_json;
+use tar::Archive;
+
+use core::dependency::{Dependency, Kind};
+use core::source::MaybePackage;
+use core::{Package, PackageId, Source, SourceId, Summary};
+use sources::PathSource;
+use util::errors::CargoResultExt;
+use util::hex;
+use util::to_url::ToUrl;
+use util::{internal, CargoResult, Config, FileLock, Filesystem};
+
+/// Name of the file-lock guarding concurrent access to a registry index.
+const INDEX_LOCK: &str = ".cargo-index-lock";
+/// Git repository holding the crates.io index.
+pub const CRATES_IO_INDEX: &str = "https://github.com/rust-lang/crates.io-index";
+/// Canonical configuration name for the crates.io registry.
+pub const CRATES_IO_REGISTRY: &str = "crates-io";
+/// Placeholders substituted into a registry's `dl` URL template
+/// (see `RegistryConfig::dl`).
+const CRATE_TEMPLATE: &str = "{crate}";
+const VERSION_TEMPLATE: &str = "{version}";
+
+/// A `Source` backed by a registry, with the registry-kind-specific I/O
+/// (remote git index vs. local directory) dispatched through `ops`.
+pub struct RegistrySource<'cfg> {
+    source_id: SourceId,
+    /// Directory under which downloaded `.crate` files are unpacked.
+    src_path: Filesystem,
+    config: &'cfg Config,
+    /// Read in `query` to decide whether a lazy index update may still help.
+    updated: bool,
+    /// Registry-kind-specific operations (see `RegistryData`).
+    ops: Box<RegistryData + 'cfg>,
+    /// In-memory view over the registry's index.
+    index: index::RegistryIndex<'cfg>,
+    /// Whether index accesses must take the index file-lock.
+    index_locked: bool,
+}
+
+/// A registry's `config.json`, stored at the root of the index.
+#[derive(Deserialize)]
+pub struct RegistryConfig {
+    /// Download endpoint for all crates.
+    ///
+    /// The string is a template which will generate the download URL for the
+    /// tarball of a specific version of a crate. The substrings `{crate}` and
+    /// `{version}` will be replaced with the crate's name and version
+    /// respectively.
+    ///
+    /// For backwards compatibility, if the string does not contain `{crate}` or
+    /// `{version}`, it will be extended with `/{crate}/{version}/download` to
+    /// support registries like crates.io which were created before the
+    /// templating setup was created.
+    pub dl: String,
+
+    /// API endpoint for the registry. This is what's actually hit to perform
+    /// operations like yanks, owner modifications, publish new crates, etc.
+    pub api: Option<String>,
+}
+
+/// One line of an index file: a single published version of a crate, in the
+/// JSON format stored in the registry index.
+#[derive(Deserialize)]
+pub struct RegistryPackage<'a> {
+    name: Cow<'a, str>,
+    /// Published version of this entry.
+    vers: Version,
+    deps: Vec<RegistryDependency<'a>>,
+    /// Feature name -> list of features/deps it enables.
+    features: BTreeMap<Cow<'a, str>, Vec<Cow<'a, str>>>,
+    /// Hex-encoded checksum of the `.crate` file (compared against a SHA256
+    /// digest on download).
+    cksum: String,
+    yanked: Option<bool>,
+    /// Value of the `links` manifest key, if any.
+    links: Option<Cow<'a, str>>,
+}
+
+/// Smoke-tests that index JSON lines — including ones containing escaped
+/// characters in every string position — deserialize into `RegistryPackage`.
+#[test]
+fn escaped_cher_in_json() {
+    let _: RegistryPackage = serde_json::from_str(
+        r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#
+    ).unwrap();
+    let _: RegistryPackage = serde_json::from_str(
+        r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"#
+    ).unwrap();
+
+    // Now we add escaped chars in all the places they can go.
+    // These are not valid, but it should error later than JSON parsing.
+    let _: RegistryPackage = serde_json::from_str(r#"{
+        "name":"This name has a escaped cher in it \n\t\" ",
+        "vers":"0.0.1",
+        "deps":[{
+            "name": " \n\t\" ",
+            "req": " \n\t\" ",
+            "features": [" \n\t\" "],
+            "optional": true,
+            "default_features": true,
+            "target": " \n\t\" ",
+            "kind": " \n\t\" ",
+            "registry": " \n\t\" "
+        }],
+        "cksum":"bae3",
+        "features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]},
+        "links":" \n\t\" "}"#
+    ).unwrap();
+}
+
+/// Identifiers for the JSON keys of an index entry (serde
+/// `field_identifier`), mirroring the fields of `RegistryPackage`.
+#[derive(Deserialize)]
+#[serde(field_identifier, rename_all = "lowercase")]
+enum Field {
+    Name,
+    Vers,
+    Deps,
+    Features,
+    Cksum,
+    Yanked,
+    Links,
+}
+
+/// A dependency as encoded in an index JSON line; converted into a real
+/// `Dependency` by `into_dep`.
+#[derive(Deserialize)]
+struct RegistryDependency<'a> {
+    name: Cow<'a, str>,
+    /// Version requirement string.
+    req: Cow<'a, str>,
+    features: Vec<Cow<'a, str>>,
+    optional: bool,
+    default_features: bool,
+    /// `cfg`/target string this dependency is restricted to, if any.
+    target: Option<Cow<'a, str>>,
+    /// "dev", "build", or absent/other for a normal dependency.
+    kind: Option<Cow<'a, str>>,
+    /// Alternate registry URL, when the dependency doesn't come from the
+    /// same registry as the depender.
+    registry: Option<Cow<'a, str>>,
+    /// Real package name when the dependency is renamed in the depender's
+    /// manifest (`name` is then the name used in that manifest).
+    package: Option<Cow<'a, str>>,
+}
+
+impl<'a> RegistryDependency<'a> {
+    /// Converts an encoded dependency in the registry to a cargo dependency
+    pub fn into_dep(self, default: &SourceId) -> CargoResult<Dependency> {
+        let RegistryDependency {
+            name,
+            req,
+            mut features,
+            optional,
+            default_features,
+            target,
+            kind,
+            registry,
+            package,
+        } = self;
+
+        let id = if let Some(registry) = registry {
+            SourceId::for_registry(&registry.to_url()?)?
+        } else {
+            default.clone()
+        };
+
+
+        let mut dep = Dependency::parse_no_deprecated(
+            package.as_ref().unwrap_or(&name),
+            Some(&req),
+            &id,
+        )?;
+        if package.is_some() {
+            dep.set_explicit_name_in_toml(&name);
+        }
+        let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") {
+            "dev" => Kind::Development,
+            "build" => Kind::Build,
+            _ => Kind::Normal,
+        };
+
+        let platform = match target {
+            Some(target) => Some(target.parse()?),
+            None => None,
+        };
+
+        // Unfortunately older versions of cargo and/or the registry ended up
+        // publishing lots of entries where the features array contained the
+        // empty feature, "", inside. This confuses the resolution process much
+        // later on and these features aren't actually valid, so filter them all
+        // out here.
+        features.retain(|s| !s.is_empty());
+
+        dep.set_optional(optional)
+            .set_default_features(default_features)
+            .set_features(features)
+            .set_platform(platform)
+            .set_kind(kind);
+
+        Ok(dep)
+    }
+}
+
+/// Abstraction over the underlying storage of a registry: remote (git-hosted
+/// index plus HTTP downloads) or local (directory on disk).
+pub trait RegistryData {
+    /// Performs any one-time setup (e.g. creating directories/repositories).
+    fn prepare(&self) -> CargoResult<()>;
+    /// Filesystem location of the registry's index.
+    fn index_path(&self) -> &Filesystem;
+    /// Loads the index file at `path`, handing its raw bytes to `data`.
+    fn load(
+        &self,
+        _root: &Path,
+        path: &Path,
+        data: &mut FnMut(&[u8]) -> CargoResult<()>,
+    ) -> CargoResult<()>;
+    /// Loads the registry's `config.json`, if present.
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
+    /// Brings the local copy of the index up to date.
+    fn update_index(&mut self) -> CargoResult<()>;
+    /// Returns the already-cached crate file, or the URL the caller must
+    /// download it from (see `MaybeLock`).
+    fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock>;
+    /// Verifies and persists bytes obtained for a `MaybeLock::Download`.
+    fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
+        -> CargoResult<FileLock>;
+
+    /// Whether the `.crate` file for `pkg` is already available locally.
+    /// Defaults to `true` for sources that always have packages on hand.
+    fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
+        true
+    }
+}
+
+/// Result of `RegistryData::download`: either the crate file is already on
+/// disk (a lock on it is returned), or it must be fetched from `url` and
+/// later handed to `finish_download`.
+pub enum MaybeLock {
+    Ready(FileLock),
+    Download { url: String, descriptor: String }
+}
+
+mod index;
+mod local;
+mod remote;
+
+fn short_name(id: &SourceId) -> String {
+    let hash = hex::short_hash(id);
+    let ident = id.url().host_str().unwrap_or("").to_string();
+    format!("{}-{}", ident, hash)
+}
+
+impl<'cfg> RegistrySource<'cfg> {
+    /// Creates a source for a remote registry (git-hosted index, HTTP
+    /// downloads); the index is accessed under a file lock.
+    pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> {
+        let name = short_name(source_id);
+        let ops = remote::RemoteRegistry::new(source_id, config, &name);
+        RegistrySource::new(source_id, config, &name, Box::new(ops), true)
+    }
+
+    /// Creates a source for a local registry rooted at `path`; no index
+    /// locking is performed (`index_locked` is `false`).
+    pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> {
+        let name = short_name(source_id);
+        let ops = local::LocalRegistry::new(path, config, &name);
+        RegistrySource::new(source_id, config, &name, Box::new(ops), false)
+    }
+
+    /// Common constructor behind `remote`/`local`.
+    fn new(
+        source_id: &SourceId,
+        config: &'cfg Config,
+        name: &str,
+        ops: Box<RegistryData + 'cfg>,
+        index_locked: bool,
+    ) -> RegistrySource<'cfg> {
+        RegistrySource {
+            src_path: config.registry_source_path().join(name),
+            config,
+            source_id: source_id.clone(),
+            updated: false,
+            index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked),
+            index_locked,
+            ops,
+        }
+    }
+
+    /// Decode the configuration stored within the registry.
+    ///
+    /// This requires that the index has been at least checked out.
+    pub fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        self.ops.config()
+    }
+
+    /// Unpacks a downloaded package into a location where it's ready to be
+    /// compiled.
+    ///
+    /// No action is taken if the source looks like it's already unpacked.
+    fn unpack_package(&self, pkg: &PackageId, tarball: &FileLock) -> CargoResult<PathBuf> {
+        let dst = self
+            .src_path
+            .join(&format!("{}-{}", pkg.name(), pkg.version()));
+        dst.create_dir()?;
+        // Note that we've already got the `tarball` locked above, and that
+        // implies a lock on the unpacked destination as well, so this access
+        // via `into_path_unlocked` should be ok.
+        let dst = dst.into_path_unlocked();
+        // `.cargo-ok` is only created after a fully successful unpack (see
+        // below), so its presence means the sources are already in place.
+        let ok = dst.join(".cargo-ok");
+        if ok.exists() {
+            return Ok(dst);
+        }
+
+        let gz = GzDecoder::new(tarball.file());
+        let mut tar = Archive::new(gz);
+        let prefix = dst.file_name().unwrap();
+        let parent = dst.parent().unwrap();
+        for entry in tar.entries()? {
+            let mut entry = entry.chain_err(|| "failed to iterate over archive")?;
+            let entry_path = entry
+                .path()
+                .chain_err(|| "failed to read entry path")?
+                .into_owned();
+
+            // We're going to unpack this tarball into the global source
+            // directory, but we want to make sure that it doesn't accidentally
+            // (or maliciously) overwrite source code from other crates. Cargo
+            // itself should never generate a tarball that hits this error, and
+            // crates.io should also block uploads with these sorts of tarballs,
+            // but be extra sure by adding a check here as well.
+            if !entry_path.starts_with(prefix) {
+                bail!(
+                    "invalid tarball downloaded, contains \
+                     a file at {:?} which isn't under {:?}",
+                    entry_path,
+                    prefix
+                )
+            }
+
+            // Once that's verified, unpack the entry as usual.
+            entry
+                .unpack_in(parent)
+                .chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?;
+        }
+        File::create(&ok)?;
+        Ok(dst.clone())
+    }
+
+    /// Updates the on-disk index via `ops`, then rebuilds the in-memory
+    /// index view over the fresh data.
+    fn do_update(&mut self) -> CargoResult<()> {
+        self.ops.update_index()?;
+        let path = self.ops.index_path();
+        self.index =
+            index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
+        Ok(())
+    }
+
+    /// Unpacks the downloaded crate file and loads a `Package` from it,
+    /// substituting the summary from the index for the one derived from the
+    /// tarball's `Cargo.toml` (see comment below).
+    fn get_pkg(&mut self, package: &PackageId, path: FileLock) -> CargoResult<Package> {
+        let path = self
+            .unpack_package(package, &path)
+            .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
+        let mut src = PathSource::new(&path, &self.source_id, self.config);
+        src.update()?;
+        let pkg = match src.download(package)? {
+            MaybePackage::Ready(pkg) => pkg,
+            MaybePackage::Download { .. } => unreachable!(),
+        };
+
+        // Unfortunately the index and the actual Cargo.toml in the index can
+        // differ due to historical Cargo bugs. To paper over these we trash the
+        // *summary* loaded from the Cargo.toml we just downloaded with the one
+        // we loaded from the index.
+        let summaries = self
+            .index
+            .summaries(package.name().as_str(), &mut *self.ops)?;
+        let summary = summaries
+            .iter()
+            .map(|s| &s.0)
+            .find(|s| s.package_id() == package)
+            .expect("summary not found");
+        let mut manifest = pkg.manifest().clone();
+        manifest.set_summary(summary.clone());
+        Ok(Package::new(manifest, pkg.manifest_path()))
+    }
+}
+
+impl<'cfg> Source for RegistrySource<'cfg> {
+    /// Yields summaries matching `dep`, lazily falling back to an index
+    /// update when a precise (lockfile) dependency finds no match.
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        // If this is a precise dependency, then it came from a lockfile and in
+        // theory the registry is known to contain this version. If, however, we
+        // come back with no summaries, then our registry may need to be
+        // updated, so we fall back to performing a lazy update.
+        if dep.source_id().precise().is_some() && !self.updated {
+            debug!("attempting query without update");
+            let mut called = false;
+            self.index.query_inner(dep, &mut *self.ops, &mut |s| {
+                if dep.matches(&s) {
+                    called = true;
+                    f(s);
+                }
+            })?;
+            if called {
+                return Ok(());
+            } else {
+                debug!("falling back to an update");
+                self.do_update()?;
+            }
+        }
+
+        self.index.query_inner(dep, &mut *self.ops, &mut |s| {
+            if dep.matches(&s) {
+                f(s);
+            }
+        })
+    }
+
+    /// Like `query` but without the `dep.matches` filter: the callback sees
+    /// every summary the index has for the package.
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        self.index.query_inner(dep, &mut *self.ops, f)
+    }
+
+    // Registry downloads are verified against checksums from the index.
+    fn supports_checksums(&self) -> bool {
+        true
+    }
+
+    fn requires_precise(&self) -> bool {
+        false
+    }
+
+    fn source_id(&self) -> &SourceId {
+        &self.source_id
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        // If we have an imprecise version then we don't know what we're going
+        // to look for, so we always attempt to perform an update here.
+        //
+        // If we have a precise version, then we'll update lazily during the
+        // querying phase. Note that precise in this case is only
+        // `Some("locked")` as other `Some` values indicate a `cargo update
+        // --precise` request
+        if self.source_id.precise() != Some("locked") {
+            self.do_update()?;
+        } else {
+            debug!("skipping update due to locked registry");
+        }
+        Ok(())
+    }
+
+    /// Asks `ops` for the crate file; either it's already cached (unpack and
+    /// load it now) or the caller must perform the actual HTTP download.
+    fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage> {
+        let hash = self.index.hash(package, &mut *self.ops)?;
+        match self.ops.download(package, &hash)? {
+            MaybeLock::Ready(file) => {
+                self.get_pkg(package, file).map(MaybePackage::Ready)
+            }
+            MaybeLock::Download { url, descriptor } => {
+                Ok(MaybePackage::Download { url, descriptor })
+            }
+        }
+    }
+
+    /// Persists downloaded bytes (checksum-verified by `ops`) and loads the
+    /// resulting package.
+    fn finish_download(&mut self, package: &PackageId, data: Vec<u8>)
+        -> CargoResult<Package>
+    {
+        let hash = self.index.hash(package, &mut *self.ops)?;
+        let file = self.ops.finish_download(package, &hash, &data)?;
+        self.get_pkg(package, file)
+    }
+
+    // Published registry versions are immutable, so the version string alone
+    // is a sufficient fingerprint.
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        Ok(pkg.package_id().version().to_string())
+    }
+
+    fn describe(&self) -> String {
+        self.source_id.display_registry()
+    }
+}
diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs
new file mode 100644 (file)
index 0000000..854206d
--- /dev/null
@@ -0,0 +1,280 @@
+use std::cell::{Cell, Ref, RefCell};
+use std::fmt::Write as FmtWrite;
+use std::io::SeekFrom;
+use std::io::prelude::*;
+use std::mem;
+use std::path::Path;
+use std::str;
+
+use git2;
+use hex;
+use serde_json;
+use lazycell::LazyCell;
+
+use core::{PackageId, SourceId};
+use sources::git;
+use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
+use sources::registry::MaybeLock;
+use util::{FileLock, Filesystem};
+use util::{Config, Sha256};
+use util::errors::{CargoResult, CargoResultExt};
+
+/// `RegistryData` backed by a remote registry: a git-hosted index plus
+/// HTTP-downloaded `.crate` files cached on disk.
+pub struct RemoteRegistry<'cfg> {
+    /// Checkout of the registry's index git repository.
+    index_path: Filesystem,
+    /// Cache directory for downloaded `.crate` files.
+    cache_path: Filesystem,
+    source_id: SourceId,
+    config: &'cfg Config,
+    /// Cached tree of the index head commit. The `'static` lifetime is a
+    /// lie upheld by dropping this before `repo` (see `tree` and `Drop`).
+    tree: RefCell<Option<git2::Tree<'static>>>,
+    /// Lazily-opened handle to the index repository.
+    repo: LazyCell<git2::Repository>,
+    /// Cached oid of `refs/remotes/origin/master` (see `head`).
+    head: Cell<Option<git2::Oid>>,
+}
+
+impl<'cfg> RemoteRegistry<'cfg> {
+    pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
+        RemoteRegistry {
+            index_path: config.registry_index_path().join(name),
+            cache_path: config.registry_cache_path().join(name),
+            source_id: source_id.clone(),
+            config,
+            tree: RefCell::new(None),
+            repo: LazyCell::new(),
+            head: Cell::new(None),
+        }
+    }
+
+    fn repo(&self) -> CargoResult<&git2::Repository> {
+        self.repo.try_borrow_with(|| {
+            let path = self.index_path.clone().into_path_unlocked();
+
+            // Fast path without a lock
+            if let Ok(repo) = git2::Repository::open(&path) {
+                trace!("opened a repo without a lock");
+                return Ok(repo);
+            }
+
+            // Ok, now we need to lock and try the whole thing over again.
+            trace!("acquiring registry index lock");
+            let lock =
+                self.index_path
+                    .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
+            match git2::Repository::open(&path) {
+                Ok(repo) => Ok(repo),
+                Err(_) => {
+                    let _ = lock.remove_siblings();
+
+                    // Note that we'd actually prefer to use a bare repository
+                    // here as we're not actually going to check anything out.
+                    // All versions of Cargo, though, share the same CARGO_HOME,
+                    // so for compatibility with older Cargo which *does* do
+                    // checkouts we make sure to initialize a new full
+                    // repository (not a bare one).
+                    //
+                    // We should change this to `init_bare` whenever we feel
+                    // like enough time has passed or if we change the directory
+                    // that the folder is located in, such as by changing the
+                    // hash at the end of the directory.
+                    //
+                    // Note that in the meantime we also skip `init.templatedir`
+                    // as it can be misconfigured sometimes or otherwise add
+                    // things that we don't want.
+                    let mut opts = git2::RepositoryInitOptions::new();
+                    opts.external_template(false);
+                    Ok(git2::Repository::init_opts(&path, &opts).chain_err(|| {
+                        "failed to initialized index git repository"
+                    })?)
+                }
+            }
+        })
+    }
+
+    fn head(&self) -> CargoResult<git2::Oid> {
+        if self.head.get().is_none() {
+            let oid = self.repo()?.refname_to_id("refs/remotes/origin/master")?;
+            self.head.set(Some(oid));
+        }
+        Ok(self.head.get().unwrap())
+    }
+
+    fn tree(&self) -> CargoResult<Ref<git2::Tree>> {
+        {
+            let tree = self.tree.borrow();
+            if tree.is_some() {
+                return Ok(Ref::map(tree, |s| s.as_ref().unwrap()));
+            }
+        }
+        let repo = self.repo()?;
+        let commit = repo.find_commit(self.head()?)?;
+        let tree = commit.tree()?;
+
+        // Unfortunately in libgit2 the tree objects look like they've got a
+        // reference to the repository object which means that a tree cannot
+        // outlive the repository that it came from. Here we want to cache this
+        // tree, though, so to accomplish this we transmute it to a static
+        // lifetime.
+        //
+        // Note that we don't actually hand out the static lifetime, instead we
+        // only return a scoped one from this function. Additionally the repo
+        // we loaded from (above) lives as long as this object
+        // (`RemoteRegistry`) so we then just need to ensure that the tree is
+        // destroyed first in the destructor, hence the destructor on
+        // `RemoteRegistry` below.
+        let tree = unsafe { mem::transmute::<git2::Tree, git2::Tree<'static>>(tree) };
+        *self.tree.borrow_mut() = Some(tree);
+        Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
+    }
+
+    fn filename(&self, pkg: &PackageId) -> String {
+        format!("{}-{}.crate", pkg.name(), pkg.version())
+    }
+}
+
+impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
+    /// Ensures the index repository (and its parent dirs) exists.
+    fn prepare(&self) -> CargoResult<()> {
+        self.repo()?; // create intermediate dirs and initialize the repo
+        Ok(())
+    }
+
+    fn index_path(&self) -> &Filesystem {
+        &self.index_path
+    }
+
+    /// Reads `path` out of the index repo's head tree (not the working
+    /// directory) and hands its blob contents to `data`.
+    fn load(
+        &self,
+        _root: &Path,
+        path: &Path,
+        data: &mut FnMut(&[u8]) -> CargoResult<()>,
+    ) -> CargoResult<()> {
+        // Note that the index calls this method and the filesystem is locked
+        // in the index, so we don't need to worry about an `update_index`
+        // happening in a different process.
+        let repo = self.repo()?;
+        let tree = self.tree()?;
+        let entry = tree.get_path(path)?;
+        let object = entry.to_object(repo)?;
+        let blob = match object.as_blob() {
+            Some(blob) => blob,
+            None => bail!("path `{}` is not a blob in the git repo", path.display()),
+        };
+        data(blob.content())
+    }
+
+    /// Parses `config.json` out of the index under a read lock.
+    fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+        debug!("loading config");
+        self.prepare()?;
+        let _lock =
+            self.index_path
+                .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index")?;
+        let mut config = None;
+        self.load(Path::new(""), Path::new("config.json"), &mut |json| {
+            config = Some(serde_json::from_slice(json)?);
+            Ok(())
+        })?;
+        trace!("config loaded");
+        Ok(config)
+    }
+
+    /// Fetches the index's master branch under an exclusive lock, clearing
+    /// the cached head/tree first. No-ops when offline or when index updates
+    /// are disabled via `-Z` flags.
+    fn update_index(&mut self) -> CargoResult<()> {
+        if self.config.cli_unstable().offline {
+            return Ok(());
+        }
+        if self.config.cli_unstable().no_index_update {
+            return Ok(());
+        }
+
+        debug!("updating the index");
+
+        // Ensure that we'll actually be able to acquire an HTTP handle later on
+        // once we start trying to download crates. This will weed out any
+        // problems with `.cargo/config` configuration related to HTTP.
+        //
+        // This way if there's a problem the error gets printed before we even
+        // hit the index, which may not actually read this configuration.
+        self.config.http()?;
+
+        self.prepare()?;
+        // Invalidate cached head/tree; they refer to the pre-fetch state.
+        self.head.set(None);
+        *self.tree.borrow_mut() = None;
+        let _lock =
+            self.index_path
+                .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
+        self.config
+            .shell()
+            .status("Updating", self.source_id.display_registry())?;
+
+        // git fetch origin master
+        let url = self.source_id.url();
+        let refspec = "refs/heads/master:refs/remotes/origin/master";
+        let repo = self.repo.borrow_mut().unwrap();
+        git::fetch(repo, url, refspec, self.config)
+            .chain_err(|| format!("failed to fetch `{}`", url))?;
+        Ok(())
+    }
+
+    /// Returns the cached `.crate` file if present, otherwise the download
+    /// URL built from the registry's `dl` template.
+    fn download(&mut self, pkg: &PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
+        let filename = self.filename(pkg);
+
+        // Attempt to open a read-only copy first to avoid an exclusive write
+        // lock and also work with read-only filesystems. Note that we check the
+        // length of the file like below to handle interrupted downloads.
+        //
+        // If this fails then we fall through to the exclusive path where we may
+        // have to redownload the file.
+        if let Ok(dst) = self.cache_path.open_ro(&filename, self.config, &filename) {
+            let meta = dst.file().metadata()?;
+            if meta.len() > 0 {
+                return Ok(MaybeLock::Ready(dst));
+            }
+        }
+
+        let config = self.config()?.unwrap();
+        let mut url = config.dl.clone();
+        // Backwards compatibility: templates without placeholders get the
+        // historical `/{crate}/{version}/download` suffix appended.
+        if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) {
+            write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
+        }
+        let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
+            .replace(VERSION_TEMPLATE, &pkg.version().to_string());
+
+        Ok(MaybeLock::Download { url, descriptor: pkg.to_string() })
+    }
+
+    /// Verifies the downloaded bytes against `checksum`, writes them into
+    /// the cache, and returns a lock on the file rewound to the start.
+    fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
+        -> CargoResult<FileLock>
+    {
+        // Verify what we just downloaded
+        let mut state = Sha256::new();
+        state.update(data);
+        if hex::encode(state.finish()) != checksum {
+            bail!("failed to verify the checksum of `{}`", pkg)
+        }
+
+        let filename = self.filename(pkg);
+        let mut dst = self.cache_path.open_rw(&filename, self.config, &filename)?;
+        let meta = dst.file().metadata()?;
+        // Another cargo process may have raced us and written the file
+        // already; a non-empty file is taken as complete.
+        if meta.len() > 0 {
+            return Ok(dst);
+        }
+
+        dst.write_all(data)?;
+        dst.seek(SeekFrom::Start(0))?;
+        Ok(dst)
+    }
+
+    /// A non-empty cached `.crate` file counts as downloaded (an empty file
+    /// indicates an interrupted download).
+    fn is_crate_downloaded(&self, pkg: &PackageId) -> bool {
+        let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+        let path = Path::new(&filename);
+
+        if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
+            if let Ok(meta) = dst.file().metadata() {
+                return meta.len() > 0;
+            }
+        }
+        false
+    }
+}
+
+impl<'cfg> Drop for RemoteRegistry<'cfg> {
+    fn drop(&mut self) {
+        // Just be sure to drop this before our other fields: the cached tree
+        // borrows from `repo` via an unsafe `'static` transmute (see `tree`),
+        // so it must not outlive the repository.
+        self.tree.borrow_mut().take();
+    }
+}
diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs
new file mode 100644 (file)
index 0000000..e413de2
--- /dev/null
@@ -0,0 +1,114 @@
+use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
+use core::source::MaybePackage;
+use util::errors::{CargoResult, CargoResultExt};
+
+/// A `Source` adaptor used for source replacement: operations are forwarded
+/// to `inner` (registered under `replace_with`), while package and source ids
+/// are mapped back to `to_replace` on the way out so the rest of cargo keeps
+/// observing the original source id.
+pub struct ReplacedSource<'cfg> {
+    to_replace: SourceId,
+    replace_with: SourceId,
+    inner: Box<Source + 'cfg>,
+}
+
+impl<'cfg> ReplacedSource<'cfg> {
+    /// Wraps `src` (the replacement source, identified by `replace_with`) so
+    /// that it masquerades as the `to_replace` source.
+    pub fn new(
+        to_replace: &SourceId,
+        replace_with: &SourceId,
+        src: Box<Source + 'cfg>,
+    ) -> ReplacedSource<'cfg> {
+        ReplacedSource {
+            to_replace: to_replace.clone(),
+            replace_with: replace_with.clone(),
+            inner: src,
+        }
+    }
+}
+
+impl<'cfg> Source for ReplacedSource<'cfg> {
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        // Rewrite the dependency to target the replacement source, run the
+        // query there, and map each resulting summary back to the original
+        // source id before handing it to the caller.
+        let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
+        let dep = dep.clone().map_source(to_replace, replace_with);
+
+        self.inner
+            .query(
+                &dep,
+                &mut |summary| f(summary.map_source(replace_with, to_replace)),
+            )
+            .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
+        Ok(())
+    }
+
+    // Same id-mapping dance as `query`, but for fuzzy (name-only) lookups.
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
+        let dep = dep.clone().map_source(to_replace, replace_with);
+
+        self.inner
+            .fuzzy_query(
+                &dep,
+                &mut |summary| f(summary.map_source(replace_with, to_replace)),
+            )
+            .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
+        Ok(())
+    }
+
+    // Capability queries are forwarded unchanged to the replacement source.
+    fn supports_checksums(&self) -> bool {
+        self.inner.supports_checksums()
+    }
+
+    fn requires_precise(&self) -> bool {
+        self.inner.requires_precise()
+    }
+
+    // The public identity of this source is the id being replaced...
+    fn source_id(&self) -> &SourceId {
+        &self.to_replace
+    }
+
+    // ...while the replacement id is exposed separately.
+    fn replaced_source_id(&self) -> &SourceId {
+        &self.replace_with
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        self.inner
+            .update()
+            .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?;
+        Ok(())
+    }
+
+    fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
+        // Downloads are requested with the replacement source id; a ready
+        // package is mapped back, while a pending download passes through.
+        let id = id.with_source_id(&self.replace_with);
+        let pkg = self.inner
+            .download(&id)
+            .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
+        Ok(match pkg {
+            MaybePackage::Ready(pkg) => {
+                MaybePackage::Ready(pkg.map_source(&self.replace_with, &self.to_replace))
+            }
+            other @ MaybePackage::Download { .. } => other,
+        })
+    }
+
+    fn finish_download(&mut self, id: &PackageId, data: Vec<u8>)
+        -> CargoResult<Package>
+    {
+        let id = id.with_source_id(&self.replace_with);
+        let pkg = self.inner
+            .finish_download(&id, data)
+            .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
+        Ok(pkg.map_source(&self.replace_with, &self.to_replace))
+    }
+
+    fn fingerprint(&self, id: &Package) -> CargoResult<String> {
+        self.inner.fingerprint(id)
+    }
+
+    fn verify(&self, id: &PackageId) -> CargoResult<()> {
+        let id = id.with_source_id(&self.replace_with);
+        self.inner.verify(&id)
+    }
+
+    fn describe(&self) -> String {
+        format!("{} (which is replacing {})", self.inner.describe(), self.to_replace)
+    }
+
+    fn is_replaced(&self) -> bool {
+        true
+    }
+}
diff --git a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs
new file mode 100644 (file)
index 0000000..877452c
--- /dev/null
@@ -0,0 +1,274 @@
+use std::str::{self, FromStr};
+use std::iter;
+use std::fmt;
+
+use util::{CargoError, CargoResult};
+
+/// A single parsed `cfg` atom: either a bare name (`unix`) or a
+/// key/value pair (`target_os = "linux"`).
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Cfg {
+    Name(String),
+    KeyPair(String, String),
+}
+
+/// A parsed `cfg(..)` expression tree supporting `not(..)`, `all(..)`,
+/// `any(..)` and leaf `Cfg` values.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum CfgExpr {
+    Not(Box<CfgExpr>),
+    All(Vec<CfgExpr>),
+    Any(Vec<CfgExpr>),
+    Value(Cfg),
+}
+
+/// Lexical tokens produced by `Tokenizer`; `Ident` and `String` borrow
+/// slices of the original input.
+#[derive(PartialEq)]
+enum Token<'a> {
+    LeftParen,
+    RightParen,
+    Ident(&'a str),
+    Comma,
+    Equals,
+    String(&'a str),
+}
+
+/// Character-level lexer over the cfg string; `orig` keeps the full input
+/// so tokens can be returned as slices by byte index.
+struct Tokenizer<'a> {
+    s: iter::Peekable<str::CharIndices<'a>>,
+    orig: &'a str,
+}
+
+/// Recursive-descent parser over a peekable token stream.
+struct Parser<'a> {
+    t: iter::Peekable<Tokenizer<'a>>,
+}
+
+impl FromStr for Cfg {
+    type Err = CargoError;
+
+    /// Parses a single cfg atom; any trailing tokens make the whole input
+    /// malformed.
+    fn from_str(s: &str) -> CargoResult<Cfg> {
+        let mut p = Parser::new(s);
+        let e = p.cfg()?;
+        if p.t.next().is_some() {
+            bail!("malformed cfg value or key/value pair: `{}`", s)
+        }
+        Ok(e)
+    }
+}
+
+impl fmt::Display for Cfg {
+    // Renders in the same surface syntax the parser accepts, e.g.
+    // `unix` or `target_os = "linux"`.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            Cfg::Name(ref s) => s.fmt(f),
+            Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v),
+        }
+    }
+}
+
+impl CfgExpr {
+    /// Utility function to check if the key, "cfg(..)" matches the `target_cfg`.
+    /// Keys that are not of the `cfg(..)` form, or whose inner expression
+    /// fails to parse, simply do not match.
+    pub fn matches_key(key: &str, target_cfg: &[Cfg]) -> bool {
+        if key.starts_with("cfg(") && key.ends_with(')') {
+            let cfg = &key[4..key.len() - 1 ];
+
+            CfgExpr::from_str(cfg).ok().map(|ce| ce.matches(target_cfg)).unwrap_or(false)
+        } else {
+            false
+        }
+    }
+
+    /// Evaluates this expression against the set of active cfg atoms:
+    /// a leaf matches iff it is contained in `cfg`.
+    pub fn matches(&self, cfg: &[Cfg]) -> bool {
+        match *self {
+            CfgExpr::Not(ref e) => !e.matches(cfg),
+            CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)),
+            CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)),
+            CfgExpr::Value(ref e) => cfg.contains(e),
+        }
+    }
+}
+
+impl FromStr for CfgExpr {
+    type Err = CargoError;
+
+    /// Parses exactly one cfg expression; leftover tokens after the first
+    /// expression are rejected with a hint to use `all()`/`any()`.
+    fn from_str(s: &str) -> CargoResult<CfgExpr> {
+        let mut p = Parser::new(s);
+        let e = p.expr()?;
+        if p.t.next().is_some() {
+            bail!(
+                "can only have one cfg-expression, consider using all() or \
+                 any() explicitly"
+            )
+        }
+        Ok(e)
+    }
+}
+
+impl fmt::Display for CfgExpr {
+    // Round-trippable rendering: `not(..)`, `all(a, b)`, `any(a, b)`, or a
+    // bare leaf value.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            CfgExpr::Not(ref e) => write!(f, "not({})", e),
+            CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)),
+            CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)),
+            CfgExpr::Value(ref e) => write!(f, "{}", e),
+        }
+    }
+}
+
+/// Display helper that joins a slice's elements with `", "`.
+struct CommaSep<'a, T: 'a>(&'a [T]);
+
+impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        for (i, v) in self.0.iter().enumerate() {
+            // Separator goes before every element except the first.
+            if i > 0 {
+                write!(f, ", ")?;
+            }
+            write!(f, "{}", v)?;
+        }
+        Ok(())
+    }
+}
+
+impl<'a> Parser<'a> {
+    fn new(s: &'a str) -> Parser<'a> {
+        Parser {
+            t: Tokenizer {
+                s: s.char_indices().peekable(),
+                orig: s,
+            }.peekable(),
+        }
+    }
+
+    /// Parses one cfg expression: `all(..)`, `any(..)`, `not(..)`, or a
+    /// leaf cfg atom.
+    fn expr(&mut self) -> CargoResult<CfgExpr> {
+        match self.t.peek() {
+            Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => {
+                self.t.next();
+                let mut e = Vec::new();
+                self.eat(&Token::LeftParen)?;
+                // Comma-separated sub-expressions; a trailing comma before
+                // `)` is accepted by the `try(RightParen)` loop condition.
+                while !self.try(&Token::RightParen) {
+                    e.push(self.expr()?);
+                    if !self.try(&Token::Comma) {
+                        self.eat(&Token::RightParen)?;
+                        break;
+                    }
+                }
+                if op == "all" {
+                    Ok(CfgExpr::All(e))
+                } else {
+                    Ok(CfgExpr::Any(e))
+                }
+            }
+            Some(&Ok(Token::Ident("not"))) => {
+                self.t.next();
+                self.eat(&Token::LeftParen)?;
+                let e = self.expr()?;
+                self.eat(&Token::RightParen)?;
+                Ok(CfgExpr::Not(Box::new(e)))
+            }
+            Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
+            Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
+            None => bail!(
+                "expected start of a cfg expression, \
+                 found nothing"
+            ),
+        }
+    }
+
+    /// Parses a leaf atom: an identifier optionally followed by
+    /// `= "string"`.
+    fn cfg(&mut self) -> CargoResult<Cfg> {
+        match self.t.next() {
+            Some(Ok(Token::Ident(name))) => {
+                let e = if self.try(&Token::Equals) {
+                    let val = match self.t.next() {
+                        Some(Ok(Token::String(s))) => s,
+                        Some(Ok(t)) => bail!("expected a string, found {}", t.classify()),
+                        Some(Err(e)) => return Err(e),
+                        None => bail!("expected a string, found nothing"),
+                    };
+                    Cfg::KeyPair(name.to_string(), val.to_string())
+                } else {
+                    Cfg::Name(name.to_string())
+                };
+                Ok(e)
+            }
+            Some(Ok(t)) => bail!("expected identifier, found {}", t.classify()),
+            Some(Err(e)) => Err(e),
+            None => bail!("expected identifier, found nothing"),
+        }
+    }
+
+    /// Consumes `token` if it is next; returns whether it was consumed.
+    fn try(&mut self, token: &Token<'a>) -> bool {
+        match self.t.peek() {
+            Some(&Ok(ref t)) if token == t => {}
+            _ => return false,
+        }
+        self.t.next();
+        true
+    }
+
+    /// Consumes `token` or errors with a description of what was found.
+    fn eat(&mut self, token: &Token<'a>) -> CargoResult<()> {
+        match self.t.next() {
+            Some(Ok(ref t)) if token == t => Ok(()),
+            Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()),
+            Some(Err(e)) => Err(e),
+            None => bail!("expected {}, but cfg expr ended", token.classify()),
+        }
+    }
+}
+
+impl<'a> Iterator for Tokenizer<'a> {
+    type Item = CargoResult<Token<'a>>;
+
+    fn next(&mut self) -> Option<CargoResult<Token<'a>>> {
+        loop {
+            match self.s.next() {
+                // Spaces are skipped; the loop continues to the next char.
+                Some((_, ' ')) => {}
+                Some((_, '(')) => return Some(Ok(Token::LeftParen)),
+                Some((_, ')')) => return Some(Ok(Token::RightParen)),
+                Some((_, ',')) => return Some(Ok(Token::Comma)),
+                Some((_, '=')) => return Some(Ok(Token::Equals)),
+                // String literal: scan to the closing quote and slice the
+                // original input between the quotes (no escape handling).
+                Some((start, '"')) => {
+                    while let Some((end, ch)) = self.s.next() {
+                        if ch == '"' {
+                            return Some(Ok(Token::String(&self.orig[start + 1..end])));
+                        }
+                    }
+                    return Some(Err(format_err!("unterminated string in cfg")));
+                }
+                // Identifier: peek ahead until a non-identifier char, slicing
+                // the input; hitting end-of-input ends the identifier too.
+                Some((start, ch)) if is_ident_start(ch) => {
+                    while let Some(&(end, ch)) = self.s.peek() {
+                        if !is_ident_rest(ch) {
+                            return Some(Ok(Token::Ident(&self.orig[start..end])));
+                        } else {
+                            self.s.next();
+                        }
+                    }
+                    return Some(Ok(Token::Ident(&self.orig[start..])));
+                }
+                Some((_, ch)) => {
+                    return Some(Err(format_err!(
+                        "unexpected character in \
+                         cfg `{}`, expected parens, \
+                         a comma, an identifier, or \
+                         a string",
+                        ch
+                    )))
+                }
+                None => return None,
+            }
+        }
+    }
+}
+
+/// True for characters that may begin an identifier: `_` or ASCII letters.
+fn is_ident_start(ch: char) -> bool {
+    ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
+}
+
+/// True for characters that may continue an identifier: start chars plus
+/// ASCII digits.
+fn is_ident_rest(ch: char) -> bool {
+    is_ident_start(ch) || ('0' <= ch && ch <= '9')
+}
+
+impl<'a> Token<'a> {
+    /// Human-readable description of the token kind, used in parse errors.
+    fn classify(&self) -> &str {
+        match *self {
+            Token::LeftParen => "`(`",
+            Token::RightParen => "`)`",
+            Token::Ident(..) => "an identifier",
+            Token::Comma => "`,`",
+            Token::Equals => "`=`",
+            Token::String(..) => "a string",
+        }
+    }
+}
diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs
new file mode 100644 (file)
index 0000000..cf5b22a
--- /dev/null
@@ -0,0 +1,1658 @@
+use std;
+use std::cell::{RefCell, RefMut};
+use std::collections::hash_map::Entry::{Occupied, Vacant};
+use std::collections::hash_map::HashMap;
+use std::collections::HashSet;
+use std::env;
+use std::fmt;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+use std::sync::{Once, ONCE_INIT};
+use std::time::Instant;
+use std::vec;
+
+use curl::easy::Easy;
+use failure;
+use jobserver;
+use lazycell::LazyCell;
+use serde::{de, de::IntoDeserializer};
+use toml;
+
+use core::profiles::ConfigProfiles;
+use core::shell::Verbosity;
+use core::{CliUnstable, Shell, SourceId, Workspace};
+use ops;
+use url::Url;
+use util::errors::{internal, CargoResult, CargoResultExt};
+use util::paths;
+use util::toml as cargo_toml;
+use util::Filesystem;
+use util::Rustc;
+use util::ToUrl;
+
+use self::ConfigValue as CV;
+
+/// Configuration information for cargo. This is not specific to a build, it is information
+/// relating to cargo itself.
+///
+/// This struct implements `Default`: all fields can be inferred.
+#[derive(Debug)]
+pub struct Config {
+    /// The location of the user's 'home' directory. OS-dependent.
+    home_path: Filesystem,
+    /// Information about how to write messages to the shell
+    shell: RefCell<Shell>,
+    /// A collection of configuration options, lazily loaded from config files
+    values: LazyCell<HashMap<String, ConfigValue>>,
+    /// The current working directory of cargo
+    cwd: PathBuf,
+    /// The location of the cargo executable (path to current process), lazily resolved
+    cargo_exe: LazyCell<PathBuf>,
+    /// The location of the rustdoc executable, lazily resolved
+    rustdoc: LazyCell<PathBuf>,
+    /// Whether we are printing extra verbose messages
+    extra_verbose: bool,
+    /// `frozen` is set if we shouldn't access the network
+    frozen: bool,
+    /// `locked` is set if we should not update lock files
+    locked: bool,
+    /// A global static IPC control mechanism (used for managing parallel builds),
+    /// inherited from the environment when available
+    jobserver: Option<jobserver::Client>,
+    /// Cli flags of the form "-Z something"
+    cli_flags: CliUnstable,
+    /// A handle on curl easy mode for http calls
+    easy: LazyCell<RefCell<Easy>>,
+    /// Cache of the `SourceId` for crates.io
+    crates_io_source_id: LazyCell<SourceId>,
+    /// If false, don't cache `rustc --version --verbose` invocations
+    cache_rustc_info: bool,
+    /// Creation time of this config, used to output the total build time
+    creation_time: Instant,
+    /// Target Directory via resolved Cli parameter
+    target_dir: Option<Filesystem>,
+    /// Environment variables, separated to assist testing (see `set_env`).
+    env: HashMap<String, String>,
+    /// Profiles loaded from config.
+    profiles: LazyCell<ConfigProfiles>,
+}
+
+impl Config {
+    /// Creates a `Config` from an explicit shell, working directory and cargo
+    /// home directory. Most callers want `Config::default()` instead, which
+    /// derives these values from the environment.
+    pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config {
+        // Process-wide jobserver client, captured once from the environment.
+        static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _;
+        static INIT: Once = ONCE_INIT;
+
+        // This should be called early on in the process, so in theory the
+        // unsafety is ok here. (taken ownership of random fds)
+        INIT.call_once(|| unsafe {
+            if let Some(client) = jobserver::Client::from_env() {
+                GLOBAL_JOBSERVER = Box::into_raw(Box::new(client));
+            }
+        });
+
+        let env: HashMap<_, _> = env::vars_os()
+            .filter_map(|(k, v)| {
+                // Ignore any key/values that are not valid Unicode.
+                match (k.into_string(), v.into_string()) {
+                    (Ok(k), Ok(v)) => Some((k, v)),
+                    _ => None,
+                }
+            })
+            .collect();
+
+        // CARGO_CACHE_RUSTC_INFO=0 disables caching of rustc version info;
+        // any other value (or absence) leaves it enabled.
+        let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO") {
+            Some(cache) => cache != "0",
+            _ => true,
+        };
+
+        Config {
+            home_path: Filesystem::new(homedir),
+            shell: RefCell::new(shell),
+            cwd,
+            values: LazyCell::new(),
+            cargo_exe: LazyCell::new(),
+            rustdoc: LazyCell::new(),
+            extra_verbose: false,
+            frozen: false,
+            locked: false,
+            jobserver: unsafe {
+                if GLOBAL_JOBSERVER.is_null() {
+                    None
+                } else {
+                    Some((*GLOBAL_JOBSERVER).clone())
+                }
+            },
+            cli_flags: CliUnstable::default(),
+            easy: LazyCell::new(),
+            crates_io_source_id: LazyCell::new(),
+            cache_rustc_info,
+            creation_time: Instant::now(),
+            target_dir: None,
+            env,
+            profiles: LazyCell::new(),
+        }
+    }
+
+    /// Creates a `Config` rooted at the process's current directory, with the
+    /// cargo home derived from the environment. Fails when either the cwd or
+    /// the home directory cannot be determined.
+    pub fn default() -> CargoResult<Config> {
+        let shell = Shell::new();
+        let cwd =
+            env::current_dir().chain_err(|| "couldn't get the current directory of the process")?;
+        let homedir = homedir(&cwd).ok_or_else(|| {
+            format_err!(
+                "Cargo couldn't find your home directory. \
+                 This probably means that $HOME was not set."
+            )
+        })?;
+        Ok(Config::new(shell, cwd, homedir))
+    }
+
+    /// The user's cargo home directory (OS-dependent), as a `Filesystem`
+    /// handle rather than a raw path.
+    pub fn home(&self) -> &Filesystem {
+        &self.home_path
+    }
+
+    /// The cargo git checkout/database directory (`<cargo_home>/git`)
+    pub fn git_path(&self) -> Filesystem {
+        self.home_path.join("git")
+    }
+
+    /// The cargo registry index directory (`<cargo_home>/registry/index`)
+    pub fn registry_index_path(&self) -> Filesystem {
+        self.home_path.join("registry").join("index")
+    }
+
+    /// The cargo registry cache directory (`<cargo_home>/registry/cache`)
+    pub fn registry_cache_path(&self) -> Filesystem {
+        self.home_path.join("registry").join("cache")
+    }
+
+    /// The cargo registry unpacked-source directory (`<cargo_home>/registry/src`)
+    pub fn registry_source_path(&self) -> Filesystem {
+        self.home_path.join("registry").join("src")
+    }
+
+    /// The default registry name, read from the `registry.default` config
+    /// key; `None` when the user has not configured one.
+    pub fn default_registry(&self) -> CargoResult<Option<String>> {
+        Ok(
+            match self.get_string("registry.default")? {
+                Some(registry) => Some(registry.val),
+                None => None,
+            }
+        )
+    }
+
+    /// Get a mutable borrow of the shell, for e.g. writing error messages.
+    /// Panics (via `RefCell`) if the shell is already borrowed.
+    pub fn shell(&self) -> RefMut<Shell> {
+        self.shell.borrow_mut()
+    }
+
+    /// Get the path to the `rustdoc` executable; resolved once via
+    /// `get_tool` and memoized for the lifetime of this config.
+    pub fn rustdoc(&self) -> CargoResult<&Path> {
+        self.rustdoc
+            .try_borrow_with(|| self.get_tool("rustdoc"))
+            .map(AsRef::as_ref)
+    }
+
+    /// Get the path to the `rustc` executable. When a workspace is supplied
+    /// and caching is enabled, rustc version info is cached in
+    /// `<target_dir>/.rustc_info.json`.
+    pub fn rustc(&self, ws: Option<&Workspace>) -> CargoResult<Rustc> {
+        let cache_location = ws.map(|ws| {
+            ws.target_dir()
+                .join(".rustc_info.json")
+                .into_path_unlocked()
+        });
+        Rustc::new(
+            self.get_tool("rustc")?,
+            self.maybe_get_tool("rustc_wrapper")?,
+            // Candidate `<cargo_home>/bin/rustc` path, with the platform's
+            // executable extension appended.
+            &self
+                .home()
+                .join("bin")
+                .join("rustc")
+                .into_path_unlocked()
+                .with_extension(env::consts::EXE_EXTENSION),
+            if self.cache_rustc_info {
+                cache_location
+            } else {
+                None
+            },
+        )
+    }
+
+    /// Get the path to the `cargo` executable, trying `env::current_exe()`
+    /// first and falling back to resolving `argv[0]`; memoized after the
+    /// first successful lookup.
+    pub fn cargo_exe(&self) -> CargoResult<&Path> {
+        self.cargo_exe
+            .try_borrow_with(|| {
+                fn from_current_exe() -> CargoResult<PathBuf> {
+                    // Try fetching the path to `cargo` using env::current_exe().
+                    // The method varies per operating system and might fail; in particular,
+                    // it depends on /proc being mounted on Linux, and some environments
+                    // (like containers or chroots) may not have that available.
+                    let exe = env::current_exe()?.canonicalize()?;
+                    Ok(exe)
+                }
+
+                fn from_argv() -> CargoResult<PathBuf> {
+                    // Grab argv[0] and attempt to resolve it to an absolute path.
+                    // If argv[0] has one component, it must have come from a PATH lookup,
+                    // so probe PATH in that case.
+                    // Otherwise, it has multiple components and is either:
+                    // - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
+                    // - an absolute path (e.g. `/usr/local/bin/cargo`).
+                    // In either case, Path::canonicalize will return the full absolute path
+                    // to the target if it exists
+                    let argv0 = env::args_os()
+                        .map(PathBuf::from)
+                        .next()
+                        .ok_or_else(||format_err!("no argv[0]"))?;
+                    paths::resolve_executable(&argv0)
+                }
+
+                let exe = from_current_exe()
+                    .or_else(|_| from_argv())
+                    .chain_err(|| "couldn't get the path to cargo executable")?;
+                Ok(exe)
+            })
+            .map(AsRef::as_ref)
+    }
+
+    /// Returns profiles declared in config files, lazily loaded from the
+    /// `profile` key. Without `-Z config-profile` a warning is emitted and
+    /// default profiles are returned instead of the configured ones.
+    pub fn profiles(&self) -> CargoResult<&ConfigProfiles> {
+        self.profiles.try_borrow_with(|| {
+            let ocp = self.get::<Option<ConfigProfiles>>("profile")?;
+            if let Some(config_profiles) = ocp {
+                // Warn if config profiles without CLI option.
+                if !self.cli_unstable().config_profile {
+                    self.shell().warn(
+                        "profiles in config files require `-Z config-profile` \
+                         command-line option",
+                    )?;
+                    return Ok(ConfigProfiles::default());
+                }
+                Ok(config_profiles)
+            } else {
+                Ok(ConfigProfiles::default())
+            }
+        })
+    }
+
+    /// Lazily loads (once) and returns the merged configuration values.
+    pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> {
+        self.values.try_borrow_with(|| self.load_values())
+    }
+
+    // Note: This is used by RLS, not Cargo.
+    /// Seeds the config values externally; errors if values were already
+    /// loaded or filled.
+    pub fn set_values(&self, values: HashMap<String, ConfigValue>) -> CargoResult<()> {
+        if self.values.borrow().is_some() {
+            bail!("config values already found")
+        }
+        match self.values.fill(values) {
+            Ok(()) => Ok(()),
+            Err(_) => bail!("could not fill values"),
+        }
+    }
+
+    /// Re-reads config values starting from the cargo home directory,
+    /// replacing whatever was previously loaded.
+    pub fn reload_rooted_at_cargo_home(&mut self) -> CargoResult<()> {
+        let home = self.home_path.clone().into_path_unlocked();
+        let values = self.load_values_from(&home)?;
+        self.values.replace(values);
+        Ok(())
+    }
+
+    /// The current working directory this config was created with.
+    pub fn cwd(&self) -> &Path {
+        &self.cwd
+    }
+
+    /// Resolves the target directory, in precedence order: the CLI-provided
+    /// value, then the `CARGO_TARGET_DIR` environment variable, then the
+    /// `build.target-dir` config key (both resolved relative to the cwd);
+    /// `None` when unset everywhere.
+    pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> {
+        if let Some(ref dir) = self.target_dir {
+            Ok(Some(dir.clone()))
+        } else if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
+            Ok(Some(Filesystem::new(self.cwd.join(dir))))
+        } else if let Some(val) = self.get_path("build.target-dir")? {
+            let val = self.cwd.join(val.val);
+            Ok(Some(Filesystem::new(val)))
+        } else {
+            Ok(None)
+        }
+    }
+
+    /// Walks a dot-separated `key` through the nested config tables and
+    /// returns a clone of the value, `None` if any segment is missing, or an
+    /// error if a non-table value is encountered before the last segment.
+    fn get_cv(&self, key: &str) -> CargoResult<Option<ConfigValue>> {
+        let vals = self.values()?;
+        let mut parts = key.split('.').enumerate();
+        let mut val = match vals.get(parts.next().unwrap().1) {
+            Some(val) => val,
+            None => return Ok(None),
+        };
+        for (i, part) in parts {
+            match *val {
+                CV::Table(ref map, _) => {
+                    val = match map.get(part) {
+                        Some(val) => val,
+                        None => return Ok(None),
+                    }
+                }
+                CV::Integer(_, ref path)
+                | CV::String(_, ref path)
+                | CV::List(_, ref path)
+                | CV::Boolean(_, ref path) => {
+                    // Byte length of the first `i` segments plus their `i - 1`
+                    // dots, i.e. the portion of `key` resolved so far — used
+                    // only to build the error message below.
+                    let idx = key.split('.').take(i).fold(0, |n, s| n + s.len()) + i - 1;
+                    let key_so_far = &key[..idx];
+                    bail!(
+                        "expected table for configuration key `{}`, \
+                         but found {} in {}",
+                        key_so_far,
+                        val.desc(),
+                        path.display()
+                    )
+                }
+            }
+        }
+        Ok(Some(val.clone()))
+    }
+
+    // Helper primarily for testing: replaces the captured environment map
+    // so env-based config lookups can be controlled deterministically.
+    pub fn set_env(&mut self, env: HashMap<String, String>) {
+        self.env = env;
+    }
+
+    /// Looks up `key` as its environment-variable form and parses the value
+    /// into `T`; parse failures are reported with the env var as the value's
+    /// definition site.
+    fn get_env<T>(&self, key: &ConfigKey) -> Result<OptValue<T>, ConfigError>
+    where
+        T: FromStr,
+        <T as FromStr>::Err: fmt::Display,
+    {
+        let key = key.to_env();
+        match self.env.get(&key) {
+            Some(value) => {
+                let definition = Definition::Environment(key);
+                Ok(Some(Value {
+                    val: value
+                        .parse()
+                        .map_err(|e| ConfigError::new(format!("{}", e), definition.clone()))?,
+                    definition,
+                }))
+            }
+            None => Ok(None),
+        }
+    }
+
+    /// Returns whether `key` is defined anywhere: as an exact environment
+    /// variable, as a prefix of one (i.e. the key names a sub-table set via
+    /// env), or in the loaded config files.
+    fn has_key(&self, key: &ConfigKey) -> bool {
+        let env_key = key.to_env();
+        if self.env.get(&env_key).is_some() {
+            return true;
+        }
+        let env_pattern = format!("{}_", env_key);
+        if self.env.keys().any(|k| k.starts_with(&env_pattern)) {
+            return true;
+        }
+        if let Ok(o_cv) = self.get_cv(&key.to_config()) {
+            if o_cv.is_some() {
+                return true;
+            }
+        }
+        false
+    }
+
+    /// Gets a string config value for `key`; environment variables take
+    /// precedence over config files.
+    pub fn get_string(&self, key: &str) -> CargoResult<OptValue<String>> {
+        self.get_string_priv(&ConfigKey::from_str(key))
+            .map_err(|e| e.into())
+    }
+
+    /// String lookup backing `get_string`: env first, then config files; a
+    /// non-string config value is a type error.
+    fn get_string_priv(&self, key: &ConfigKey) -> Result<OptValue<String>, ConfigError> {
+        match self.get_env(key)? {
+            Some(v) => Ok(Some(v)),
+            None => {
+                let config_key = key.to_config();
+                let o_cv = self.get_cv(&config_key)?;
+                match o_cv {
+                    Some(CV::String(s, path)) => Ok(Some(Value {
+                        val: s,
+                        definition: Definition::Path(path),
+                    })),
+                    Some(cv) => Err(ConfigError::expected(&config_key, "a string", &cv)),
+                    None => Ok(None),
+                }
+            }
+        }
+    }
+
+    /// Gets a boolean config value for `key`; environment variables take
+    /// precedence over config files.
+    pub fn get_bool(&self, key: &str) -> CargoResult<OptValue<bool>> {
+        self.get_bool_priv(&ConfigKey::from_str(key))
+            .map_err(|e| e.into())
+    }
+
+    /// Boolean lookup backing `get_bool`: env first, then config files; a
+    /// non-boolean config value is a type error.
+    fn get_bool_priv(&self, key: &ConfigKey) -> Result<OptValue<bool>, ConfigError> {
+        match self.get_env(key)? {
+            Some(v) => Ok(Some(v)),
+            None => {
+                let config_key = key.to_config();
+                let o_cv = self.get_cv(&config_key)?;
+                match o_cv {
+                    Some(CV::Boolean(b, path)) => Ok(Some(Value {
+                        val: b,
+                        definition: Definition::Path(path),
+                    })),
+                    Some(cv) => Err(ConfigError::expected(&config_key, "true/false", &cv)),
+                    None => Ok(None),
+                }
+            }
+        }
+    }
+
+    /// Interprets a config string as a path: values containing a path
+    /// separator are resolved relative to where they were defined, while a
+    /// bare name is returned as-is.
+    fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf {
+        let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\'));
+        if is_path {
+            definition.root(self).join(value)
+        } else {
+            // A pathless name
+            PathBuf::from(value)
+        }
+    }
+
+    /// Gets a string value for `key` and converts it to a path via
+    /// `string_to_path`.
+    pub fn get_path(&self, key: &str) -> CargoResult<OptValue<PathBuf>> {
+        if let Some(val) = self.get_string(key)? {
+            Ok(Some(Value {
+                val: self.string_to_path(val.val, &val.definition),
+                definition: val.definition,
+            }))
+        } else {
+            Ok(None)
+        }
+    }
+
+    /// Gets a program invocation from `key`: the first element becomes the
+    /// program path and the rest its arguments. Empty lists yield `None`.
+    pub fn get_path_and_args(
+        &self,
+        key: &str,
+    ) -> CargoResult<OptValue<(PathBuf, Vec<String>)>> {
+        if let Some(mut val) = self.get_list_or_split_string(key)? {
+            if !val.val.is_empty() {
+                return Ok(Some(Value {
+                    val: (
+                        self.string_to_path(val.val.remove(0), &val.definition),
+                        val.val,
+                    ),
+                    definition: val.definition,
+                }));
+            }
+        }
+        Ok(None)
+    }
+
+    // NOTE: This does *not* support environment variables.  Use `get` instead
+    // if you want that.
+    /// Gets a list value from the config files; each element carries the
+    /// path of the config file it came from.
+    pub fn get_list(&self, key: &str) -> CargoResult<OptValue<Vec<(String, PathBuf)>>> {
+        match self.get_cv(key)? {
+            Some(CV::List(i, path)) => Ok(Some(Value {
+                val: i,
+                definition: Definition::Path(path),
+            })),
+            Some(val) => self.expected("list", key, &val),
+            None => Ok(None),
+        }
+    }
+
+    /// Gets a list of strings from `key`, accepting either a real list or a
+    /// single space-separated string (from env or config) that is split on
+    /// spaces. Env takes precedence over config files.
+    pub fn get_list_or_split_string(&self, key: &str) -> CargoResult<OptValue<Vec<String>>> {
+        if let Some(value) = self.get_env::<String>(&ConfigKey::from_str(key))? {
+            return Ok(Some(Value {
+                val: value.val.split(' ').map(str::to_string).collect(),
+                definition: value.definition,
+            }));
+        }
+
+        match self.get_cv(key)? {
+            Some(CV::List(i, path)) => Ok(Some(Value {
+                val: i.into_iter().map(|(s, _)| s).collect(),
+                definition: Definition::Path(path),
+            })),
+            Some(CV::String(i, path)) => Ok(Some(Value {
+                val: i.split(' ').map(str::to_string).collect(),
+                definition: Definition::Path(path),
+            })),
+            Some(val) => self.expected("list or string", key, &val),
+            None => Ok(None),
+        }
+    }
+
+    /// Gets a table (nested map) value from the config files; env variables
+    /// are not consulted.
+    pub fn get_table(&self, key: &str) -> CargoResult<OptValue<HashMap<String, CV>>> {
+        match self.get_cv(key)? {
+            Some(CV::Table(i, path)) => Ok(Some(Value {
+                val: i,
+                definition: Definition::Path(path),
+            })),
+            Some(val) => self.expected("table", key, &val),
+            None => Ok(None),
+        }
+    }
+
+    // Recommend use `get` if you want a specific type, such as an unsigned value.
+    // Example:  config.get::<Option<u32>>("some.key")?
+    /// Gets an integer config value for `key`; env takes precedence.
+    pub fn get_i64(&self, key: &str) -> CargoResult<OptValue<i64>> {
+        self.get_integer(&ConfigKey::from_str(key))
+            .map_err(|e| e.into())
+    }
+
+    /// Integer lookup backing `get_i64`: env first, then config files; a
+    /// non-integer config value is a type error.
+    fn get_integer(&self, key: &ConfigKey) -> Result<OptValue<i64>, ConfigError> {
+        let config_key = key.to_config();
+        match self.get_env::<i64>(key)? {
+            Some(v) => Ok(Some(v)),
+            None => match self.get_cv(&config_key)? {
+                Some(CV::Integer(i, path)) => Ok(Some(Value {
+                    val: i,
+                    definition: Definition::Path(path),
+                })),
+                Some(cv) => Err(ConfigError::expected(&config_key, "an integer", &cv)),
+                None => Ok(None),
+            },
+        }
+    }
+
+    fn expected<T>(&self, ty: &str, key: &str, val: &CV) -> CargoResult<T> {
+        val.expected(ty, key)
+            .map_err(|e| format_err!("invalid configuration for key `{}`\n{}", key, e))
+    }
+
+    /// Apply command-line settings on top of the loaded configuration:
+    /// verbosity, color choice, `frozen`/`locked` lockfile policy, the
+    /// target directory, and `-Z` unstable flags.
+    ///
+    /// Command-line values take precedence over `term.verbose` and
+    /// `term.color` from the config files.
+    pub fn configure(
+        &mut self,
+        verbose: u32,
+        quiet: Option<bool>,
+        color: &Option<String>,
+        frozen: bool,
+        locked: bool,
+        target_dir: &Option<PathBuf>,
+        unstable_flags: &[String],
+    ) -> CargoResult<()> {
+        // `-vv` (or more) enables extra-verbose; any `-v` maps to Some(true).
+        let extra_verbose = verbose >= 2;
+        let verbose = if verbose == 0 { None } else { Some(true) };
+
+        // Ignore errors in the configuration files.
+        let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val);
+        let cfg_color = self.get_string("term.color").unwrap_or(None).map(|v| v.val);
+
+        // Command-line color wins over the config-file setting.
+        let color = color.as_ref().or_else(|| cfg_color.as_ref());
+
+        let verbosity = match (verbose, cfg_verbose, quiet) {
+            (Some(true), _, None) | (None, Some(true), None) => Verbosity::Verbose,
+
+            // command line takes precedence over configuration, so ignore the
+            // configuration.
+            (None, _, Some(true)) => Verbosity::Quiet,
+
+            // Can't pass both at the same time on the command line regardless
+            // of configuration.
+            (Some(true), _, Some(true)) => {
+                bail!("cannot set both --verbose and --quiet");
+            }
+
+            // Can't actually get `Some(false)` as a value from the command
+            // line, so just ignore them here to appease exhaustiveness checking
+            // in match statements.
+            (Some(false), _, _)
+            | (_, _, Some(false))
+            | (None, Some(false), None)
+            | (None, None, None) => Verbosity::Normal,
+        };
+
+        let cli_target_dir = match target_dir.as_ref() {
+            Some(dir) => Some(Filesystem::new(dir.clone())),
+            None => None,
+        };
+
+        self.shell().set_verbosity(verbosity);
+        self.shell().set_color_choice(color.map(|s| &s[..]))?;
+        self.extra_verbose = extra_verbose;
+        self.frozen = frozen;
+        self.locked = locked;
+        self.target_dir = cli_target_dir;
+        self.cli_flags.parse(unstable_flags)?;
+
+        Ok(())
+    }
+
+    pub fn cli_unstable(&self) -> &CliUnstable {
+        &self.cli_flags
+    }
+
+    pub fn extra_verbose(&self) -> bool {
+        self.extra_verbose
+    }
+
+    pub fn network_allowed(&self) -> bool {
+        !self.frozen() && !self.cli_unstable().offline
+    }
+
+    pub fn frozen(&self) -> bool {
+        self.frozen
+    }
+
+    pub fn lock_update_allowed(&self) -> bool {
+        !self.frozen && !self.locked
+    }
+
+    /// Loads configuration from the filesystem
+    pub fn load_values(&self) -> CargoResult<HashMap<String, ConfigValue>> {
+        self.load_values_from(&self.cwd)
+    }
+
+    /// Load and merge every config file that `walk_tree` discovers starting
+    /// at `path`, then layer the credentials file on top.
+    fn load_values_from(&self, path: &Path)
+        -> CargoResult<HashMap<String, ConfigValue>>
+    {
+        // Start from an empty table; each discovered file is merged in.
+        let mut cfg = CV::Table(HashMap::new(), PathBuf::from("."));
+        let home = self.home_path.clone().into_path_unlocked();
+
+        walk_tree(path, &home, |path| {
+            let mut contents = String::new();
+            let mut file = File::open(&path)?;
+            file.read_to_string(&mut contents)
+                .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?;
+            let toml = cargo_toml::parse(&contents, path, self).chain_err(|| {
+                format!("could not parse TOML configuration in `{}`", path.display())
+            })?;
+            let value = CV::from_toml(path, toml).chain_err(|| {
+                format!(
+                    "failed to load TOML configuration from `{}`",
+                    path.display()
+                )
+            })?;
+            cfg.merge(value)
+                .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?;
+            Ok(())
+        }).chain_err(|| "could not load Cargo configuration")?;
+
+        self.load_credentials(&mut cfg)?;
+        // The accumulator starts as a table and merging preserves that, so
+        // this match cannot fail.
+        match cfg {
+            CV::Table(map, _) => Ok(map),
+            _ => unreachable!(),
+        }
+    }
+
+    /// Gets the index for a registry.
+    pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> {
+        Ok(
+            match self.get_string(&format!("registries.{}.index", registry))? {
+                Some(index) => {
+                    let url = index.val.to_url()?;
+                    if url.username() != "" || url.password().is_some() {
+                        bail!("Registry URLs may not contain credentials");
+                    }
+                    url
+                }
+                None => bail!("No index found for registry: `{}`", registry),
+            },
+        )
+    }
+
+    /// Loads credentials config from the credentials file into the ConfigValue object, if present.
+    fn load_credentials(&self, cfg: &mut ConfigValue) -> CargoResult<()> {
+        let home_path = self.home_path.clone().into_path_unlocked();
+        let credentials = home_path.join("credentials");
+        // A missing credentials file is not an error; there is simply
+        // nothing to merge.
+        if fs::metadata(&credentials).is_err() {
+            return Ok(());
+        }
+
+        let mut contents = String::new();
+        let mut file = File::open(&credentials)?;
+        file.read_to_string(&mut contents).chain_err(|| {
+            format!(
+                "failed to read configuration file `{}`",
+                credentials.display()
+            )
+        })?;
+
+        let toml = cargo_toml::parse(&contents, &credentials, self).chain_err(|| {
+            format!(
+                "could not parse TOML configuration in `{}`",
+                credentials.display()
+            )
+        })?;
+
+        let mut value = CV::from_toml(&credentials, toml).chain_err(|| {
+            format!(
+                "failed to load TOML configuration from `{}`",
+                credentials.display()
+            )
+        })?;
+
+        // backwards compatibility for old .cargo/credentials layout
+        {
+            let value = match value {
+                CV::Table(ref mut value, _) => value,
+                _ => unreachable!(),
+            };
+
+            // Move a top-level `token` key under `registry.token`, but only
+            // when a `registry` table is not already present.
+            if let Some(token) = value.remove("token") {
+                if let Vacant(entry) = value.entry("registry".into()) {
+                    let mut map = HashMap::new();
+                    map.insert("token".into(), token);
+                    let table = CV::Table(map, PathBuf::from("."));
+                    entry.insert(table);
+                }
+            }
+        }
+
+        // we want value to override cfg, so swap these
+        mem::swap(cfg, &mut value);
+        cfg.merge(value)?;
+
+        Ok(())
+    }
+
+    /// Look for a path for `tool` in an environment variable or config path, but return `None`
+    /// if it's not present.
+    fn maybe_get_tool(&self, tool: &str) -> CargoResult<Option<PathBuf>> {
+        // e.g. `rustc` is overridden by the `RUSTC` environment variable.
+        let var = tool
+            .chars()
+            .flat_map(|c| c.to_uppercase())
+            .collect::<String>();
+        if let Some(tool_path) = env::var_os(&var) {
+            // A value containing a path separator is treated as a filesystem
+            // path and resolved relative to the cwd; a bare name is left
+            // as-is (presumably for `PATH` lookup by the caller).
+            let maybe_relative = match tool_path.to_str() {
+                Some(s) => s.contains('/') || s.contains('\\'),
+                None => false,
+            };
+            let path = if maybe_relative {
+                self.cwd.join(tool_path)
+            } else {
+                PathBuf::from(tool_path)
+            };
+            return Ok(Some(path));
+        }
+
+        // Fall back to the `build.<tool>` config key, e.g. `build.rustc`.
+        let var = format!("build.{}", tool);
+        if let Some(tool_path) = self.get_path(&var)? {
+            return Ok(Some(tool_path.val));
+        }
+
+        Ok(None)
+    }
+
+    /// Look for a path for `tool` in an environment variable or config path, defaulting to `tool`
+    /// as a path.
+    fn get_tool(&self, tool: &str) -> CargoResult<PathBuf> {
+        self.maybe_get_tool(tool)
+            .map(|t| t.unwrap_or_else(|| PathBuf::from(tool)))
+    }
+
+    /// The jobserver client inherited from the environment, if any.
+    pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> {
+        self.jobserver.as_ref()
+    }
+
+    /// Lazily create (and cache) a curl `Easy` handle, re-applying the
+    /// configured HTTP settings on every call.
+    pub fn http(&self) -> CargoResult<&RefCell<Easy>> {
+        let http = self
+            .easy
+            .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?;
+        {
+            // Reset and reconfigure the cached handle so options set by a
+            // previous use don't leak into this one.
+            let mut http = http.borrow_mut();
+            http.reset();
+            let timeout = ops::configure_http_handle(self, &mut http)?;
+            timeout.configure(&mut http)?;
+        }
+        Ok(http)
+    }
+
+    pub fn crates_io_source_id<F>(&self, f: F) -> CargoResult<SourceId>
+    where
+        F: FnMut() -> CargoResult<SourceId>,
+    {
+        Ok(self.crates_io_source_id.try_borrow_with(f)?.clone())
+    }
+
+    pub fn creation_time(&self) -> Instant {
+        self.creation_time
+    }
+
+    // Retrieve a config variable.
+    //
+    // This supports most serde `Deserialize` types.  Examples:
+    //     let v: Option<u32> = config.get("some.nested.key")?;
+    //     let v: Option<MyStruct> = config.get("some.key")?;
+    //     let v: Option<HashMap<String, MyStruct>> = config.get("foo")?;
+    pub fn get<'de, T: de::Deserialize<'de>>(&self, key: &str) -> CargoResult<T> {
+        let d = Deserializer {
+            config: self,
+            key: ConfigKey::from_str(key),
+        };
+        T::deserialize(d).map_err(|e| e.into())
+    }
+}
+
+/// A segment of a config key.
+///
+/// Config keys are split on dots for regular keys, or underscores for
+/// environment keys.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+enum ConfigKeyPart {
+    /// Case-insensitive part (checks uppercase in environment keys).
+    Part(String),
+    /// Case-sensitive part (environment keys must match exactly).
+    CasePart(String),
+}
+
+impl ConfigKeyPart {
+    fn to_env(&self) -> String {
+        match self {
+            ConfigKeyPart::Part(s) => s.replace("-", "_").to_uppercase(),
+            ConfigKeyPart::CasePart(s) => s.clone(),
+        }
+    }
+
+    fn to_config(&self) -> String {
+        match self {
+            ConfigKeyPart::Part(s) => s.clone(),
+            ConfigKeyPart::CasePart(s) => s.clone(),
+        }
+    }
+}
+
+/// Key for a configuration variable.
+#[derive(Debug, Clone)]
+struct ConfigKey(Vec<ConfigKeyPart>);
+
+impl ConfigKey {
+    fn from_str(key: &str) -> ConfigKey {
+        ConfigKey(
+            key.split('.')
+                .map(|p| ConfigKeyPart::Part(p.to_string()))
+                .collect(),
+        )
+    }
+
+    fn join(&self, next: ConfigKeyPart) -> ConfigKey {
+        let mut res = self.clone();
+        res.0.push(next);
+        res
+    }
+
+    fn to_env(&self) -> String {
+        format!(
+            "CARGO_{}",
+            self.0
+                .iter()
+                .map(|p| p.to_env())
+                .collect::<Vec<_>>()
+                .join("_")
+        )
+    }
+
+    fn to_config(&self) -> String {
+        self.0
+            .iter()
+            .map(|p| p.to_config())
+            .collect::<Vec<_>>()
+            .join(".")
+    }
+
+    fn last(&self) -> &ConfigKeyPart {
+        self.0.last().unwrap()
+    }
+}
+
+impl fmt::Display for ConfigKey {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.to_config().fmt(f)
+    }
+}
+
+/// Internal error for serde errors.
+#[derive(Debug)]
+pub struct ConfigError {
+    error: failure::Error,
+    // Where the offending value was defined, when known.
+    definition: Option<Definition>,
+}
+
+impl ConfigError {
+    /// An ad-hoc error tied to a known definition location.
+    fn new(message: String, definition: Definition) -> ConfigError {
+        ConfigError {
+            error: failure::err_msg(message),
+            definition: Some(definition),
+        }
+    }
+
+    /// A type-mismatch error: `key` held `found` instead of `expected`.
+    fn expected(key: &str, expected: &str, found: &ConfigValue) -> ConfigError {
+        ConfigError {
+            error: format_err!(
+                "`{}` expected {}, but found a {}",
+                key,
+                expected,
+                found.desc()
+            ),
+            definition: Some(Definition::Path(found.definition_path().to_path_buf())),
+        }
+    }
+
+    /// The key was found in neither the environment nor any config file.
+    fn missing(key: &str) -> ConfigError {
+        ConfigError {
+            error: format_err!("missing config key `{}`", key),
+            definition: None,
+        }
+    }
+
+    /// Wrap this error with the key and definition location it relates to.
+    fn with_key_context(self, key: &str, definition: Definition) -> ConfigError {
+        ConfigError {
+            error: format_err!("could not load config key `{}`: {}", key, self),
+            definition: Some(definition),
+        }
+    }
+}
+
+impl std::error::Error for ConfigError {
+    // This can be removed once 1.27 is stable.
+    fn description(&self) -> &str {
+        // Placeholder only; the real message comes from the Display impl.
+        "An error has occurred."
+    }
+}
+
+// Future Note: Currently we cannot override Fail::cause (due to
+// specialization) so we have no way to return the underlying causes. In the
+// future, once this limitation is lifted, this should instead implement
+// `cause` and avoid doing the cause formatting here.
+impl fmt::Display for ConfigError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Flatten the failure cause chain into one message.
+        let message = self
+            .error
+            .iter_chain()
+            .map(|e| e.to_string())
+            .collect::<Vec<_>>()
+            .join("\nCaused by:\n  ");
+        if let Some(ref definition) = self.definition {
+            write!(f, "error in {}: {}", definition, message)
+        } else {
+            message.fmt(f)
+        }
+    }
+}
+
+impl de::Error for ConfigError {
+    fn custom<T: fmt::Display>(msg: T) -> Self {
+        ConfigError {
+            error: failure::err_msg(msg.to_string()),
+            definition: None,
+        }
+    }
+}
+
+impl From<failure::Error> for ConfigError {
+    fn from(error: failure::Error) -> Self {
+        ConfigError {
+            error,
+            definition: None,
+        }
+    }
+}
+
+/// Serde deserializer used to convert config values to a target type using
+/// `Config::get`.
+pub struct Deserializer<'config> {
+    config: &'config Config,
+    key: ConfigKey,
+}
+
+// Generates a primitive `deserialize_*` method that fetches the value with
+// the given `Config` getter and feeds it to the matching visitor method,
+// attaching key/definition context to any error.
+macro_rules! deserialize_method {
+    ($method:ident, $visit:ident, $getter:ident) => {
+        fn $method<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+        where
+            V: de::Visitor<'de>,
+        {
+            let v = self.config.$getter(&self.key)?.ok_or_else(||
+                ConfigError::missing(&self.key.to_config()))?;
+            let Value{val, definition} = v;
+            let res: Result<V::Value, ConfigError> = visitor.$visit(val);
+            res.map_err(|e| e.with_key_context(&self.key.to_config(), definition))
+        }
+    }
+}
+
+impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> {
+    type Error = ConfigError;
+
+    fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        // Future note: If you ever need to deserialize a non-self describing
+        // map type, this should implement a starts_with check (similar to how
+        // ConfigMapAccess does).
+        //
+        // An environment variable shadows any config-file value; its type is
+        // guessed from its text (bool, integer, TOML list, else string).
+        if let Some(v) = self.config.env.get(&self.key.to_env()) {
+            let res: Result<V::Value, ConfigError> = if v == "true" || v == "false" {
+                visitor.visit_bool(v.parse().unwrap())
+            } else if let Ok(v) = v.parse::<i64>() {
+                visitor.visit_i64(v)
+            } else if self.config.cli_unstable().advanced_env
+                && v.starts_with('[')
+                && v.ends_with(']')
+            {
+                visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
+            } else {
+                visitor.visit_string(v.clone())
+            };
+            return res.map_err(|e| {
+                e.with_key_context(
+                    &self.key.to_config(),
+                    Definition::Environment(self.key.to_env()),
+                )
+            });
+        }
+
+        // Otherwise dispatch on the type stored in the merged config files.
+        let o_cv = self.config.get_cv(&self.key.to_config())?;
+        if let Some(cv) = o_cv {
+            let res: (Result<V::Value, ConfigError>, PathBuf) = match cv {
+                CV::Integer(i, path) => (visitor.visit_i64(i), path),
+                CV::String(s, path) => (visitor.visit_string(s), path),
+                CV::List(_, path) => (
+                    visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?),
+                    path,
+                ),
+                CV::Table(_, path) => (
+                    visitor.visit_map(ConfigMapAccess::new_map(self.config, self.key.clone())?),
+                    path,
+                ),
+                CV::Boolean(b, path) => (visitor.visit_bool(b), path),
+            };
+            let (res, path) = res;
+            return res
+                .map_err(|e| e.with_key_context(&self.key.to_config(), Definition::Path(path)));
+        }
+        Err(ConfigError::missing(&self.key.to_config()))
+    }
+
+    // All primitives are funneled through the bool/i64/string getters.
+    deserialize_method!(deserialize_bool, visit_bool, get_bool_priv);
+    deserialize_method!(deserialize_i8, visit_i64, get_integer);
+    deserialize_method!(deserialize_i16, visit_i64, get_integer);
+    deserialize_method!(deserialize_i32, visit_i64, get_integer);
+    deserialize_method!(deserialize_i64, visit_i64, get_integer);
+    deserialize_method!(deserialize_u8, visit_i64, get_integer);
+    deserialize_method!(deserialize_u16, visit_i64, get_integer);
+    deserialize_method!(deserialize_u32, visit_i64, get_integer);
+    deserialize_method!(deserialize_u64, visit_i64, get_integer);
+    deserialize_method!(deserialize_string, visit_string, get_string_priv);
+
+    fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        if self.config.has_key(&self.key) {
+            visitor.visit_some(self)
+        } else {
+            // Treat missing values as None.
+            visitor.visit_none()
+        }
+    }
+
+    fn deserialize_struct<V>(
+        self,
+        _name: &'static str,
+        fields: &'static [&'static str],
+        visitor: V,
+    ) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        visitor.visit_map(ConfigMapAccess::new_struct(self.config, self.key, fields)?)
+    }
+
+    fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        visitor.visit_map(ConfigMapAccess::new_map(self.config, self.key)?)
+    }
+
+    fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
+    }
+
+    fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
+    }
+
+    fn deserialize_tuple_struct<V>(
+        self,
+        _name: &'static str,
+        _len: usize,
+        visitor: V,
+    ) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?)
+    }
+
+    fn deserialize_newtype_struct<V>(
+        self,
+        name: &'static str,
+        visitor: V,
+    ) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        // `ConfigRelativePath` is special-cased: the string value is joined
+        // onto the root of the definition it came from.
+        if name == "ConfigRelativePath" {
+            match self.config.get_string_priv(&self.key)? {
+                Some(v) => {
+                    let path = v
+                        .definition
+                        .root(self.config)
+                        .join(v.val)
+                        .display()
+                        .to_string();
+                    visitor.visit_newtype_struct(path.into_deserializer())
+                }
+                None => Err(ConfigError::missing(&self.key.to_config())),
+            }
+        } else {
+            visitor.visit_newtype_struct(self)
+        }
+    }
+
+    // These aren't really supported, yet.
+    forward_to_deserialize_any! {
+        f32 f64 char str bytes
+        byte_buf unit unit_struct
+        enum identifier ignored_any
+    }
+}
+
+/// `MapAccess` implementation that feeds table keys (and, with
+/// `advanced_env`, keys discovered from the environment) to serde.
+struct ConfigMapAccess<'config> {
+    config: &'config Config,
+    key: ConfigKey,
+    set_iter: <HashSet<ConfigKeyPart> as IntoIterator>::IntoIter,
+    // The key handed out by `next_key_seed`, consumed by `next_value_seed`.
+    next: Option<ConfigKeyPart>,
+}
+
+impl<'config> ConfigMapAccess<'config> {
+    /// Collect the set of sub-keys present under `key` in the config files
+    /// and, with `advanced_env`, in matching environment variables.
+    fn new_map(
+        config: &'config Config,
+        key: ConfigKey,
+    ) -> Result<ConfigMapAccess<'config>, ConfigError> {
+        let mut set = HashSet::new();
+        if let Some(mut v) = config.get_table(&key.to_config())? {
+            // v: Value<HashMap<String, CV>>
+            for (key, _value) in v.val.drain() {
+                set.insert(ConfigKeyPart::CasePart(key));
+            }
+        }
+        if config.cli_unstable().advanced_env {
+            // CARGO_PROFILE_DEV_OVERRIDES_
+            let env_pattern = format!("{}_", key.to_env());
+            for env_key in config.env.keys() {
+                if env_key.starts_with(&env_pattern) {
+                    // CARGO_PROFILE_DEV_OVERRIDES_bar_OPT_LEVEL = 3
+                    let rest = &env_key[env_pattern.len()..];
+                    // rest = bar_OPT_LEVEL
+                    let part = rest.splitn(2, '_').next().unwrap();
+                    // part = "bar"
+                    set.insert(ConfigKeyPart::CasePart(part.to_string()));
+                }
+            }
+        }
+        Ok(ConfigMapAccess {
+            config,
+            key,
+            set_iter: set.into_iter(),
+            next: None,
+        })
+    }
+
+    /// Like `new_map`, but the key set comes from the struct's declared
+    /// fields; config-file keys outside that set produce a warning.
+    fn new_struct(
+        config: &'config Config,
+        key: ConfigKey,
+        fields: &'static [&'static str],
+    ) -> Result<ConfigMapAccess<'config>, ConfigError> {
+        let mut set = HashSet::new();
+        for field in fields {
+            set.insert(ConfigKeyPart::Part(field.to_string()));
+        }
+        if let Some(mut v) = config.get_table(&key.to_config())? {
+            for (t_key, value) in v.val.drain() {
+                let part = ConfigKeyPart::Part(t_key);
+                if !set.contains(&part) {
+                    config.shell().warn(format!(
+                        "unused key `{}` in config file `{}`",
+                        key.join(part).to_config(),
+                        value.definition_path().display()
+                    ))?;
+                }
+            }
+        }
+        Ok(ConfigMapAccess {
+            config,
+            key,
+            set_iter: set.into_iter(),
+            next: None,
+        })
+    }
+}
+
+impl<'de, 'config> de::MapAccess<'de> for ConfigMapAccess<'config> {
+    type Error = ConfigError;
+
+    fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
+    where
+        K: de::DeserializeSeed<'de>,
+    {
+        match self.set_iter.next() {
+            Some(key) => {
+                let de_key = key.to_config();
+                self.next = Some(key);
+                seed.deserialize(de_key.into_deserializer()).map(Some)
+            }
+            None => Ok(None),
+        }
+    }
+
+    fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::DeserializeSeed<'de>,
+    {
+        let next_key = self.next.take().expect("next field missing");
+        let next_key = self.key.join(next_key);
+        seed.deserialize(Deserializer {
+            config: self.config,
+            key: next_key,
+        })
+    }
+}
+
+/// `SeqAccess` implementation yielding list elements gathered from config
+/// files and (optionally) a TOML-syntax environment variable.
+struct ConfigSeqAccess {
+    list_iter: vec::IntoIter<(String, Definition)>,
+}
+
+impl ConfigSeqAccess {
+    fn new(config: &Config, key: &ConfigKey) -> Result<ConfigSeqAccess, ConfigError> {
+        let mut res = Vec::new();
+        // Values from the config files come first.
+        if let Some(v) = config.get_list(&key.to_config())? {
+            for (s, path) in v.val {
+                res.push((s, Definition::Path(path)));
+            }
+        }
+
+        if config.cli_unstable().advanced_env {
+            // Parse an environment string as a TOML array.
+            let env_key = key.to_env();
+            let def = Definition::Environment(env_key.clone());
+            if let Some(v) = config.env.get(&env_key) {
+                if !(v.starts_with('[') && v.ends_with(']')) {
+                    return Err(ConfigError::new(
+                        format!("should have TOML list syntax, found `{}`", v),
+                        def.clone(),
+                    ));
+                }
+                // Wrap the value as `key=[...]` so it parses as a TOML document.
+                let temp_key = key.last().to_env();
+                let toml_s = format!("{}={}", temp_key, v);
+                let toml_v: toml::Value = toml::de::from_str(&toml_s).map_err(|e| {
+                    ConfigError::new(format!("could not parse TOML list: {}", e), def.clone())
+                })?;
+                let values = toml_v
+                    .as_table()
+                    .unwrap()
+                    .get(&temp_key)
+                    .unwrap()
+                    .as_array()
+                    .expect("env var was not array");
+                for value in values {
+                    // TODO: support other types
+                    let s = value.as_str().ok_or_else(|| {
+                        ConfigError::new(
+                            format!("expected string, found {}", value.type_str()),
+                            def.clone(),
+                        )
+                    })?;
+                    res.push((s.to_string(), def.clone()));
+                }
+            }
+        }
+        Ok(ConfigSeqAccess {
+            list_iter: res.into_iter(),
+        })
+    }
+}
+
+impl<'de> de::SeqAccess<'de> for ConfigSeqAccess {
+    type Error = ConfigError;
+
+    fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
+    where
+        T: de::DeserializeSeed<'de>,
+    {
+        match self.list_iter.next() {
+            // TODO: Add def to err?
+            Some((value, _def)) => seed.deserialize(value.into_deserializer()).map(Some),
+            None => Ok(None),
+        }
+    }
+}
+
+/// Use with the `get` API to fetch a string that will be converted to a
+/// `PathBuf`.  Relative paths are converted to absolute paths based on the
+/// location of the config file.
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize)]
+pub struct ConfigRelativePath(PathBuf);
+
+impl ConfigRelativePath {
+    pub fn path(self) -> PathBuf {
+        self.0
+    }
+}
+
+/// A single configuration value, tagged with the path of the file that
+/// defined it.
+#[derive(Eq, PartialEq, Clone)]
+pub enum ConfigValue {
+    Integer(i64, PathBuf),
+    String(String, PathBuf),
+    // Lists carry a per-element source path as well as an overall one.
+    List(Vec<(String, PathBuf)>, PathBuf),
+    Table(HashMap<String, ConfigValue>, PathBuf),
+    Boolean(bool, PathBuf),
+}
+
+/// A typed config value paired with where it was defined.
+pub struct Value<T> {
+    pub val: T,
+    pub definition: Definition,
+}
+
+/// Shorthand for an optional `Value`.
+pub type OptValue<T> = Option<Value<T>>;
+
+/// Where a config value came from: a file path or an environment-variable
+/// name.
+#[derive(Clone, Debug)]
+pub enum Definition {
+    Path(PathBuf),
+    Environment(String),
+}
+
+impl fmt::Debug for ConfigValue {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            // Scalars print as `value (from path)`.
+            CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()),
+            CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()),
+            CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()),
+            CV::List(ref list, ref path) => {
+                write!(f, "[")?;
+                for (i, &(ref s, ref path)) in list.iter().enumerate() {
+                    if i > 0 {
+                        write!(f, ", ")?;
+                    }
+                    write!(f, "{} (from {})", s, path.display())?;
+                }
+                write!(f, "] (from {})", path.display())
+            }
+            // Tables defer to the map's own Debug output.
+            CV::Table(ref table, _) => write!(f, "{:?}", table),
+        }
+    }
+}
+
+impl ConfigValue {
+    /// Convert a parsed `toml::Value` into a `ConfigValue`, tagging every
+    /// node with `path` as its definition location.
+    fn from_toml(path: &Path, toml: toml::Value) -> CargoResult<ConfigValue> {
+        match toml {
+            toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())),
+            toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())),
+            toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())),
+            // Arrays may only contain strings.
+            toml::Value::Array(val) => Ok(CV::List(
+                val.into_iter()
+                    .map(|toml| match toml {
+                        toml::Value::String(val) => Ok((val, path.to_path_buf())),
+                        v => bail!("expected string but found {} in list", v.type_str()),
+                    })
+                    .collect::<CargoResult<_>>()?,
+                path.to_path_buf(),
+            )),
+            toml::Value::Table(val) => Ok(CV::Table(
+                val.into_iter()
+                    .map(|(key, value)| {
+                        let value = CV::from_toml(path, value)
+                            .chain_err(|| format!("failed to parse key `{}`", key))?;
+                        Ok((key, value))
+                    })
+                    .collect::<CargoResult<_>>()?,
+                path.to_path_buf(),
+            )),
+            // Any other TOML type (e.g. floats, datetimes) is rejected.
+            v => bail!(
+                "found TOML configuration value of unknown type `{}`",
+                v.type_str()
+            ),
+        }
+    }
+
+    fn into_toml(self) -> toml::Value {
+        match self {
+            CV::Boolean(s, _) => toml::Value::Boolean(s),
+            CV::String(s, _) => toml::Value::String(s),
+            CV::Integer(i, _) => toml::Value::Integer(i),
+            CV::List(l, _) => {
+                toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect())
+            }
+            CV::Table(l, _) => {
+                toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect())
+            }
+        }
+    }
+
+    /// Merge `from` into `self`: lists are concatenated (`from` appended),
+    /// tables are merged recursively, scalars keep the existing value, and
+    /// switching between container and non-container types is an error.
+    fn merge(&mut self, from: ConfigValue) -> CargoResult<()> {
+        match (self, from) {
+            (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => {
+                let new = mem::replace(new, Vec::new());
+                old.extend(new.into_iter());
+            }
+            (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => {
+                let new = mem::replace(new, HashMap::new());
+                for (key, value) in new {
+                    match old.entry(key.clone()) {
+                        Occupied(mut entry) => {
+                            // Grab the incoming value's path before it is
+                            // moved into the recursive merge, for the error
+                            // message.
+                            let path = value.definition_path().to_path_buf();
+                            let entry = entry.get_mut();
+                            entry.merge(value).chain_err(|| {
+                                format!(
+                                    "failed to merge key `{}` between \
+                                     files:\n  \
+                                     file 1: {}\n  \
+                                     file 2: {}",
+                                    key,
+                                    entry.definition_path().display(),
+                                    path.display()
+                                )
+                            })?;
+                        }
+                        Vacant(entry) => {
+                            entry.insert(value);
+                        }
+                    };
+                }
+            }
+            // Allow switching types except for tables or arrays.
+            (expected @ &mut CV::List(_, _), found)
+            | (expected @ &mut CV::Table(_, _), found)
+            | (expected, found @ CV::List(_, _))
+            | (expected, found @ CV::Table(_, _)) => {
+                return Err(internal(format!(
+                    "expected {}, but found {}",
+                    expected.desc(),
+                    found.desc()
+                )))
+            }
+            // Scalar vs scalar: the existing value in `self` wins.
+            _ => {}
+        }
+
+        Ok(())
+    }
+
+    pub fn i64(&self, key: &str) -> CargoResult<(i64, &Path)> {
+        match *self {
+            CV::Integer(i, ref p) => Ok((i, p)),
+            _ => self.expected("integer", key),
+        }
+    }
+
+    pub fn string(&self, key: &str) -> CargoResult<(&str, &Path)> {
+        match *self {
+            CV::String(ref s, ref p) => Ok((s, p)),
+            _ => self.expected("string", key),
+        }
+    }
+
+    pub fn table(&self, key: &str) -> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
+        match *self {
+            CV::Table(ref table, ref p) => Ok((table, p)),
+            _ => self.expected("table", key),
+        }
+    }
+
+    pub fn list(&self, key: &str) -> CargoResult<&[(String, PathBuf)]> {
+        match *self {
+            CV::List(ref list, _) => Ok(list),
+            _ => self.expected("list", key),
+        }
+    }
+
+    pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Path)> {
+        match *self {
+            CV::Boolean(b, ref p) => Ok((b, p)),
+            _ => self.expected("bool", key),
+        }
+    }
+
+    pub fn desc(&self) -> &'static str {
+        match *self {
+            CV::Table(..) => "table",
+            CV::List(..) => "array",
+            CV::String(..) => "string",
+            CV::Boolean(..) => "boolean",
+            CV::Integer(..) => "integer",
+        }
+    }
+
+    pub fn definition_path(&self) -> &Path {
+        match *self {
+            CV::Boolean(_, ref p)
+            | CV::Integer(_, ref p)
+            | CV::String(_, ref p)
+            | CV::List(_, ref p)
+            | CV::Table(_, ref p) => p,
+        }
+    }
+
+    /// Produces the canonical "expected X, but found Y" error for `key`,
+    /// pointing at the file the value came from.
+    fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> {
+        bail!(
+            "expected a {}, but found a {} for `{}` in {}",
+            wanted,
+            self.desc(),
+            key,
+            self.definition_path().display()
+        )
+    }
+}
+
+impl Definition {
+    pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path {
+        match *self {
+            Definition::Path(ref p) => p.parent().unwrap().parent().unwrap(),
+            Definition::Environment(_) => config.cwd(),
+        }
+    }
+}
+
+impl fmt::Display for Definition {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            Definition::Path(ref p) => p.display().fmt(f),
+            Definition::Environment(ref key) => write!(f, "environment variable `{}`", key),
+        }
+    }
+}
+
+pub fn homedir(cwd: &Path) -> Option<PathBuf> {
+    ::home::cargo_home_with_cwd(cwd).ok()
+}
+
+fn walk_tree<F>(pwd: &Path, home: &Path, mut walk: F) -> CargoResult<()>
+where
+    F: FnMut(&Path) -> CargoResult<()>,
+{
+    let mut stash: HashSet<PathBuf> = HashSet::new();
+
+    for current in paths::ancestors(pwd) {
+        let possible = current.join(".cargo").join("config");
+        if fs::metadata(&possible).is_ok() {
+            walk(&possible)?;
+            stash.insert(possible);
+        }
+    }
+
+    // Once we're done, also be sure to walk the home directory even if it's not
+    // in our history to be sure we pick up that standard location for
+    // information.
+    let config = home.join("config");
+    if !stash.contains(&config) && fs::metadata(&config).is_ok() {
+        walk(&config)?;
+    }
+
+    Ok(())
+}
+
+/// Writes `token` into the `credentials` file under Cargo home, creating the
+/// directory and file if necessary.  With a `registry` name the token is
+/// stored under `[registries.<name>]`, otherwise under `[registry]`.
+pub fn save_credentials(cfg: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
+    let mut file = {
+        cfg.home_path.create_dir()?;
+        cfg.home_path
+            .open_rw(Path::new("credentials"), cfg, "credentials' config file")?
+    };
+
+    // Build the `key = { token = "..." }` value to splice into the file.
+    let (key, value) = {
+        let key = "token".to_string();
+        let value = ConfigValue::String(token, file.path().to_path_buf());
+        let mut map = HashMap::new();
+        map.insert(key, value);
+        let table = CV::Table(map, file.path().to_path_buf());
+
+        if let Some(registry) = registry {
+            let mut map = HashMap::new();
+            map.insert(registry, table);
+            (
+                "registries".into(),
+                CV::Table(map, file.path().to_path_buf()),
+            )
+        } else {
+            ("registry".into(), table)
+        }
+    };
+
+    let mut contents = String::new();
+    file.read_to_string(&mut contents).chain_err(|| {
+        format!(
+            "failed to read configuration file `{}`",
+            file.path().display()
+        )
+    })?;
+
+    let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?;
+
+    // move the old token location to the new one
+    if let Some(token) = toml.as_table_mut().unwrap().remove("token") {
+        let mut map = HashMap::new();
+        map.insert("token".to_string(), token);
+        toml.as_table_mut()
+            .unwrap()
+            .insert("registry".into(), map.into());
+    }
+
+    toml.as_table_mut().unwrap().insert(key, value.into_toml());
+
+    // Rewrite from the start and truncate leftover bytes in case the new
+    // contents are shorter than the old.
+    let contents = toml.to_string();
+    file.seek(SeekFrom::Start(0))?;
+    file.write_all(contents.as_bytes())?;
+    file.file().set_len(contents.len() as u64)?;
+    // Credentials are secret: restrict the file to the owner on Unix.
+    set_permissions(file.file(), 0o600)?;
+
+    return Ok(());
+
+    #[cfg(unix)]
+    fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
+        use std::os::unix::fs::PermissionsExt;
+
+        let mut perms = file.metadata()?.permissions();
+        perms.set_mode(mode);
+        file.set_permissions(perms)?;
+        Ok(())
+    }
+
+    // No-op on non-Unix platforms, which lack this permission model.
+    #[cfg(not(unix))]
+    #[allow(unused)]
+    fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
+        Ok(())
+    }
+}
diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs
new file mode 100644 (file)
index 0000000..639f95f
--- /dev/null
@@ -0,0 +1,231 @@
+//! A graph-like structure used to represent a set of dependencies and in what
+//! order they should be built.
+//!
+//! This structure is used to store the dependency graph and dynamically update
+//! it to figure out when a dependency should be built.
+
+use std::collections::hash_map::Entry::{Occupied, Vacant};
+use std::collections::{HashMap, HashSet};
+use std::hash::Hash;
+
+pub use self::Freshness::{Dirty, Fresh};
+
+#[derive(Debug)]
+pub struct DependencyQueue<K: Eq + Hash, V> {
+    /// A list of all known keys to build.
+    ///
+    /// The value of the hash map is list of dependencies which still need to be
+    /// built before the package can be built. Note that the set is dynamically
+    /// updated as more dependencies are built.
+    dep_map: HashMap<K, (HashSet<K>, V)>,
+
+    /// A reverse mapping of a package to all packages that depend on that
+    /// package.
+    ///
+    /// This map is statically known and does not get updated throughout the
+    /// lifecycle of the DependencyQueue.
+    reverse_dep_map: HashMap<K, HashSet<K>>,
+
+    /// A set of dirty packages.
+    ///
+    /// Packages may become dirty over time if their dependencies are rebuilt.
+    dirty: HashSet<K>,
+
+    /// The packages which are currently being built, waiting for a call to
+    /// `finish`.
+    pending: HashSet<K>,
+
+    /// Topological depth of each key, computed by `queue_finished` and used
+    /// by `dequeue` to schedule the longest dependency chains first.
+    depth: HashMap<K, usize>,
+}
+
+/// Indication of the freshness of a package.
+///
+/// A fresh package does not necessarily need to be rebuilt (unless a dependency
+/// was also rebuilt), and a dirty package must always be rebuilt.
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+pub enum Freshness {
+    /// The package's cached output may be reusable.
+    Fresh,
+    /// The package must be rebuilt.
+    Dirty,
+}
+
+impl Freshness {
+    pub fn combine(self, other: Freshness) -> Freshness {
+        match self {
+            Fresh => other,
+            Dirty => Dirty,
+        }
+    }
+}
+
+impl<K: Hash + Eq + Clone, V> Default for DependencyQueue<K, V> {
+    fn default() -> DependencyQueue<K, V> {
+        DependencyQueue::new()
+    }
+}
+
+impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
+    /// Creates a new dependency queue with 0 packages.
+    pub fn new() -> DependencyQueue<K, V> {
+        DependencyQueue {
+            dep_map: HashMap::new(),
+            reverse_dep_map: HashMap::new(),
+            dirty: HashSet::new(),
+            pending: HashSet::new(),
+            depth: HashMap::new(),
+        }
+    }
+
+    /// Adds a new package to this dependency queue.
+    ///
+    /// It is assumed that any dependencies of this package will eventually also
+    /// be added to the dependency queue.
+    ///
+    /// If `key` is already queued the existing value is returned and the
+    /// `fresh`/`dependencies` arguments are ignored.
+    pub fn queue(&mut self, fresh: Freshness, key: &K, value: V, dependencies: &[K]) -> &mut V {
+        let slot = match self.dep_map.entry(key.clone()) {
+            Occupied(v) => return &mut v.into_mut().1,
+            Vacant(v) => v,
+        };
+
+        if fresh == Dirty {
+            self.dirty.insert(key.clone());
+        }
+
+        // Record forward edges for scheduling and reverse edges so `finish`
+        // can unblock dependants later.
+        let mut my_dependencies = HashSet::new();
+        for dep in dependencies {
+            my_dependencies.insert(dep.clone());
+            let rev = self.reverse_dep_map
+                .entry(dep.clone())
+                .or_insert_with(HashSet::new);
+            rev.insert(key.clone());
+        }
+        &mut slot.insert((my_dependencies, value)).1
+    }
+
+    /// All nodes have been added, calculate some internal metadata and prepare
+    /// for `dequeue`.
+    pub fn queue_finished(&mut self) {
+        for key in self.dep_map.keys() {
+            depth(key, &self.reverse_dep_map, &mut self.depth);
+        }
+
+        // Recursively computes the depth of `key` along reverse-dependency
+        // edges, memoizing results in `results`.
+        fn depth<K: Hash + Eq + Clone>(
+            key: &K,
+            map: &HashMap<K, HashSet<K>>,
+            results: &mut HashMap<K, usize>,
+        ) -> usize {
+            // Sentinel for a key whose depth is still being computed;
+            // encountering it again means the graph has a cycle.
+            const IN_PROGRESS: usize = !0;
+
+            if let Some(&depth) = results.get(key) {
+                assert_ne!(depth, IN_PROGRESS, "cycle in DependencyQueue");
+                return depth;
+            }
+
+            results.insert(key.clone(), IN_PROGRESS);
+
+            let depth = 1
+                + map.get(&key)
+                    .into_iter()
+                    .flat_map(|it| it)
+                    .map(|dep| depth(dep, map, results))
+                    .max()
+                    .unwrap_or(0);
+
+            *results.get_mut(key).unwrap() = depth;
+
+            depth
+        }
+    }
+
+    /// Dequeues a package that is ready to be built.
+    ///
+    /// A package is ready to be built when it has 0 un-built dependencies. If
+    /// `None` is returned then no packages are ready to be built.
+    pub fn dequeue(&mut self) -> Option<(Freshness, K, V)> {
+        // Look at all our crates and find everything that's ready to build (no
+        // deps). After we've got that candidate set select the one which has
+        // the maximum depth in the dependency graph. This way we should
+        // hopefully keep CPUs hottest the longest by ensuring that long
+        // dependency chains are scheduled early on in the build process and the
+        // leafs higher in the tree can fill in the cracks later.
+        //
+        // TODO: it'd be best here to throw in a heuristic of crate size as
+        //       well. For example how long did this crate historically take to
+        //       compile? How large is its source code? etc.
+        let next = self.dep_map
+            .iter()
+            .filter(|&(_, &(ref deps, _))| deps.is_empty())
+            .map(|(key, _)| key.clone())
+            .max_by_key(|k| self.depth[k]);
+        let key = match next {
+            Some(key) => key,
+            None => return None,
+        };
+        let (_, data) = self.dep_map.remove(&key).unwrap();
+        let fresh = if self.dirty.contains(&key) {
+            Dirty
+        } else {
+            Fresh
+        };
+        // The key stays in `pending` until `finish` is called for it.
+        self.pending.insert(key.clone());
+        Some((fresh, key, data))
+    }
+
+    /// Returns whether there are remaining packages to be built.
+    pub fn is_empty(&self) -> bool {
+        self.dep_map.is_empty() && self.pending.is_empty()
+    }
+
+    /// Returns the number of remaining packages to be built.
+    pub fn len(&self) -> usize {
+        self.dep_map.len() + self.pending.len()
+    }
+
+    /// Indicate that a package has been built.
+    ///
+    /// This function will update the dependency queue with this information,
+    /// possibly allowing the next invocation of `dequeue` to return a package.
+    ///
+    /// Panics if `key` was not previously returned by `dequeue`.
+    pub fn finish(&mut self, key: &K, fresh: Freshness) {
+        assert!(self.pending.remove(key));
+        let reverse_deps = match self.reverse_dep_map.get(key) {
+            Some(deps) => deps,
+            None => return,
+        };
+        for dep in reverse_deps.iter() {
+            // Dirtiness propagates to everything depending on `key`.
+            if fresh == Dirty {
+                self.dirty.insert(dep.clone());
+            }
+            assert!(self.dep_map.get_mut(dep).unwrap().0.remove(key));
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::{DependencyQueue, Freshness};
+
+    #[test]
+    fn deep_first() {
+        let mut q = DependencyQueue::new();
+
+        q.queue(Freshness::Fresh, &1, (), &[]);
+        q.queue(Freshness::Fresh, &2, (), &[1]);
+        q.queue(Freshness::Fresh, &3, (), &[]);
+        q.queue(Freshness::Fresh, &4, (), &[2, 3]);
+        q.queue(Freshness::Fresh, &5, (), &[4, 3]);
+        q.queue_finished();
+
+        assert_eq!(q.dequeue(), Some((Freshness::Fresh, 1, ())));
+        assert_eq!(q.dequeue(), Some((Freshness::Fresh, 3, ())));
+        assert_eq!(q.dequeue(), None);
+        q.finish(&3, Freshness::Fresh);
+        assert_eq!(q.dequeue(), None);
+        q.finish(&1, Freshness::Fresh);
+        assert_eq!(q.dequeue(), Some((Freshness::Fresh, 2, ())));
+        assert_eq!(q.dequeue(), None);
+        q.finish(&2, Freshness::Fresh);
+        assert_eq!(q.dequeue(), Some((Freshness::Fresh, 4, ())));
+        assert_eq!(q.dequeue(), None);
+        q.finish(&4, Freshness::Fresh);
+        assert_eq!(q.dequeue(), Some((Freshness::Fresh, 5, ())));
+    }
+}
diff --git a/src/cargo/util/diagnostic_server.rs b/src/cargo/util/diagnostic_server.rs
new file mode 100644 (file)
index 0000000..e7138f1
--- /dev/null
@@ -0,0 +1,256 @@
+//! A small TCP server to handle collection of diagnostics information in a
+//! cross-platform way for the `cargo fix` command.
+
+use std::collections::HashSet;
+use std::env;
+use std::io::{BufReader, Read, Write};
+use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream};
+use std::sync::Arc;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::thread::{self, JoinHandle};
+
+use failure::{Error, ResultExt};
+use serde_json;
+
+use util::{Config, ProcessBuilder};
+use util::errors::CargoResult;
+
+// Environment variable through which worker processes find the diagnostics
+// server's address.
+// NOTE(review): the identifier "DIAGNOSICS" is missing a "T"; it is kept
+// as-is here because other items in this module reference it, so a rename
+// must be coordinated across the file.
+const DIAGNOSICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER";
+// Boilerplate appended to fix-failure output asking users to file a bug.
+const PLEASE_REPORT_THIS_BUG: &str =
+    "\
+     This likely indicates a bug in either rustc or cargo itself,\n\
+     and we would appreciate a bug report! You're likely to see \n\
+     a number of compiler warnings after this message which cargo\n\
+     attempted to fix but failed. If you could open an issue at\n\
+     https://github.com/rust-lang/cargo/issues\n\
+     quoting the full output of this command we'd be very appreciative!\n\n\
+     ";
+
+/// Messages sent by `cargo fix` worker processes to the coordinating
+/// process (see `Message::post` and `DiagnosticPrinter::print`).
+#[derive(Deserialize, Serialize)]
+pub enum Message {
+    /// Suggestions were applied to `file` (`fixes` is the count).
+    Fixing {
+        file: String,
+        fixes: u32,
+    },
+    /// Fixes were applied but the compiler still reported errors.
+    FixFailed {
+        files: Vec<String>,
+        krate: Option<String>,
+    },
+    /// A suggestion could not be applied to `file`.
+    ReplaceFailed {
+        file: String,
+        message: String,
+    },
+    /// Edition migration was requested for an edition already enabled.
+    EditionAlreadyEnabled {
+        file: String,
+        edition: String,
+    },
+    /// Idiom lints requested for an edition `file` is not compiled with.
+    IdiomEditionMismatch {
+        file: String,
+        idioms: String,
+        edition: Option<String>,
+    },
+}
+
+impl Message {
+    pub fn post(&self) -> Result<(), Error> {
+        let addr = env::var(DIAGNOSICS_SERVER_VAR)
+            .context("diagnostics collector misconfigured")?;
+        let mut client =
+            TcpStream::connect(&addr).context("failed to connect to parent diagnostics target")?;
+
+        let s = serde_json::to_string(self).context("failed to serialize message")?;
+        client
+            .write_all(s.as_bytes())
+            .context("failed to write message to diagnostics target")?;
+        client
+            .shutdown(Shutdown::Write)
+            .context("failed to shutdown")?;
+
+        let mut tmp = Vec::new();
+        client
+            .read_to_end(&mut tmp)
+            .context("failed to receive a disconnect")?;
+
+        Ok(())
+    }
+}
+
+/// Prints `Message`s to the shell, warning at most once per file for the
+/// edition-related messages.
+pub struct DiagnosticPrinter<'a> {
+    config: &'a Config,
+    // Files already warned about having their edition enabled.
+    edition_already_enabled: HashSet<String>,
+    // Files already warned about an idiom/edition mismatch.
+    idiom_mismatch: HashSet<String>,
+}
+
+impl<'a> DiagnosticPrinter<'a> {
+    /// Creates a printer with empty de-duplication state.
+    pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> {
+        DiagnosticPrinter {
+            config,
+            edition_already_enabled: HashSet::new(),
+            idiom_mismatch: HashSet::new(),
+        }
+    }
+
+    /// Renders `msg` on the shell; edition/idiom warnings are emitted only
+    /// once per file.
+    pub fn print(&mut self, msg: &Message) -> CargoResult<()> {
+        match msg {
+            Message::Fixing { file, fixes } => {
+                let msg = if *fixes == 1 { "fix" } else { "fixes" };
+                let msg = format!("{} ({} {})", file, fixes, msg);
+                self.config.shell().status("Fixing", msg)
+            }
+            Message::ReplaceFailed { file, message } => {
+                let msg = format!("error applying suggestions to `{}`\n", file);
+                self.config.shell().warn(&msg)?;
+                write!(
+                    self.config.shell().err(),
+                    "The full error message was:\n\n> {}\n\n",
+                    message,
+                )?;
+                write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?;
+                Ok(())
+            }
+            Message::FixFailed { files, krate } => {
+                if let Some(ref krate) = *krate {
+                    self.config.shell().warn(&format!(
+                        "failed to automatically apply fixes suggested by rustc \
+                         to crate `{}`",
+                        krate,
+                    ))?;
+                } else {
+                    self.config.shell().warn(
+                        "failed to automatically apply fixes suggested by rustc"
+                    )?;
+                }
+                if !files.is_empty() {
+                    writeln!(
+                        self.config.shell().err(),
+                        "\nafter fixes were automatically applied the compiler \
+                         reported errors within these files:\n"
+                    )?;
+                    for file in files {
+                        writeln!(self.config.shell().err(), "  * {}", file)?;
+                    }
+                    writeln!(self.config.shell().err())?;
+                }
+                write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?;
+                Ok(())
+            }
+            Message::EditionAlreadyEnabled { file, edition } => {
+                // Like above, only warn once per file
+                // (`insert` returns false if the file was already recorded).
+                if !self.edition_already_enabled.insert(file.clone()) {
+                    return Ok(())
+                }
+
+                let msg = format!(
+                    "\
+cannot prepare for the {} edition when it is enabled, so cargo cannot
+automatically fix errors in `{}`
+
+To prepare for the {0} edition you should first remove `edition = '{0}'` from
+your `Cargo.toml` and then rerun this command. Once all warnings have been fixed
+then you can re-enable the `edition` key in `Cargo.toml`. For some more
+information about transitioning to the {0} edition see:
+
+  https://rust-lang-nursery.github.io/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html
+",
+                    edition,
+                    file,
+                );
+                self.config.shell().error(&msg)?;
+                Ok(())
+            }
+            Message::IdiomEditionMismatch { file, idioms, edition } => {
+                // Same as above
+                if !self.idiom_mismatch.insert(file.clone()) {
+                    return Ok(())
+                }
+                self.config.shell().error(&format!(
+                    "\
+cannot migrate to the idioms of the {} edition for `{}`
+because it is compiled {}, which doesn't match {0}
+
+consider migrating to the {0} edition by adding `edition = '{0}'` to
+`Cargo.toml` and then rerunning this command; a more detailed transition
+guide can be found at
+
+  https://rust-lang-nursery.github.io/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html
+",
+                    idioms,
+                    file,
+                    match edition {
+                        Some(s) => format!("with the {} edition", s),
+                        None => "without an edition".to_string(),
+                    },
+                ))?;
+                Ok(())
+            }
+        }
+    }
+}
+
+/// TCP listener on localhost used to collect diagnostics from `cargo fix`
+/// worker processes.
+#[derive(Debug)]
+pub struct RustfixDiagnosticServer {
+    listener: TcpListener,
+    addr: SocketAddr,
+}
+
+/// Handle to a running diagnostics server; dropping it shuts the server
+/// down and joins its thread.
+pub struct StartedServer {
+    addr: SocketAddr,
+    // Flag telling the accept loop to exit.
+    done: Arc<AtomicBool>,
+    // `Some` until `Drop` takes the handle to join it.
+    thread: Option<JoinHandle<()>>,
+}
+
+impl RustfixDiagnosticServer {
+    /// Binds a listener on an ephemeral localhost port.
+    pub fn new() -> Result<Self, Error> {
+        let listener = TcpListener::bind("127.0.0.1:0")
+            // NOTE(review): this message mentions "locking" but the listener
+            // serves diagnostics; looks like a copy-paste — confirm before
+            // changing the user-visible text.
+            .with_context(|_| "failed to bind TCP listener to manage locking")?;
+        let addr = listener.local_addr()?;
+
+        Ok(RustfixDiagnosticServer { listener, addr })
+    }
+
+    /// Points `process` at this server via the environment variable read by
+    /// `Message::post`.
+    pub fn configure(&self, process: &mut ProcessBuilder) {
+        process.env(DIAGNOSICS_SERVER_VAR, self.addr.to_string());
+    }
+
+    /// Spawns the accept loop on a background thread, invoking `on_message`
+    /// for every well-formed JSON message received.
+    pub fn start<F>(self, on_message: F) -> Result<StartedServer, Error>
+    where
+        F: Fn(Message) + Send + 'static,
+    {
+        let addr = self.addr;
+        let done = Arc::new(AtomicBool::new(false));
+        let done2 = done.clone();
+        let thread = thread::spawn(move || {
+            self.run(&on_message, &done2);
+        });
+
+        Ok(StartedServer {
+            addr,
+            thread: Some(thread),
+            done,
+        })
+    }
+
+    // Accept loop.  `done` is only checked after a connection is handled;
+    // `StartedServer::drop` wakes a blocked `accept` by opening a throwaway
+    // connection after setting the flag.
+    fn run(self, on_message: &Fn(Message), done: &AtomicBool) {
+        while let Ok((client, _)) = self.listener.accept() {
+            let client = BufReader::new(client);
+            match serde_json::from_reader(client) {
+                Ok(message) => on_message(message),
+                Err(e) => warn!("invalid diagnostics message: {}", e),
+            }
+            if done.load(Ordering::SeqCst) {
+                break
+            }
+        }
+    }
+}
+
+impl Drop for StartedServer {
+    fn drop(&mut self) {
+        self.done.store(true, Ordering::SeqCst);
+        // Ignore errors here as this is largely best-effort
+        if TcpStream::connect(&self.addr).is_err() {
+            return;
+        }
+        drop(self.thread.take().unwrap().join());
+    }
+}
diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs
new file mode 100644 (file)
index 0000000..6fc2898
--- /dev/null
@@ -0,0 +1,349 @@
+#![allow(unknown_lints)]
+
+use std::fmt;
+use std::process::{ExitStatus, Output};
+use std::str;
+use std::path::PathBuf;
+
+use core::{TargetKind, Workspace};
+use failure::{Context, Error, Fail};
+use clap;
+
+pub use failure::Error as CargoError;
+pub type CargoResult<T> = Result<T, Error>;
+
+/// Extension trait adding error-chain style context attachment to `Result`.
+pub trait CargoResultExt<T, E> {
+    /// Wraps the error, if any, in context computed lazily by `f`.
+    fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
+    where
+        F: FnOnce() -> D,
+        D: fmt::Display + Send + Sync + 'static;
+}
+
+impl<T, E> CargoResultExt<T, E> for Result<T, E>
+where
+    E: Into<Error>,
+{
+    fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
+    where
+        F: FnOnce() -> D,
+        D: fmt::Display + Send + Sync + 'static,
+    {
+        self.map_err(|failure| {
+            let err = failure.into();
+            let context = f();
+            // Log both layers at trace level before wrapping, since the
+            // returned `Context` displays only the outer message by default.
+            trace!("error: {}", err);
+            trace!("\tcontext: {}", context);
+            err.context(context)
+        })
+    }
+}
+
+/// Error for HTTP requests that completed with a non-200 status code.
+#[derive(Debug, Fail)]
+#[fail(display = "failed to get 200 response from `{}`, got {}", url, code)]
+pub struct HttpNot200 {
+    pub code: u32,
+    pub url: String,
+}
+
+/// Wrapper marking an error as internal (a cargo bug rather than a user
+/// error); `CliError::new` downcasts to this type to set its `unknown`
+/// flag.
+pub struct Internal {
+    inner: Error,
+}
+
+impl Internal {
+    pub fn new(inner: Error) -> Internal {
+        Internal { inner }
+    }
+}
+
+impl Fail for Internal {
+    // Delegates to the wrapped error's cause, so this wrapper layer is
+    // invisible in cause chains.
+    fn cause(&self) -> Option<&Fail> {
+        self.inner.as_fail().cause()
+    }
+}
+
+impl fmt::Debug for Internal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+impl fmt::Display for Internal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+/// Error wrapper related to a particular manifest and providing its path.
+///
+/// This error adds no displayable info of its own.
+pub struct ManifestError {
+    cause: Error,
+    manifest: PathBuf,
+}
+
+impl ManifestError {
+    pub fn new<E: Into<Error>>(cause: E, manifest: PathBuf) -> Self {
+        Self {
+            cause: cause.into(),
+            manifest,
+        }
+    }
+
+    /// Path of the manifest this error relates to.
+    pub fn manifest_path(&self) -> &PathBuf {
+        &self.manifest
+    }
+
+    /// Returns an iterator over the `ManifestError` chain of causes.
+    ///
+    /// So if this error was not caused by another `ManifestError` this will be empty.
+    pub fn manifest_causes(&self) -> ManifestCauses {
+        ManifestCauses { current: self }
+    }
+}
+
+impl Fail for ManifestError {
+    // Delegates to the wrapped cause's own cause, keeping this wrapper
+    // transparent in cause chains.
+    fn cause(&self) -> Option<&Fail> {
+        self.cause.as_fail().cause()
+    }
+}
+
+impl fmt::Debug for ManifestError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.cause.fmt(f)
+    }
+}
+
+impl fmt::Display for ManifestError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.cause.fmt(f)
+    }
+}
+
+/// An iterator over the `ManifestError` chain of causes.
+pub struct ManifestCauses<'a> {
+    // Most recently yielded error; its cause is inspected on each `next`.
+    current: &'a ManifestError,
+}
+
+impl<'a> Iterator for ManifestCauses<'a> {
+    type Item = &'a ManifestError;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.current = self.current.cause.downcast_ref()?;
+        Some(self.current)
+    }
+}
+
+impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {}
+
+// =============================================================================
+// Process errors
+
+/// Error describing a subprocess that failed or could not be run; `desc`
+/// carries the full human-readable report (see `process_error`).
+#[derive(Debug, Fail)]
+#[fail(display = "{}", desc)]
+pub struct ProcessError {
+    pub desc: String,
+    /// Exit status; `None` when the process never executed.
+    pub exit: Option<ExitStatus>,
+    /// Captured output; `None` when output was not captured.
+    pub output: Option<Output>,
+}
+
+// =============================================================================
+// Cargo test errors.
+
+/// Error when testcases fail
+#[derive(Debug, Fail)]
+#[fail(display = "{}", desc)]
+pub struct CargoTestError {
+    /// Which kind of test invocation failed (used to build rerun hints).
+    pub test: Test,
+    /// Joined descriptions of all failing test processes.
+    pub desc: String,
+    /// Exit status of the first failing process.
+    pub exit: Option<ExitStatus>,
+    pub causes: Vec<ProcessError>,
+}
+
+/// Which kind of test invocation failed; drives `CargoTestError::hint`.
+#[derive(Debug)]
+pub enum Test {
+    /// Multiple targets; no specific rerun hint is produced.
+    Multiple,
+    /// Documentation tests.
+    Doc,
+    /// A single named test target.
+    UnitTest {
+        kind: TargetKind,
+        name: String,
+        pkg_name: String,
+    },
+}
+
+impl CargoTestError {
+    pub fn new(test: Test, errors: Vec<ProcessError>) -> Self {
+        if errors.is_empty() {
+            panic!("Cannot create CargoTestError from empty Vec")
+        }
+        let desc = errors
+            .iter()
+            .map(|error| error.desc.clone())
+            .collect::<Vec<String>>()
+            .join("\n");
+        CargoTestError {
+            test,
+            desc,
+            exit: errors[0].exit,
+            causes: errors,
+        }
+    }
+
+    pub fn hint(&self, ws: &Workspace) -> String {
+        match self.test {
+            Test::UnitTest {
+                ref kind,
+                ref name,
+                ref pkg_name,
+            } => {
+                let pkg_info = if ws.members().count() > 1 && ws.is_virtual() {
+                    format!("-p {} ", pkg_name)
+                } else {
+                    String::new()
+                };
+
+                match *kind {
+                    TargetKind::Bench => {
+                        format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name)
+                    }
+                    TargetKind::Bin => {
+                        format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name)
+                    }
+                    TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info),
+                    TargetKind::Test => {
+                        format!("test failed, to rerun pass '{}--test {}'", pkg_info, name)
+                    }
+                    TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
+                        format!("test failed, to rerun pass '{}--example {}", pkg_info, name)
+                    }
+                    _ => "test failed.".into(),
+                }
+            }
+            Test::Doc => "test failed, to rerun pass '--doc'".into(),
+            _ => "test failed.".into(),
+        }
+    }
+}
+
+// =============================================================================
+// CLI errors
+
+/// `Result` alias for command-line entry points, carrying a `CliError`.
+pub type CliResult = Result<(), CliError>;
+
+#[derive(Debug)]
+pub struct CliError {
+    /// Underlying error; `None` for exit-code-only failures (`CliError::code`).
+    pub error: Option<CargoError>,
+    /// Whether the error is internal/unknown (see `CliError::new`).
+    pub unknown: bool,
+    pub exit_code: i32,
+}
+
+impl CliError {
+    pub fn new(error: CargoError, code: i32) -> CliError {
+        let unknown = error.downcast_ref::<Internal>().is_some();
+        CliError {
+            error: Some(error),
+            exit_code: code,
+            unknown,
+        }
+    }
+
+    pub fn code(code: i32) -> CliError {
+        CliError {
+            error: None,
+            exit_code: code,
+            unknown: false,
+        }
+    }
+}
+
+impl From<CargoError> for CliError {
+    // Any plain cargo error maps to the conventional exit code 101.
+    fn from(err: CargoError) -> CliError {
+        CliError::new(err, 101)
+    }
+}
+
+impl From<clap::Error> for CliError {
+    // Clap errors that print to stderr exit 1; others (presumably
+    // help/version output — confirm against clap docs) exit 0.
+    fn from(err: clap::Error) -> CliError {
+        let code = if err.use_stderr() { 1 } else { 0 };
+        CliError::new(err.into(), code)
+    }
+}
+
+// =============================================================================
+// Construction helpers
+
+pub fn process_error(
+    msg: &str,
+    status: Option<ExitStatus>,
+    output: Option<&Output>,
+) -> ProcessError {
+    let exit = match status {
+        Some(s) => status_to_string(s),
+        None => "never executed".to_string(),
+    };
+    let mut desc = format!("{} ({})", &msg, exit);
+
+    if let Some(out) = output {
+        match str::from_utf8(&out.stdout) {
+            Ok(s) if !s.trim().is_empty() => {
+                desc.push_str("\n--- stdout\n");
+                desc.push_str(s);
+            }
+            Ok(..) | Err(..) => {}
+        }
+        match str::from_utf8(&out.stderr) {
+            Ok(s) if !s.trim().is_empty() => {
+                desc.push_str("\n--- stderr\n");
+                desc.push_str(s);
+            }
+            Ok(..) | Err(..) => {}
+        }
+    }
+
+    return ProcessError {
+        desc,
+        exit: status,
+        output: output.cloned(),
+    };
+
+    #[cfg(unix)]
+    fn status_to_string(status: ExitStatus) -> String {
+        use std::os::unix::process::*;
+        use libc;
+
+        if let Some(signal) = status.signal() {
+            let name = match signal as libc::c_int {
+                libc::SIGABRT => ", SIGABRT: process abort signal",
+                libc::SIGALRM => ", SIGALRM: alarm clock",
+                libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation",
+                libc::SIGHUP => ", SIGHUP: hangup",
+                libc::SIGILL => ", SIGILL: illegal instruction",
+                libc::SIGINT => ", SIGINT: terminal interrupt signal",
+                libc::SIGKILL => ", SIGKILL: kill",
+                libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read",
+                libc::SIGQUIT => ", SIGQUIT: terminal quite signal",
+                libc::SIGSEGV => ", SIGSEGV: invalid memory reference",
+                libc::SIGTERM => ", SIGTERM: termination signal",
+                libc::SIGBUS => ", SIGBUS: access to undefined memory",
+                #[cfg(not(target_os = "haiku"))]
+                libc::SIGSYS => ", SIGSYS: bad system call",
+                libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap",
+                _ => "",
+            };
+            format!("signal: {}{}", signal, name)
+        } else {
+            status.to_string()
+        }
+    }
+
+    #[cfg(windows)]
+    fn status_to_string(status: ExitStatus) -> String {
+        status.to_string()
+    }
+}
+
+/// Wraps `error` as an internal cargo error (see `Internal`).
+pub fn internal<S: fmt::Display>(error: S) -> CargoError {
+    _internal(&error)
+}
+
+// Non-generic inner function; the generic `internal` shim above forwards
+// here through a trait object.
+fn _internal(error: &fmt::Display) -> CargoError {
+    Internal::new(format_err!("{}", error)).into()
+}
diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs
new file mode 100644 (file)
index 0000000..c292975
--- /dev/null
@@ -0,0 +1,346 @@
+use std::fs::{self, File, OpenOptions};
+use std::io::{Read, Seek, SeekFrom, Write};
+use std::io;
+use std::path::{Display, Path, PathBuf};
+
+use termcolor::Color::Cyan;
+use fs2::{lock_contended_error, FileExt};
+#[allow(unused_imports)]
+use libc;
+
+use util::Config;
+use util::paths;
+use util::errors::{CargoError, CargoResult, CargoResultExt};
+
+/// A file handle together with the path it was opened at and the kind of
+/// lock currently held on it. Dropping a `FileLock` releases the lock.
+pub struct FileLock {
+    f: Option<File>,   // `None` only after `Drop` has taken the handle
+    path: PathBuf,     // the path this lock was opened at
+    state: State,      // which kind of lock (if any) is held
+}
+
+/// The kind of lock held on a `FileLock`'s file.
+#[derive(PartialEq, Debug)]
+enum State {
+    Unlocked,
+    Shared,
+    Exclusive,
+}
+
+impl FileLock {
+    /// Returns the underlying file handle of this lock.
+    pub fn file(&self) -> &File {
+        // `f` is only `None` once `Drop` has run, so this unwrap cannot fire
+        // during normal use.
+        self.f.as_ref().unwrap()
+    }
+
+    /// Returns the underlying path that this lock points to.
+    ///
+    /// Note that special care must be taken to ensure that the path is not
+    /// referenced outside the lifetime of this lock.
+    pub fn path(&self) -> &Path {
+        // Handing out the path of an unlocked file would defeat the whole
+        // point of the locking scheme.
+        assert_ne!(self.state, State::Unlocked);
+        &self.path
+    }
+
+    /// Returns the parent path containing this file
+    pub fn parent(&self) -> &Path {
+        assert_ne!(self.state, State::Unlocked);
+        self.path.parent().unwrap()
+    }
+
+    /// Removes all sibling files to this locked file.
+    ///
+    /// This can be useful if a directory is locked with a sentinel file but it
+    /// needs to be cleared out as it may be corrupt.
+    pub fn remove_siblings(&self) -> CargoResult<()> {
+        let path = self.path();
+        for entry in path.parent().unwrap().read_dir()? {
+            let entry = entry?;
+            // Keep the sentinel/lock file itself; remove everything else.
+            if Some(&entry.file_name()[..]) == path.file_name() {
+                continue;
+            }
+            let kind = entry.file_type()?;
+            if kind.is_dir() {
+                paths::remove_dir_all(entry.path())?;
+            } else {
+                paths::remove_file(entry.path())?;
+            }
+        }
+        Ok(())
+    }
+}
+
+impl Read for FileLock {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        self.file().read(buf)
+    }
+}
+
+impl Seek for FileLock {
+    fn seek(&mut self, to: SeekFrom) -> io::Result<u64> {
+        self.file().seek(to)
+    }
+}
+
+impl Write for FileLock {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.file().write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.file().flush()
+    }
+}
+
+impl Drop for FileLock {
+    fn drop(&mut self) {
+        if self.state != State::Unlocked {
+            if let Some(f) = self.f.take() {
+                let _ = f.unlock();
+            }
+        }
+    }
+}
+
+/// A "filesystem" is intended to be a globally shared, hence locked, resource
+/// in Cargo.
+///
+/// The `Path` of a filesystem cannot be learned unless it's done in a locked
+/// fashion, and otherwise functions on this structure are prepared to handle
+/// concurrent invocations across multiple instances of Cargo.
+#[derive(Clone, Debug)]
+pub struct Filesystem {
+    root: PathBuf, // the directory this filesystem is rooted at
+}
+
+impl Filesystem {
+    /// Creates a new filesystem to be rooted at the given path.
+    pub fn new(path: PathBuf) -> Filesystem {
+        Filesystem { root: path }
+    }
+
+    /// Like `Path::join`, creates a new filesystem rooted at this filesystem
+    /// joined with the given path.
+    pub fn join<T: AsRef<Path>>(&self, other: T) -> Filesystem {
+        Filesystem::new(self.root.join(other))
+    }
+
+    /// Like `Path::push`, pushes a new path component onto this filesystem.
+    pub fn push<T: AsRef<Path>>(&mut self, other: T) {
+        self.root.push(other);
+    }
+
+    /// Consumes this filesystem and returns the underlying `PathBuf`.
+    ///
+    /// Note that this is a relatively dangerous operation and should be used
+    /// with great caution! The caller obtains the path without any lock.
+    pub fn into_path_unlocked(self) -> PathBuf {
+        self.root
+    }
+
+    /// Creates the directory pointed to by this filesystem.
+    ///
+    /// Handles errors where other Cargo processes are also attempting to
+    /// concurrently create this directory.
+    pub fn create_dir(&self) -> io::Result<()> {
+        // `create_dir_all` succeeds when the directory already exists, which
+        // covers the concurrent-creation case.
+        fs::create_dir_all(&self.root)
+    }
+
+    /// Returns an adaptor that can be used to print the path of this
+    /// filesystem.
+    pub fn display(&self) -> Display {
+        self.root.display()
+    }
+
+    /// Opens exclusive access to a file, returning the locked version of a
+    /// file.
+    ///
+    /// This function will create a file at `path` if it doesn't already exist
+    /// (including intermediate directories), and then it will acquire an
+    /// exclusive lock on `path`. If the process must block waiting for the
+    /// lock, the `msg` is printed to `config`.
+    ///
+    /// The returned file can be accessed to look at the path and also has
+    /// read/write access to the underlying file.
+    pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+    where
+        P: AsRef<Path>,
+    {
+        self.open(
+            path.as_ref(),
+            OpenOptions::new().read(true).write(true).create(true),
+            State::Exclusive,
+            config,
+            msg,
+        )
+    }
+
+    /// Opens shared access to a file, returning the locked version of a file.
+    ///
+    /// This function will fail if `path` doesn't already exist, but if it does
+    /// then it will acquire a shared lock on `path`. If the process must block
+    /// waiting for the lock, the `msg` is printed to `config`.
+    ///
+    /// The returned file can be accessed to look at the path and also has read
+    /// access to the underlying file. Any writes to the file will return an
+    /// error.
+    pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+    where
+        P: AsRef<Path>,
+    {
+        self.open(
+            path.as_ref(),
+            OpenOptions::new().read(true),
+            State::Shared,
+            config,
+            msg,
+        )
+    }
+
+    /// Shared implementation of `open_rw`/`open_ro`: open (creating parent
+    /// directories if needed for exclusive access), then lock per `state`.
+    fn open(
+        &self,
+        path: &Path,
+        opts: &OpenOptions,
+        state: State,
+        config: &Config,
+        msg: &str,
+    ) -> CargoResult<FileLock> {
+        let path = self.root.join(path);
+
+        // If we want an exclusive lock then if we fail because of NotFound it's
+        // likely because an intermediate directory didn't exist, so try to
+        // create the directory and then continue.
+        let f = opts.open(&path)
+            .or_else(|e| {
+                if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
+                    fs::create_dir_all(path.parent().unwrap())?;
+                    opts.open(&path)
+                } else {
+                    Err(e)
+                }
+            })
+            .chain_err(|| format!("failed to open: {}", path.display()))?;
+        // The file is opened first and locked second; `acquire` handles
+        // contention (and prints `msg` if we have to block).
+        match state {
+            State::Exclusive => {
+                acquire(config, msg, &path, &|| f.try_lock_exclusive(), &|| {
+                    f.lock_exclusive()
+                })?;
+            }
+            State::Shared => {
+                acquire(config, msg, &path, &|| f.try_lock_shared(), &|| {
+                    f.lock_shared()
+                })?;
+            }
+            State::Unlocked => {}
+        }
+        Ok(FileLock {
+            f: Some(f),
+            path,
+            state,
+        })
+    }
+}
+
+impl PartialEq<Path> for Filesystem {
+    fn eq(&self, other: &Path) -> bool {
+        self.root == other
+    }
+}
+
+impl PartialEq<Filesystem> for Path {
+    fn eq(&self, other: &Filesystem) -> bool {
+        self == other.root
+    }
+}
+
+/// Acquires a lock on a file in a "nice" manner.
+///
+/// Almost all long-running blocking actions in Cargo have a status message
+/// associated with them as we're not sure how long they'll take. Whenever a
+/// conflicted file lock happens, this is the case (we're not sure when the lock
+/// will be released).
+///
+/// This function will acquire the lock on a `path`, printing out a nice message
+/// to the console if we have to wait for it. It will first attempt to use `try`
+/// to acquire a lock on the crate, and in the case of contention it will emit a
+/// status message based on `msg` to `config`'s shell, and then use `block` to
+/// block waiting to acquire a lock.
+///
+/// Returns an error if the lock could not be acquired or if any error other
+/// than a contention error happens.
+/// Acquires a lock on a file in a "nice" manner.
+///
+/// Almost all long-running blocking actions in Cargo have a status message
+/// associated with them as we're not sure how long they'll take. Whenever a
+/// conflicted file lock happens, this is the case (we're not sure when the lock
+/// will be released).
+///
+/// This function will acquire the lock on a `path`, printing out a nice message
+/// to the console if we have to wait for it. It will first attempt to use `try`
+/// to acquire a lock on the crate, and in the case of contention it will emit a
+/// status message based on `msg` to `config`'s shell, and then use `block` to
+/// block waiting to acquire a lock.
+///
+/// Returns an error if the lock could not be acquired or if any error other
+/// than a contention error happens.
+fn acquire(
+    config: &Config,
+    msg: &str,
+    path: &Path,
+    try: &Fn() -> io::Result<()>,
+    block: &Fn() -> io::Result<()>,
+) -> CargoResult<()> {
+    // File locking on Unix is currently implemented via `flock`, which is known
+    // to be broken on NFS. We could in theory just ignore errors that happen on
+    // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking
+    // forever**, even if the nonblocking flag is passed!
+    //
+    // As a result, we just skip all file locks entirely on NFS mounts. That
+    // should avoid calling any `flock` functions at all, and it wouldn't work
+    // there anyway.
+    //
+    // [1]: https://github.com/rust-lang/cargo/issues/2615
+    if is_on_nfs_mount(path) {
+        return Ok(());
+    }
+
+    match try() {
+        Ok(()) => return Ok(()),
+
+        // In addition to ignoring NFS which is commonly not working we also
+        // just ignore locking on filesystems that look like they don't
+        // implement file locking. We detect that here via the return value of
+        // locking (e.g. inspecting errno).
+        #[cfg(unix)]
+        Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) =>
+        {
+            return Ok(())
+        }
+
+        #[cfg(target_os = "linux")]
+        Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) =>
+        {
+            return Ok(())
+        }
+
+        Err(e) => {
+            // Any error other than "someone else holds the lock" is fatal;
+            // contention falls through to the blocking path below.
+            if e.raw_os_error() != lock_contended_error().raw_os_error() {
+                let e = CargoError::from(e);
+                let cx = format!("failed to lock file: {}", path.display());
+                return Err(e.context(cx).into());
+            }
+        }
+    }
+    // Contended: tell the user what we're waiting on, then block.
+    let msg = format!("waiting for file lock on {}", msg);
+    config.shell().status_with_color("Blocking", &msg, Cyan)?;
+
+    block().chain_err(|| format!("failed to lock file: {}", path.display()))?;
+    return Ok(());
+
+    // Detect NFS via `statfs`'s filesystem type; any failure (e.g. an
+    // interior NUL in the path) conservatively reports "not NFS".
+    #[cfg(all(target_os = "linux", not(target_env = "musl")))]
+    fn is_on_nfs_mount(path: &Path) -> bool {
+        use std::ffi::CString;
+        use std::mem;
+        use std::os::unix::prelude::*;
+
+        let path = match CString::new(path.as_os_str().as_bytes()) {
+            Ok(path) => path,
+            Err(_) => return false,
+        };
+
+        unsafe {
+            // A zeroed `statfs` struct is a valid out-parameter for `statfs`.
+            let mut buf: libc::statfs = mem::zeroed();
+            let r = libc::statfs(path.as_ptr(), &mut buf);
+
+            r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32
+        }
+    }
+
+    // On platforms where we can't (or needn't) detect NFS, assume not NFS.
+    #[cfg(any(not(target_os = "linux"), target_env = "musl"))]
+    fn is_on_nfs_mount(_path: &Path) -> bool {
+        false
+    }
+}
diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs
new file mode 100644 (file)
index 0000000..78aa2b9
--- /dev/null
@@ -0,0 +1,110 @@
+use std::borrow::Borrow;
+use std::collections::hash_map::HashMap;
+use std::fmt;
+use std::hash::Hash;
+
+/// A directed graph: each node maps to its children along with per-edge data.
+pub struct Graph<N, E> {
+    nodes: HashMap<N, HashMap<N, E>>, // node -> (child -> edge data)
+}
+
+impl<N: Eq + Hash + Clone, E: Default> Graph<N, E> {
+    pub fn new() -> Graph<N, E> {
+        Graph {
+            nodes: HashMap::new(),
+        }
+    }
+
+    pub fn add(&mut self, node: N) {
+        self.nodes.entry(node).or_insert_with(HashMap::new);
+    }
+
+    pub fn link(&mut self, node: N, child: N) -> &mut E {
+        self.nodes
+            .entry(node)
+            .or_insert_with(HashMap::new)
+            .entry(child)
+            .or_insert_with(Default::default)
+    }
+
+    pub fn contains<Q: ?Sized>(&self, k: &Q) -> bool
+    where
+        N: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.nodes.contains_key(k)
+    }
+
+    pub fn edge(&self, from: &N, to: &N) -> Option<&E> {
+        self.nodes.get(from)?.get(to)
+    }
+
+    pub fn edges(&self, from: &N) -> impl Iterator<Item = (&N, &E)> {
+        self.nodes.get(from).into_iter().flat_map(|x| x.iter())
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &N> {
+        self.nodes.keys()
+    }
+
+    /// Resolves one of the paths from the given dependent package up to
+    /// the root.
+    pub fn path_to_top<'a>(&'a self, mut pkg: &'a N) -> Vec<&'a N> {
+        // Note that this implementation isn't the most robust per se, we'll
+        // likely have to tweak this over time. For now though it works for what
+        // it's used for!
+        let mut result = vec![pkg];
+        let first_pkg_depending_on = |pkg: &N, res: &[&N]| {
+            self.nodes
+                .iter()
+                .filter(|&(_, adjacent)| adjacent.contains_key(pkg))
+                // Note that we can have "cycles" introduced through dev-dependency
+                // edges, so make sure we don't loop infinitely.
+                .find(|&(node, _)| !res.contains(&node))
+                .map(|p| p.0)
+        };
+        while let Some(p) = first_pkg_depending_on(pkg, &result) {
+            result.push(p);
+            pkg = p;
+        }
+        result
+    }
+}
+
+impl<N: Eq + Hash + Clone, E: Default> Default for Graph<N, E> {
+    fn default() -> Graph<N, E> {
+        Graph::new()
+    }
+}
+
+// Manual `Debug` so nodes render via `Display` rather than requiring
+// `N: Debug`.
+impl<N: fmt::Display + Eq + Hash, E> fmt::Debug for Graph<N, E> {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        writeln!(fmt, "Graph {{")?;
+
+        for (n, e) in &self.nodes {
+            writeln!(fmt, "  - {}", n)?;
+
+            // Only child names are printed; edge data is omitted.
+            for n in e.keys() {
+                writeln!(fmt, "    - {}", n)?;
+            }
+        }
+
+        write!(fmt, "}}")?;
+
+        Ok(())
+    }
+}
+
+// Manual impls keep the trait bounds minimal (`E: Eq` / `E: Clone`) rather
+// than whatever `derive` would require on `N` and `E` together.
+impl<N: Eq + Hash, E: Eq> PartialEq for Graph<N, E> {
+    fn eq(&self, other: &Graph<N, E>) -> bool {
+        self.nodes.eq(&other.nodes)
+    }
+}
+impl<N: Eq + Hash, E: Eq> Eq for Graph<N, E> {}
+
+impl<N: Eq + Hash + Clone, E: Clone> Clone for Graph<N, E> {
+    fn clone(&self) -> Graph<N, E> {
+        Graph {
+            nodes: self.nodes.clone(),
+        }
+    }
+}
diff --git a/src/cargo/util/hex.rs b/src/cargo/util/hex.rs
new file mode 100644 (file)
index 0000000..7e4dd00
--- /dev/null
@@ -0,0 +1,27 @@
+#![allow(deprecated)]
+
+use hex;
+use std::hash::{Hash, Hasher, SipHasher};
+
+pub fn to_hex(num: u64) -> String {
+    hex::encode(&[
+        (num >> 0) as u8,
+        (num >> 8) as u8,
+        (num >> 16) as u8,
+        (num >> 24) as u8,
+        (num >> 32) as u8,
+        (num >> 40) as u8,
+        (num >> 48) as u8,
+        (num >> 56) as u8,
+    ])
+}
+
+pub fn hash_u64<H: Hash>(hashable: &H) -> u64 {
+    let mut hasher = SipHasher::new_with_keys(0, 0);
+    hashable.hash(&mut hasher);
+    hasher.finish()
+}
+
+pub fn short_hash<H: Hash>(hashable: &H) -> String {
+    to_hex(hash_u64(hashable))
+}
diff --git a/src/cargo/util/important_paths.rs b/src/cargo/util/important_paths.rs
new file mode 100644 (file)
index 0000000..2fb4dea
--- /dev/null
@@ -0,0 +1,32 @@
+use std::fs;
+use std::path::{Path, PathBuf};
+use util::errors::CargoResult;
+use util::paths;
+
+/// Find the root Cargo.toml
+pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult<PathBuf> {
+    let file = "Cargo.toml";
+    for current in paths::ancestors(cwd) {
+        let manifest = current.join(file);
+        if fs::metadata(&manifest).is_ok() {
+            return Ok(manifest);
+        }
+    }
+
+    bail!(
+        "could not find `{}` in `{}` or any parent directory",
+        file,
+        cwd.display()
+    )
+}
+
+/// Return the path to the `file` in `pwd`, if it exists.
+pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult<PathBuf> {
+    let manifest = pwd.join(file);
+
+    if manifest.exists() {
+        Ok(manifest)
+    } else {
+        bail!("Could not find `{}` in `{}`", file, pwd.display())
+    }
+}
diff --git a/src/cargo/util/job.rs b/src/cargo/util/job.rs
new file mode 100644 (file)
index 0000000..44c61f0
--- /dev/null
@@ -0,0 +1,142 @@
+//! Job management (mostly for windows)
+//!
+//! Most of the time when you're running cargo you expect Ctrl-C to actually
+//! terminate the entire tree of processes in play, not just the one at the top
+//! (cargo). This currently works "by default" on Unix platforms because Ctrl-C
+//! actually sends a signal to the *process group* rather than the parent
+//! process, so everything will get torn down. On Windows, however, this does
+//! not happen and Ctrl-C just kills cargo.
+//!
+//! To achieve the same semantics on Windows we use Job Objects to ensure that
+//! all processes die at the same time. Job objects have a mode of operation
+//! where when all handles to the object are closed it causes all child
+//! processes associated with the object to be terminated immediately.
+//! Conveniently whenever a process in the job object spawns a new process the
+//! child will be associated with the job object as well. This means if we add
+//! ourselves to the job object we create then everything will get torn down!
+
+pub use self::imp::Setup;
+
+/// Performs platform job setup; returns `None` if setup failed (Windows)
+/// and a guard value whose `Drop` tears things down where applicable.
+pub fn setup() -> Option<Setup> {
+    unsafe { imp::setup() }
+}
+
+#[cfg(unix)]
+mod imp {
+    use std::env;
+    use libc;
+
+    // No teardown needed on Unix, so the guard carries no state.
+    pub type Setup = ();
+
+    pub unsafe fn setup() -> Option<()> {
+        // There's a test case for the behavior of
+        // when-cargo-is-killed-subprocesses-are-also-killed, but that requires
+        // one cargo spawned to become its own session leader, so we do that
+        // here.
+        if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() {
+            libc::setsid();
+        }
+        Some(())
+    }
+}
+
+#[cfg(windows)]
+mod imp {
+    extern crate winapi;
+
+    use std::io;
+    use std::mem;
+    use std::ptr;
+
+    use self::winapi::shared::minwindef::*;
+    use self::winapi::um::handleapi::*;
+    use self::winapi::um::jobapi2::*;
+    use self::winapi::um::processthreadsapi::*;
+    use self::winapi::um::winnt::*;
+    use self::winapi::um::winnt::HANDLE;
+
+    // Guard value: while alive, the process belongs to a kill-on-close job
+    // object; its `Drop` relaxes the kill-on-close flag (see below).
+    pub struct Setup {
+        job: Handle,
+    }
+
+    // RAII wrapper closing the raw job-object handle on drop.
+    pub struct Handle {
+        inner: HANDLE,
+    }
+
+    fn last_err() -> io::Error {
+        io::Error::last_os_error()
+    }
+
+    pub unsafe fn setup() -> Option<Setup> {
+        // Creates a new job object for us to use and then adds ourselves to it.
+        // Note that all errors are basically ignored in this function,
+        // intentionally. Job objects are "relatively new" in Windows,
+        // particularly the ability to support nested job objects. Older
+        // Windows installs don't support this ability. We probably don't want
+        // to force Cargo to abort in this situation or force others to *not*
+        // use job objects, so we instead just ignore errors and assume that
+        // we're otherwise part of someone else's job object in this case.
+
+        let job = CreateJobObjectW(ptr::null_mut(), ptr::null());
+        if job.is_null() {
+            return None;
+        }
+        let job = Handle { inner: job };
+
+        // Indicate that when all handles to the job object are gone that all
+        // process in the object should be killed. Note that this includes our
+        // entire process tree by default because we've added ourselves and
+        // our children will reside in the job once we spawn a process.
+        let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
+        info = mem::zeroed();
+        info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+        let r = SetInformationJobObject(
+            job.inner,
+            JobObjectExtendedLimitInformation,
+            &mut info as *mut _ as LPVOID,
+            mem::size_of_val(&info) as DWORD,
+        );
+        if r == 0 {
+            return None;
+        }
+
+        // Assign our process to this job object, meaning that our children will
+        // now live or die based on our existence.
+        let me = GetCurrentProcess();
+        let r = AssignProcessToJobObject(job.inner, me);
+        if r == 0 {
+            return None;
+        }
+
+        Some(Setup { job })
+    }
+
+    impl Drop for Setup {
+        fn drop(&mut self) {
+            // On normal exits (not ctrl-c), we don't want to kill any child
+            // processes. The destructor here configures our job object to
+            // *not* kill everything on close, then closes the job object.
+            unsafe {
+                // A zeroed limit struct clears JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE.
+                let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
+                info = mem::zeroed();
+                let r = SetInformationJobObject(
+                    self.job.inner,
+                    JobObjectExtendedLimitInformation,
+                    &mut info as *mut _ as LPVOID,
+                    mem::size_of_val(&info) as DWORD,
+                );
+                if r == 0 {
+                    info!("failed to configure job object to defaults: {}", last_err());
+                }
+            }
+        }
+    }
+
+    impl Drop for Handle {
+        fn drop(&mut self) {
+            unsafe {
+                CloseHandle(self.inner);
+            }
+        }
+    }
+}
diff --git a/src/cargo/util/lev_distance.rs b/src/cargo/util/lev_distance.rs
new file mode 100644 (file)
index 0000000..c4a7e98
--- /dev/null
@@ -0,0 +1,56 @@
+use std::cmp;
+
+/// Levenshtein edit distance between `me` and `t`, counted in chars.
+/// Used for "did you mean ...?" suggestions.
+pub fn lev_distance(me: &str, t: &str) -> usize {
+    if me.is_empty() {
+        return t.chars().count();
+    }
+    if t.is_empty() {
+        return me.chars().count();
+    }
+
+    // Single rolling row of the classic DP matrix.
+    // NOTE(review): `dcol` is sized from `t.len()` (bytes) but indexed by
+    // char positions; char count <= byte count so this stays in bounds, it
+    // merely over-allocates for multi-byte strings.
+    let mut dcol = (0..t.len() + 1).collect::<Vec<_>>();
+    let mut t_last = 0;
+
+    for (i, sc) in me.chars().enumerate() {
+        let mut current = i;
+        dcol[0] = current + 1;
+
+        for (j, tc) in t.chars().enumerate() {
+            let next = dcol[j + 1];
+
+            if sc == tc {
+                // Equal chars: no edit, carry the diagonal value.
+                dcol[j + 1] = current;
+            } else {
+                // 1 + min(substitution, deletion, insertion).
+                dcol[j + 1] = cmp::min(current, next);
+                dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
+            }
+
+            current = next;
+            t_last = j;
+        }
+    }
+
+    dcol[t_last + 1]
+}
+
+#[test]
+fn test_lev_distance() {
+    use std::char::{from_u32, MAX};
+    // Test bytelength agnosticity: every char is distance 0 from itself,
+    // regardless of its UTF-8 encoded length.
+    for c in (0u32..MAX as u32)
+        .filter_map(from_u32)
+        .map(|i| i.to_string())
+    {
+        assert_eq!(lev_distance(&c, &c), 0);
+    }
+
+    // Multi-byte strings differing by one or two substitutions; the distance
+    // must be symmetric in its arguments.
+    let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
+    let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
+    let c = "Mary häd ä little lämb\n\nLittle lämb\n";
+    assert_eq!(lev_distance(a, b), 1);
+    assert_eq!(lev_distance(b, a), 1);
+    assert_eq!(lev_distance(a, c), 2);
+    assert_eq!(lev_distance(c, a), 2);
+    assert_eq!(lev_distance(b, c), 1);
+    assert_eq!(lev_distance(c, b), 1);
+}
diff --git a/src/cargo/util/lockserver.rs b/src/cargo/util/lockserver.rs
new file mode 100644 (file)
index 0000000..0e5f524
--- /dev/null
@@ -0,0 +1,173 @@
+//! An implementation of IPC locks, guaranteed to be released if a process dies
+//!
+//! This module implements a locking server/client where the main `cargo fix`
+//! process will start up a server and then all the client processes will
+//! connect to it. The main purpose of this file is to ensure that each crate
+//! (aka file entry point) is only fixed by one process at a time, currently
+//! concurrent fixes can't happen.
+//!
+//! The basic design here is to use a TCP server which is pretty portable across
+//! platforms. For simplicity it just uses threads as well. Clients connect to
+//! the main server, inform the server what its name is, and then wait for the
+//! server to give it the lock (aka write a byte).
+
+use std::collections::HashMap;
+use std::io::{BufRead, BufReader, Read, Write};
+use std::net::{SocketAddr, TcpListener, TcpStream};
+use std::path::Path;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::{Arc, Mutex};
+use std::thread::{self, JoinHandle};
+
+use failure::{Error, ResultExt};
+
+/// The not-yet-running lock server: holds the bound listener and the
+/// per-name client queues.
+pub struct LockServer {
+    listener: TcpListener,
+    addr: SocketAddr,
+    threads: HashMap<String, ServerClient>, // one entry per "named mutex"
+    done: Arc<AtomicBool>,                  // shared shutdown flag
+}
+
+/// Handle to a running lock server; dropping it shuts the server down.
+pub struct LockServerStarted {
+    done: Arc<AtomicBool>,
+    addr: SocketAddr,
+    thread: Option<JoinHandle<()>>, // `Option` so Drop can take/join it
+}
+
+/// Client side of a held lock; the lock is released when the socket closes.
+pub struct LockServerClient {
+    _socket: TcpStream,
+}
+
+// Server-side state for one named lock: the worker thread plus
+// (is-running, waiting-clients) guarded by a mutex.
+struct ServerClient {
+    thread: Option<JoinHandle<()>>,
+    lock: Arc<Mutex<(bool, Vec<TcpStream>)>>,
+}
+
+impl LockServer {
+    /// Binds a listener on an ephemeral localhost port.
+    pub fn new() -> Result<LockServer, Error> {
+        let listener = TcpListener::bind("127.0.0.1:0")
+            .with_context(|_| "failed to bind TCP listener to manage locking")?;
+        let addr = listener.local_addr()?;
+        Ok(LockServer {
+            listener,
+            addr,
+            threads: HashMap::new(),
+            done: Arc::new(AtomicBool::new(false)),
+        })
+    }
+
+    /// The address clients should connect to.
+    pub fn addr(&self) -> &SocketAddr {
+        &self.addr
+    }
+
+    /// Spawns the accept loop on a background thread and returns a handle
+    /// that shuts the server down when dropped.
+    pub fn start(self) -> Result<LockServerStarted, Error> {
+        let addr = self.addr;
+        let done = self.done.clone();
+        let thread = thread::spawn(|| {
+            self.run();
+        });
+        Ok(LockServerStarted {
+            addr,
+            thread: Some(thread),
+            done,
+        })
+    }
+
+    /// Accept loop: one connection per lock request; exits when `done` is
+    /// set (the Drop impl pokes us with a dummy connection to unblock accept).
+    fn run(mut self) {
+        while let Ok((client, _)) = self.listener.accept() {
+            if self.done.load(Ordering::SeqCst) {
+                break;
+            }
+
+            // Learn the name of our connected client to figure out if it needs
+            // to wait for another process to release the lock.
+            let mut client = BufReader::new(client);
+            let mut name = String::new();
+            if client.read_line(&mut name).is_err() {
+                continue;
+            }
+            let client = client.into_inner();
+
+            // If this "named mutex" is already registered and the thread is
+            // still going, put it on the queue. Otherwise wait on the previous
+            // thread and we'll replace it just below.
+            if let Some(t) = self.threads.get_mut(&name) {
+                let mut state = t.lock.lock().unwrap();
+                if state.0 {
+                    state.1.push(client);
+                    continue;
+                }
+                drop(t.thread.take().unwrap().join());
+            }
+
+            // Fresh worker: hand the lock to each waiting client in turn.
+            let lock = Arc::new(Mutex::new((true, vec![client])));
+            let lock2 = lock.clone();
+            let thread = thread::spawn(move || {
+                loop {
+                    let mut client = {
+                        let mut state = lock2.lock().unwrap();
+                        if state.1.is_empty() {
+                            // Mark ourselves finished before exiting so the
+                            // accept loop knows to join/replace this thread.
+                            state.0 = false;
+                            break;
+                        } else {
+                            state.1.remove(0)
+                        }
+                    };
+                    // Inform this client that it now has the lock and wait for
+                    // it to disconnect by waiting for EOF.
+                    if client.write_all(&[1]).is_err() {
+                        continue;
+                    }
+                    let mut dst = Vec::new();
+                    drop(client.read_to_end(&mut dst));
+                }
+            });
+
+            self.threads.insert(
+                name,
+                ServerClient {
+                    thread: Some(thread),
+                    lock,
+                },
+            );
+        }
+    }
+}
+
+impl Drop for LockServer {
+    fn drop(&mut self) {
+        // Join all worker threads; their join errors are deliberately ignored.
+        for (_, mut client) in self.threads.drain() {
+            if let Some(thread) = client.thread.take() {
+                drop(thread.join());
+            }
+        }
+    }
+}
+
+impl Drop for LockServerStarted {
+    fn drop(&mut self) {
+        self.done.store(true, Ordering::SeqCst);
+        // Connect once to wake the accept loop so it observes `done`.
+        // Ignore errors here as this is largely best-effort
+        if TcpStream::connect(&self.addr).is_err() {
+            return;
+        }
+        drop(self.thread.take().unwrap().join());
+    }
+}
+
+impl LockServerClient {
+    /// Acquires the lock named by `name` from the server at `addr`, blocking
+    /// until the server writes the "you have it" byte. The lock is held for
+    /// the lifetime of the returned value (released when the socket closes).
+    pub fn lock(addr: &SocketAddr, name: &Path) -> Result<LockServerClient, Error> {
+        let mut client =
+            TcpStream::connect(&addr).with_context(|_| "failed to connect to parent lock server")?;
+        // Protocol: send our name terminated by '\n', then block on one byte.
+        client
+            .write_all(name.display().to_string().as_bytes())
+            .and_then(|_| client.write_all(b"\n"))
+            .with_context(|_| "failed to write to lock server")?;
+        let mut buf = [0];
+        client
+            .read_exact(&mut buf)
+            .with_context(|_| "failed to acquire lock")?;
+        Ok(LockServerClient { _socket: client })
+    }
+}
+
diff --git a/src/cargo/util/machine_message.rs b/src/cargo/util/machine_message.rs
new file mode 100644 (file)
index 0000000..1c68393
--- /dev/null
@@ -0,0 +1,71 @@
+use serde::ser;
+use serde_json::{self, value::RawValue};
+
+use core::{PackageId, Target};
+
+/// A machine-readable message Cargo emits as a line of JSON.
+pub trait Message: ser::Serialize {
+    /// Value of the `"reason"` field identifying the message type.
+    fn reason(&self) -> &str;
+}
+
+/// Serializes `t` and prints it with a `"reason"` field spliced in front.
+pub fn emit<T: Message>(t: &T) {
+    let json = serde_json::to_string(t).unwrap();
+    // The splice below assumes the serialization is a JSON object starting
+    // with `{"`; the assert guards that assumption.
+    assert!(json.starts_with("{\""));
+    let reason = json!(t.reason());
+    println!("{{\"reason\":{},{}", reason, &json[1..]);
+}
+
+/// A diagnostic forwarded from the compiler, kept as raw JSON.
+#[derive(Serialize)]
+pub struct FromCompiler<'a> {
+    pub package_id: &'a PackageId,
+    pub target: &'a Target,
+    pub message: Box<RawValue>, // the compiler's own JSON, passed through verbatim
+}
+
+impl<'a> Message for FromCompiler<'a> {
+    fn reason(&self) -> &str {
+        "compiler-message"
+    }
+}
+
+/// Emitted after a compilation unit finishes, describing its outputs.
+#[derive(Serialize)]
+pub struct Artifact<'a> {
+    pub package_id: &'a PackageId,
+    pub target: &'a Target,
+    pub profile: ArtifactProfile,
+    pub features: Vec<String>,
+    pub filenames: Vec<String>,
+    pub fresh: bool, // true when the artifact was reused rather than rebuilt
+}
+
+impl<'a> Message for Artifact<'a> {
+    fn reason(&self) -> &str {
+        "compiler-artifact"
+    }
+}
+
+/// This is different from the regular `Profile` to maintain backwards
+/// compatibility (in particular, `test` is no longer in `Profile`, but we
+/// still want it to be included here).
+#[derive(Serialize)]
+pub struct ArtifactProfile {
+    pub opt_level: &'static str,
+    pub debuginfo: Option<u32>,
+    pub debug_assertions: bool,
+    pub overflow_checks: bool,
+    pub test: bool,
+}
+
+/// Emitted after a build script runs, summarizing what it told Cargo.
+#[derive(Serialize)]
+pub struct BuildScript<'a> {
+    pub package_id: &'a PackageId,
+    pub linked_libs: &'a [String],
+    pub linked_paths: &'a [String],
+    pub cfgs: &'a [String],
+    pub env: &'a [(String, String)],
+}
+
+impl<'a> Message for BuildScript<'a> {
+    fn reason(&self) -> &str {
+        "build-script-executed"
+    }
+}
diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs
new file mode 100644 (file)
index 0000000..c189149
--- /dev/null
@@ -0,0 +1,60 @@
+use std::time::Duration;
+
+pub use self::cfg::{Cfg, CfgExpr};
+pub use self::config::{homedir, Config, ConfigValue};
+pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
+pub use self::errors::{CargoError, CargoResult, CargoResultExt, CliResult, Test};
+pub use self::errors::{CargoTestError, CliError, ProcessError};
+pub use self::errors::{internal, process_error};
+pub use self::flock::{FileLock, Filesystem};
+pub use self::graph::Graph;
+pub use self::hex::{short_hash, to_hex, hash_u64};
+pub use self::lev_distance::lev_distance;
+pub use self::paths::{dylib_path, join_paths, bytes2path, path2bytes};
+pub use self::paths::{dylib_path_envvar, normalize_path, without_prefix};
+pub use self::process_builder::{process, ProcessBuilder};
+pub use self::rustc::Rustc;
+pub use self::sha256::Sha256;
+pub use self::to_semver::ToSemver;
+pub use self::to_url::ToUrl;
+pub use self::vcs::{FossilRepo, GitRepo, HgRepo, PijulRepo, existing_vcs_repo};
+pub use self::read2::read2;
+pub use self::progress::{Progress, ProgressStyle};
+pub use self::lockserver::{LockServer, LockServerStarted, LockServerClient};
+pub use self::diagnostic_server::RustfixDiagnosticServer;
+
+pub mod config;
+pub mod errors;
+pub mod graph;
+pub mod hex;
+pub mod important_paths;
+pub mod job;
+pub mod lev_distance;
+pub mod machine_message;
+pub mod network;
+pub mod paths;
+pub mod process_builder;
+pub mod profile;
+pub mod to_semver;
+pub mod to_url;
+pub mod toml;
+mod cfg;
+mod dependency_queue;
+mod rustc;
+mod sha256;
+mod vcs;
+mod flock;
+mod read2;
+mod progress;
+mod lockserver;
+pub mod diagnostic_server;
+
/// Render a `Duration` for human consumption, e.g. `"2m 05s"` or `"0.30s"`.
///
/// Durations of a minute or more are shown as whole minutes and seconds;
/// shorter ones as seconds with two centisecond digits.
pub fn elapsed(duration: Duration) -> String {
    let total_secs = duration.as_secs();
    if total_secs < 60 {
        // Truncate the nanosecond part down to centiseconds.
        let centis = duration.subsec_nanos() / 10_000_000;
        format!("{}.{:02}s", total_secs, centis)
    } else {
        format!("{}m {:02}s", total_secs / 60, total_secs % 60)
    }
}
diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs
new file mode 100644 (file)
index 0000000..4c3fcac
--- /dev/null
@@ -0,0 +1,129 @@
+use curl;
+use git2;
+
+use failure::Error;
+
+use util::Config;
+use util::errors::{CargoResult, HttpNot200};
+
/// Tracks how many retries remain when retrying spurious network errors.
pub struct Retry<'a> {
    config: &'a Config,
    // Number of additional attempts still allowed.
    remaining: u32,
}

impl<'a> Retry<'a> {
    /// Build a `Retry` whose attempt budget comes from the `net.retry`
    /// config value (defaulting to 2 extra attempts).
    pub fn new(config: &'a Config) -> CargoResult<Retry<'a>> {
        Ok(Retry {
            config,
            remaining: config.get::<Option<u32>>("net.retry")?.unwrap_or(2),
        })
    }

    /// Run `f` once. On a spurious network error with retries remaining,
    /// emit a shell warning and return `Ok(None)` (caller should call
    /// again); otherwise pass the result through as `Ok(Some(..))`/`Err`.
    pub fn try<T>(&mut self, f: impl FnOnce() -> CargoResult<T>)
        -> CargoResult<Option<T>>
    {
        match f() {
            Err(ref e) if maybe_spurious(e) && self.remaining > 0 => {
                let msg = format!(
                    "spurious network error ({} tries \
                     remaining): {}",
                    self.remaining, e
                );
                self.config.shell().warn(msg)?;
                self.remaining -= 1;
                Ok(None)
            }
            other => other.map(Some),
        }
    }
}
+
+fn maybe_spurious(err: &Error) -> bool {
+    for e in err.iter_chain() {
+        if let Some(git_err) = e.downcast_ref::<git2::Error>() {
+            match git_err.class() {
+                git2::ErrorClass::Net | git2::ErrorClass::Os => return true,
+                _ => (),
+            }
+        }
+        if let Some(curl_err) = e.downcast_ref::<curl::Error>() {
+            if curl_err.is_couldnt_connect()
+                || curl_err.is_couldnt_resolve_proxy()
+                || curl_err.is_couldnt_resolve_host()
+                || curl_err.is_operation_timedout()
+                || curl_err.is_recv_error()
+            {
+                return true;
+            }
+        }
+        if let Some(not_200) = e.downcast_ref::<HttpNot200>() {
+            if 500 <= not_200.code && not_200.code < 600 {
+                return true;
+            }
+        }
+    }
+    false
+}
+
+/// Wrapper method for network call retry logic.
+///
+/// Retry counts provided by Config object `net.retry`. Config shell outputs
+/// a warning on per retry.
+///
+/// Closure must return a `CargoResult`.
+///
+/// # Examples
+///
+/// ```ignore
+/// use util::network;
+/// cargo_result = network::with_retry(&config, || something.download());
+/// ```
+pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
+where
+    F: FnMut() -> CargoResult<T>,
+{
+    let mut retry = Retry::new(config)?;
+    loop {
+        if let Some(ret) = retry.try(&mut callback)? {
+            return Ok(ret)
+        }
+    }
+}
#[test]
fn with_retry_repeats_the_call_then_works() {
    //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
    let error1 = HttpNot200 {
        code: 501,
        url: "Uri".to_string(),
    }.into();
    let error2 = HttpNot200 {
        code: 502,
        url: "Uri".to_string(),
    }.into();
    // Results are popped from the back, so both spurious errors are hit
    // (and retried past) before the final `Ok(())` is reached.
    let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
    let config = Config::default().unwrap();
    let result = with_retry(&config, || results.pop().unwrap());
    assert_eq!(result.unwrap(), ())
}

#[test]
fn with_retry_finds_nested_spurious_errors() {
    use util::CargoError;

    //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
    //String error messages are not considered spurious
    let error1 = CargoError::from(HttpNot200 {
        code: 501,
        url: "Uri".to_string(),
    });
    let error1 = CargoError::from(error1.context("A non-spurious wrapping err"));
    let error2 = CargoError::from(HttpNot200 {
        code: 502,
        url: "Uri".to_string(),
    });
    let error2 = CargoError::from(error2.context("A second chained error"));
    // Even though each HTTP error is wrapped in a non-spurious context,
    // `maybe_spurious` walks the full cause chain and still retries.
    let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
    let config = Config::default().unwrap();
    let result = with_retry(&config, || results.pop().unwrap());
    assert_eq!(result.unwrap(), ())
}
diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs
new file mode 100644 (file)
index 0000000..ca54e5f
--- /dev/null
@@ -0,0 +1,310 @@
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File, OpenOptions};
+use std::io;
+use std::io::prelude::*;
+use std::iter;
+use std::path::{Component, Path, PathBuf};
+
+use filetime::FileTime;
+
+use util::errors::{CargoError, CargoResult, CargoResultExt, Internal};
+
/// Join `paths` into a single OS path-list value (as used in `$PATH`),
/// naming the environment variable `env` in the error if joining fails
/// (e.g. because an entry contains the separator character).
pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> CargoResult<OsString> {
    let err = match env::join_paths(paths.iter()) {
        Ok(paths) => return Ok(paths),
        Err(e) => e,
    };
    // Layer the error: raw join error, an internal note listing the
    // offending paths, then a user-facing hint naming the variable.
    let paths = paths.iter().map(Path::new).collect::<Vec<_>>();
    let err = CargoError::from(err);
    let explain = Internal::new(format_err!("failed to join path array: {:?}", paths));
    let err = CargoError::from(err.context(explain));
    let more_explain = format!(
        "failed to join search paths together\n\
         Does ${} have an unterminated quote character?",
        env
    );
    Err(err.context(more_explain).into())
}
+
/// Name of the environment variable holding the dynamic-library search
/// path on the current platform.
pub fn dylib_path_envvar() -> &'static str {
    // Windows resolves DLLs via PATH; macOS has its own variable; every
    // other supported Unix uses LD_LIBRARY_PATH.
    if cfg!(windows) {
        return "PATH";
    }
    if cfg!(target_os = "macos") {
        return "DYLD_LIBRARY_PATH";
    }
    "LD_LIBRARY_PATH"
}

/// The directories currently on the dynamic-library search path, or an
/// empty list when the variable is unset.
pub fn dylib_path() -> Vec<PathBuf> {
    env::var_os(dylib_path_envvar())
        .map(|raw| env::split_paths(&raw).collect())
        .unwrap_or_default()
}
+
/// Lexically normalize `path`: resolve `.` and `..` components without
/// touching the filesystem (so symlinks are *not* honored).
pub fn normalize_path(path: &Path) -> PathBuf {
    let mut parts = path.components().peekable();
    // A leading Windows prefix (drive letter, UNC share, ...) seeds the
    // result so a later `..` can never pop it.
    let mut normalized = match parts.peek().cloned() {
        Some(prefix @ Component::Prefix(..)) => {
            parts.next();
            PathBuf::from(prefix.as_os_str())
        }
        _ => PathBuf::new(),
    };

    for part in parts {
        match part {
            // Any prefix was already consumed above.
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => normalized.push(part.as_os_str()),
            // `.` contributes nothing.
            Component::CurDir => {}
            // `..` lexically drops the most recent component.
            Component::ParentDir => {
                normalized.pop();
            }
            Component::Normal(name) => normalized.push(name),
        }
    }
    normalized
}
+
/// If `prefix` is a leading, component-wise prefix of `long_path`, return
/// the remainder of `long_path`; otherwise return `None`.
pub fn without_prefix<'a>(long_path: &'a Path, prefix: &'a Path) -> Option<&'a Path> {
    let mut rest = long_path.components();
    for expected in prefix.components() {
        // Every prefix component must be matched, in order, by the path;
        // running out of path components is also a mismatch.
        if rest.next() != Some(expected) {
            return None;
        }
    }
    Some(rest.as_path())
}
+
/// Resolve `exec` to a canonical absolute path.
///
/// A bare program name (a single path component) is searched for on
/// `$PATH`, also trying the platform's executable extension (e.g. `.exe`);
/// anything containing a directory part is canonicalized directly.
pub fn resolve_executable(exec: &Path) -> CargoResult<PathBuf> {
    if exec.components().count() == 1 {
        let paths = env::var_os("PATH").ok_or_else(|| format_err!("no PATH"))?;
        let candidates = env::split_paths(&paths).flat_map(|path| {
            let candidate = path.join(&exec);
            let with_exe = if env::consts::EXE_EXTENSION == "" {
                None
            } else {
                Some(candidate.with_extension(env::consts::EXE_EXTENSION))
            };
            // Try the bare name first, then the name with the extension.
            iter::once(candidate).chain(with_exe)
        });
        for candidate in candidates {
            if candidate.is_file() {
                // PATH may have a component like "." in it, so we still need to
                // canonicalize.
                return Ok(candidate.canonicalize()?);
            }
        }

        bail!("no executable for `{}` found in PATH", exec.display())
    } else {
        Ok(exec.canonicalize()?)
    }
}
+
+pub fn read(path: &Path) -> CargoResult<String> {
+    match String::from_utf8(read_bytes(path)?) {
+        Ok(s) => Ok(s),
+        Err(_) => bail!("path at `{}` was not valid utf-8", path.display()),
+    }
+}
+
+pub fn read_bytes(path: &Path) -> CargoResult<Vec<u8>> {
+    let res = (|| -> CargoResult<_> {
+        let mut ret = Vec::new();
+        let mut f = File::open(path)?;
+        if let Ok(m) = f.metadata() {
+            ret.reserve(m.len() as usize + 1);
+        }
+        f.read_to_end(&mut ret)?;
+        Ok(ret)
+    })().chain_err(|| format!("failed to read `{}`", path.display()))?;
+    Ok(res)
+}
+
+pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> {
+    (|| -> CargoResult<()> {
+        let mut f = File::create(path)?;
+        f.write_all(contents)?;
+        Ok(())
+    })().chain_err(|| format!("failed to write `{}`", path.display()))?;
+    Ok(())
+}
+
+pub fn write_if_changed<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> {
+    (|| -> CargoResult<()> {
+        let contents = contents.as_ref();
+        let mut f = OpenOptions::new()
+            .read(true)
+            .write(true)
+            .create(true)
+            .open(&path)?;
+        let mut orig = Vec::new();
+        f.read_to_end(&mut orig)?;
+        if orig != contents {
+            f.set_len(0)?;
+            f.seek(io::SeekFrom::Start(0))?;
+            f.write_all(contents)?;
+        }
+        Ok(())
+    })().chain_err(|| format!("failed to write `{}`", path.as_ref().display()))?;
+    Ok(())
+}
+
+pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> {
+    (|| -> CargoResult<()> {
+        let mut f = OpenOptions::new()
+            .write(true)
+            .append(true)
+            .create(true)
+            .open(path)?;
+
+        f.write_all(contents)?;
+        Ok(())
+    })().chain_err(|| format!("failed to write `{}`", path.display()))?;
+    Ok(())
+}
+
/// Last-modification time of the file at `path`, with the path attached
/// to any stat error.
pub fn mtime(path: &Path) -> CargoResult<FileTime> {
    let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?;
    Ok(FileTime::from_last_modification_time(&meta))
}
+
/// View a `Path` as raw bytes (zero-cost on Unix, where paths are bytes).
#[cfg(unix)]
pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> {
    use std::os::unix::prelude::*;
    Ok(path.as_os_str().as_bytes())
}
/// View a `Path` as raw bytes; on Windows this requires the path to be
/// valid Unicode, since native paths there are UTF-16.
#[cfg(windows)]
pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> {
    match path.as_os_str().to_str() {
        Some(s) => Ok(s.as_bytes()),
        None => Err(format_err!("invalid non-unicode path: {}", path.display())),
    }
}
+
/// Inverse of `path2bytes`: reconstruct a `PathBuf` from raw bytes.
#[cfg(unix)]
pub fn bytes2path(bytes: &[u8]) -> CargoResult<PathBuf> {
    use std::ffi::OsStr;
    use std::os::unix::prelude::*;
    Ok(PathBuf::from(OsStr::from_bytes(bytes)))
}
/// Inverse of `path2bytes`; on Windows the bytes must be valid UTF-8.
#[cfg(windows)]
pub fn bytes2path(bytes: &[u8]) -> CargoResult<PathBuf> {
    use std::str;
    match str::from_utf8(bytes) {
        Ok(s) => Ok(PathBuf::from(s)),
        Err(..) => Err(format_err!("invalid non-unicode path")),
    }
}
+
/// Iterate over `path` and each of its parent directories, innermost
/// first.
pub fn ancestors(path: &Path) -> PathAncestors {
    PathAncestors::new(path)
}

/// Iterator over a path and its ancestors, optionally cut off at a
/// configured root directory.
pub struct PathAncestors<'a> {
    /// Next path to yield; `None` once the walk is finished.
    current: Option<&'a Path>,
    /// If set, iteration stops *after* yielding this path.
    stop_at: Option<PathBuf>,
}

impl<'a> PathAncestors<'a> {
    fn new(path: &Path) -> PathAncestors {
        PathAncestors {
            current: Some(path),
            //HACK: avoid reading `~/.cargo/config` when testing Cargo itself.
            stop_at: env::var("__CARGO_TEST_ROOT").ok().map(PathBuf::from),
        }
    }
}

impl<'a> Iterator for PathAncestors<'a> {
    type Item = &'a Path;

    fn next(&mut self) -> Option<&'a Path> {
        let yielded = self.current?;
        // Prepare the following step: walk up one level, unless we just
        // produced the configured stop marker.
        self.current = match self.stop_at {
            Some(ref stop) if yielded == stop.as_path() => None,
            _ => yielded.parent(),
        };
        Some(yielded)
    }
}
+
/// Recursively remove the directory tree at `p`.
///
/// Unlike `std::fs::remove_dir_all`, a symlink is removed itself rather
/// than followed, and files that fail with a permission error get their
/// read-only bit cleared before a retry (via `remove_file`).
pub fn remove_dir_all<P: AsRef<Path>>(p: P) -> CargoResult<()> {
    _remove_dir_all(p.as_ref())
}

// Monomorphic worker for `remove_dir_all`.
fn _remove_dir_all(p: &Path) -> CargoResult<()> {
    // Remove a symlink itself; never descend through it into its target.
    if p.symlink_metadata()?.file_type().is_symlink() {
        return remove_file(p);
    }
    let entries = p
        .read_dir()
        .chain_err(|| format!("failed to read directory `{}`", p.display()))?;
    for entry in entries {
        let entry = entry?;
        let path = entry.path();
        if entry.file_type()?.is_dir() {
            remove_dir_all(&path)?;
        } else {
            remove_file(&path)?;
        }
    }
    remove_dir(&p)
}
+
/// Remove the (empty) directory at `p`, attaching the path to any error.
pub fn remove_dir<P: AsRef<Path>>(p: P) -> CargoResult<()> {
    _remove_dir(p.as_ref())
}

// Monomorphic worker for `remove_dir`.
fn _remove_dir(p: &Path) -> CargoResult<()> {
    fs::remove_dir(p).chain_err(|| format!("failed to remove directory `{}`", p.display()))?;
    Ok(())
}
+
/// Remove the file at `p`, retrying once after clearing a read-only
/// attribute (read-only files typically cannot be deleted on Windows).
pub fn remove_file<P: AsRef<Path>>(p: P) -> CargoResult<()> {
    _remove_file(p.as_ref())
}

// Monomorphic worker for `remove_file`.
fn _remove_file(p: &Path) -> CargoResult<()> {
    let mut err = match fs::remove_file(p) {
        Ok(()) => return Ok(()),
        Err(e) => e,
    };

    // A permission error may just mean the file is read-only; clear the
    // bit and try exactly once more, preferring the newer error if any.
    if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) {
        match fs::remove_file(p) {
            Ok(()) => return Ok(()),
            Err(e) => err = e,
        }
    }

    Err(err).chain_err(|| format!("failed to remove file `{}`", p.display()))?;
    Ok(())
}
+
/// Clear the read-only bit on `p` if it is set.
///
/// Returns `Ok(true)` when the permissions were changed, `Ok(false)` when
/// the file was already writable.
fn set_not_readonly(p: &Path) -> io::Result<bool> {
    let mut permissions = p.metadata()?.permissions();
    if permissions.readonly() {
        permissions.set_readonly(false);
        fs::set_permissions(p, permissions)?;
        Ok(true)
    } else {
        Ok(false)
    }
}
diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs
new file mode 100644 (file)
index 0000000..e6a4c03
--- /dev/null
@@ -0,0 +1,367 @@
+use std::collections::HashMap;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::path::Path;
+use std::process::{Command, Output, Stdio};
+
+use jobserver::Client;
+use shell_escape::escape;
+
+use util::{process_error, CargoResult, CargoResultExt, read2};
+
/// A builder object for an external process, similar to `std::process::Command`.
#[derive(Clone, Debug)]
pub struct ProcessBuilder {
    /// The program to execute.
    program: OsString,
    /// A list of arguments to pass to the program.
    args: Vec<OsString>,
    /// Any environment variables that should be set for the program.
    /// A `None` value marks the variable for removal from the inherited
    /// environment (see `env_remove` / `build_command`).
    env: HashMap<String, Option<OsString>>,
    /// Which directory to run the program from.
    cwd: Option<OsString>,
    /// The `make` jobserver. See the [jobserver crate][jobserver_docs] for
    /// more information.
    ///
    /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/
    jobserver: Option<Client>,
}
+
impl fmt::Display for ProcessBuilder {
    /// Render the command as a backquoted, shell-escaped string (used in
    /// the error messages built by `exec` and friends).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "`{}", self.program.to_string_lossy())?;

        for arg in &self.args {
            write!(f, " {}", escape(arg.to_string_lossy()))?;
        }

        write!(f, "`")
    }
}
+
impl ProcessBuilder {
    /// (chainable) Set the executable for the process.
    pub fn program<T: AsRef<OsStr>>(&mut self, program: T) -> &mut ProcessBuilder {
        self.program = program.as_ref().to_os_string();
        self
    }

    /// (chainable) Add an arg to the args list.
    pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut ProcessBuilder {
        self.args.push(arg.as_ref().to_os_string());
        self
    }

    /// (chainable) Add many args to the args list.
    pub fn args<T: AsRef<OsStr>>(&mut self, arguments: &[T]) -> &mut ProcessBuilder {
        self.args
            .extend(arguments.iter().map(|t| t.as_ref().to_os_string()));
        self
    }

    /// (chainable) Replace args with new args list
    pub fn args_replace<T: AsRef<OsStr>>(&mut self, arguments: &[T]) -> &mut ProcessBuilder {
        self.args = arguments
            .iter()
            .map(|t| t.as_ref().to_os_string())
            .collect();
        self
    }

    /// (chainable) Set the current working directory of the process
    pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut ProcessBuilder {
        self.cwd = Some(path.as_ref().to_os_string());
        self
    }

    /// (chainable) Set an environment variable for the process.
    pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut ProcessBuilder {
        self.env
            .insert(key.to_string(), Some(val.as_ref().to_os_string()));
        self
    }

    /// (chainable) Unset an environment variable for the process.
    /// (Stored as a `None` entry so `build_command` calls `env_remove`.)
    pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder {
        self.env.insert(key.to_string(), None);
        self
    }

    /// Get the executable name.
    pub fn get_program(&self) -> &OsString {
        &self.program
    }

    /// Get the program arguments
    pub fn get_args(&self) -> &[OsString] {
        &self.args
    }

    /// Get the current working directory for the process
    pub fn get_cwd(&self) -> Option<&Path> {
        self.cwd.as_ref().map(Path::new)
    }

    /// Get an environment variable as the process will see it (will inherit from environment
    /// unless explicitally unset).
    pub fn get_env(&self, var: &str) -> Option<OsString> {
        // Explicit entries win; otherwise fall back to this process's own
        // environment. A recorded `None` (env_remove) yields `None`.
        self.env
            .get(var)
            .cloned()
            .or_else(|| Some(env::var_os(var)))
            .and_then(|s| s)
    }

    /// Get all environment variables explicitly set or unset for the process (not inherited
    /// vars).
    pub fn get_envs(&self) -> &HashMap<String, Option<OsString>> {
        &self.env
    }

    /// Set the `make` jobserver. See the [jobserver crate][jobserver_docs] for
    /// more information.
    ///
    /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/
    pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self {
        self.jobserver = Some(jobserver.clone());
        self
    }

    /// Run the process, waiting for completion, and mapping non-success exit codes to an error.
    pub fn exec(&self) -> CargoResult<()> {
        let mut command = self.build_command();
        let exit = command.status().chain_err(|| {
            process_error(
                &format!("could not execute process {}", self),
                None,
                None,
            )
        })?;

        if exit.success() {
            Ok(())
        } else {
            Err(process_error(
                &format!("process didn't exit successfully: {}", self),
                Some(exit),
                None,
            ).into())
        }
    }

    /// Replaces the current process with the target process.
    ///
    /// On Unix, this executes the process using the unix syscall `execvp`, which will block
    /// this process, and will only return if there is an error.
    ///
    /// On Windows this isn't technically possible. Instead we emulate it to the best of our
    /// ability. One aspect we fix here is that we specify a handler for the ctrl-c handler.
    /// In doing so (and by effectively ignoring it) we should emulate proxying ctrl-c
    /// handling to the application at hand, which will either terminate or handle it itself.
    /// According to microsoft's documentation at:
    /// https://docs.microsoft.com/en-us/windows/console/ctrl-c-and-ctrl-break-signals
    /// the ctrl-c signal is sent to all processes attached to a terminal, which should
    /// include our child process. If the child terminates then we'll reap them in Cargo
    /// pretty quickly, and if the child handles the signal then we won't terminate
    /// (and we shouldn't!) until the process itself later exits.
    pub fn exec_replace(&self) -> CargoResult<()> {
        imp::exec_replace(self)
    }

    /// Execute the process, returning the stdio output, or an error if non-zero exit status.
    pub fn exec_with_output(&self) -> CargoResult<Output> {
        let mut command = self.build_command();

        let output = command.output().chain_err(|| {
            process_error(
                &format!("could not execute process {}", self),
                None,
                None,
            )
        })?;

        if output.status.success() {
            Ok(output)
        } else {
            Err(process_error(
                &format!("process didn't exit successfully: {}", self),
                Some(output.status),
                Some(&output),
            ).into())
        }
    }

    /// Execute a command, passing each line of stdout and stderr to the supplied callbacks, which
    /// can mutate the string data.
    ///
    /// If any invocations of these function return an error, it will be propagated.
    ///
    /// Optionally, output can be passed to errors using `print_output`
    pub fn exec_with_streaming(
        &self,
        on_stdout_line: &mut FnMut(&str) -> CargoResult<()>,
        on_stderr_line: &mut FnMut(&str) -> CargoResult<()>,
        print_output: bool,
    ) -> CargoResult<Output> {
        let mut stdout = Vec::new();
        let mut stderr = Vec::new();

        let mut cmd = self.build_command();
        cmd.stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .stdin(Stdio::null());

        // First error returned by a callback; reported after the child
        // has exited so we still collect the full output.
        let mut callback_error = None;
        let status = (|| {
            let mut child = cmd.spawn()?;
            let out = child.stdout.take().unwrap();
            let err = child.stderr.take().unwrap();
            read2(out, err, &mut |is_out, data, eof| {
                // Hand only complete lines to the callbacks; a trailing
                // partial line stays buffered until more data (or EOF).
                let idx = if eof {
                    data.len()
                } else {
                    match data.iter().rposition(|b| *b == b'\n') {
                        Some(i) => i + 1,
                        None => return,
                    }
                };
                // Move the complete portion out of read2's buffer and
                // append it to our accumulated stdout/stderr.
                let data = data.drain(..idx);
                let dst = if is_out { &mut stdout } else { &mut stderr };
                let start = dst.len();
                dst.extend(data);
                for line in String::from_utf8_lossy(&dst[start..]).lines() {
                    // After the first callback failure, stop invoking
                    // callbacks but keep draining the child's output.
                    if callback_error.is_some() {
                        break;
                    }
                    let callback_result = if is_out {
                        on_stdout_line(line)
                    } else {
                        on_stderr_line(line)
                    };
                    if let Err(e) = callback_result {
                        callback_error = Some(e);
                    }
                }
            })?;
            child.wait()
        })()
            .chain_err(|| {
            process_error(
                &format!("could not execute process {}", self),
                None,
                None,
            )
        })?;
        let output = Output {
            stdout,
            stderr,
            status,
        };

        {
            // A non-zero exit takes precedence over a callback error.
            let to_print = if print_output { Some(&output) } else { None };
            if !output.status.success() {
                return Err(process_error(
                    &format!("process didn't exit successfully: {}", self),
                    Some(output.status),
                    to_print,
                ).into());
            } else if let Some(e) = callback_error {
                let cx = process_error(
                    &format!("failed to parse process output: {}", self),
                    Some(output.status),
                    to_print,
                );
                return Err(e.context(cx).into());
            }
        }

        Ok(output)
    }

    /// Converts ProcessBuilder into a `std::process::Command`, and handles the jobserver if
    /// present.
    pub fn build_command(&self) -> Command {
        let mut command = Command::new(&self.program);
        if let Some(cwd) = self.get_cwd() {
            command.current_dir(cwd);
        }
        for arg in &self.args {
            command.arg(arg);
        }
        // `Some` entries are set explicitly; `None` entries are removed
        // from the inherited environment (recorded by `env_remove`).
        for (k, v) in &self.env {
            match *v {
                Some(ref v) => {
                    command.env(k, v);
                }
                None => {
                    command.env_remove(k);
                }
            }
        }
        if let Some(ref c) = self.jobserver {
            c.configure(&mut command);
        }
        command
    }
}
+
+/// A helper function to create a `ProcessBuilder`.
+pub fn process<T: AsRef<OsStr>>(cmd: T) -> ProcessBuilder {
+    ProcessBuilder {
+        program: cmd.as_ref().to_os_string(),
+        args: Vec::new(),
+        cwd: None,
+        env: HashMap::new(),
+        jobserver: None,
+    }
+}
+
#[cfg(unix)]
mod imp {
    use CargoResult;
    use std::os::unix::process::CommandExt;
    use util::{process_error, ProcessBuilder};

    /// Replace the current process image with the built command (execvp).
    ///
    /// On success this never returns; an `Err` is produced only when the
    /// `exec` call itself fails.
    pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> {
        let mut command = process_builder.build_command();
        // `Command::exec` only returns on failure, yielding the OS error.
        let error = command.exec();
        Err(::util::CargoError::from(error)
            .context(process_error(
                &format!("could not execute process {}", process_builder),
                None,
                None,
            ))
            .into())
    }
}
+
#[cfg(windows)]
mod imp {
    extern crate winapi;

    use CargoResult;
    use util::{process_error, ProcessBuilder};
    use self::winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
    use self::winapi::um::consoleapi::SetConsoleCtrlHandler;

    unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL {
        // Do nothing. Let the child process handle it.
        TRUE
    }

    /// Emulation of Unix `exec` on Windows: install a no-op ctrl-c
    /// handler (so the child alone reacts to ctrl-c) and then run the
    /// child to completion. See `ProcessBuilder::exec_replace` docs.
    pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> {
        unsafe {
            if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE {
                return Err(process_error(
                    "Could not set Ctrl-C handler.",
                    None,
                    None).into());
            }
        }

        // Just exec the process as normal.
        process_builder.exec()
    }
}
diff --git a/src/cargo/util/profile.rs b/src/cargo/util/profile.rs
new file mode 100644 (file)
index 0000000..e7db180
--- /dev/null
@@ -0,0 +1,89 @@
+use std::env;
+use std::fmt;
+use std::mem;
+use std::time;
+use std::iter::repeat;
+use std::cell::RefCell;
+use std::io::{stdout, StdoutLock, Write};
+
// Start times of the profiling spans currently open on this thread.
thread_local!(static PROFILE_STACK: RefCell<Vec<time::Instant>> = RefCell::new(Vec::new()));
// Completed spans, buffered until the outermost span finishes and prints.
thread_local!(static MESSAGES: RefCell<Vec<Message>> = RefCell::new(Vec::new()));

// (nesting depth, elapsed milliseconds, description)
type Message = (usize, u64, String);

/// Guard for a profiling span: records elapsed time when dropped.
pub struct Profiler {
    desc: String,
}
+
/// Profiling depth requested via `CARGO_PROFILE`, if the variable is set
/// and parses as an integer level.
fn enabled_level() -> Option<usize> {
    let raw = env::var("CARGO_PROFILE").ok()?;
    raw.parse().ok()
}
+
/// Begin a profiling span described by `desc`.
///
/// Returns a guard whose `Drop` records the elapsed time. When profiling
/// is disabled (no valid `CARGO_PROFILE`), the guard is inert.
pub fn start<T: fmt::Display>(desc: T) -> Profiler {
    if enabled_level().is_none() {
        return Profiler {
            desc: String::new(),
        };
    }

    // Push this span's start time; the matching pop happens in `Drop`.
    PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now()));

    Profiler {
        desc: desc.to_string(),
    }
}
+
impl Drop for Profiler {
    // Record this span's elapsed time; when the outermost span closes,
    // print the whole buffered tree of timings to stdout.
    fn drop(&mut self) {
        let enabled = match enabled_level() {
            Some(i) => i,
            None => return,
        };

        // Pop the start time pushed by `start`; the remaining stack depth
        // is the nesting level of the span that just ended.
        let (start, stack_len) = PROFILE_STACK.with(|stack| {
            let mut stack = stack.borrow_mut();
            let start = stack.pop().unwrap();
            (start, stack.len())
        });
        let duration = start.elapsed();
        let duration_ms =
            duration.as_secs() * 1000 + u64::from(duration.subsec_millis());

        let msg = (
            stack_len,
            duration_ms,
            mem::replace(&mut self.desc, String::new()),
        );
        MESSAGES.with(|msgs| msgs.borrow_mut().push(msg));

        if stack_len == 0 {
            // Recursively print every message recorded at depth `lvl`,
            // indenting by level and interleaving each span's children
            // (the messages recorded just before it) beneath it.
            fn print(lvl: usize, msgs: &[Message], enabled: usize, stdout: &mut StdoutLock) {
                if lvl > enabled {
                    return;
                }
                let mut last = 0;
                for (i, &(l, time, ref msg)) in msgs.iter().enumerate() {
                    if l != lvl {
                        continue;
                    }
                    writeln!(
                        stdout,
                        "{} {:6}ms - {}",
                        repeat("    ").take(lvl + 1).collect::<String>(),
                        time,
                        msg
                    ).expect("printing profiling info to stdout");

                    print(lvl + 1, &msgs[last..i], enabled, stdout);
                    last = i;
                }
            }
            let stdout = stdout();
            MESSAGES.with(|msgs| {
                let mut msgs = msgs.borrow_mut();
                print(0, &msgs, enabled, &mut stdout.lock());
                msgs.clear();
            });
        }
    }
}
diff --git a/src/cargo/util/progress.rs b/src/cargo/util/progress.rs
new file mode 100644 (file)
index 0000000..c6cedf6
--- /dev/null
@@ -0,0 +1,408 @@
+use std::cmp;
+use std::env;
+use std::time::{Duration, Instant};
+
+use core::shell::Verbosity;
+use util::{CargoResult, Config};
+
+use unicode_width::UnicodeWidthChar;
+
+/// A progress bar rendered to the shell's stderr.
+///
+/// `state` is `None` when progress reporting is disabled (quiet verbosity,
+/// `TERM=dumb`, no detectable terminal width, or an explicit `disable()`),
+/// in which case every method is a no-op.
+pub struct Progress<'cfg> {
+    state: Option<State<'cfg>>,
+}
+
+/// How the statistics at the right-hand end of the bar are rendered.
+pub enum ProgressStyle {
+    /// Render as a percentage, e.g. ` 42.00%`.
+    Percentage,
+    /// Render as a `cur/max` ratio, e.g. ` 3/4`.
+    Ratio,
+}
+
+/// Rate limiter for redraws: a long delay before the first draw (so
+/// short-lived operations never flicker), then a shorter interval between
+/// subsequent draws. See `Throttle::allowed` for the concrete timings.
+struct Throttle {
+    first: bool,
+    last_update: Instant,
+}
+
+/// Live state of an enabled progress bar.
+struct State<'cfg> {
+    config: &'cfg Config,
+    format: Format,
+    /// Status header printed before the bar (the operation's name).
+    name: String,
+    /// Set once `cur == max`; subsequent ticks become no-ops.
+    done: bool,
+    throttle: Throttle,
+}
+
+/// Layout parameters for rendering the bar itself.
+struct Format {
+    style: ProgressStyle,
+    /// Current terminal width (refreshed via `try_update_max_width`).
+    max_width: usize,
+    /// Hard cap on how wide the bar is ever drawn.
+    max_print: usize,
+}
+
+impl<'cfg> Progress<'cfg> {
+    /// Creates a progress bar with the given status `name` and rendering
+    /// `style`. Returns a disabled (no-op) bar when the shell is quiet,
+    /// `TERM=dumb`, or the stderr width cannot be determined.
+    pub fn with_style(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> {
+        // report no progress when -q (for quiet) or TERM=dumb are set
+        let dumb = match env::var("TERM") {
+            Ok(term) => term == "dumb",
+            Err(_) => false,
+        };
+        if cfg.shell().verbosity() == Verbosity::Quiet || dumb {
+            return Progress { state: None };
+        }
+
+        Progress {
+            // `err_width()` returning `None` (no terminal) also disables us.
+            state: cfg.shell().err_width().map(|n| State {
+                config: cfg,
+                format: Format {
+                    style,
+                    max_width: n,
+                    max_print: 80,
+                },
+                name: name.to_string(),
+                done: false,
+                throttle: Throttle::new(),
+            }),
+        }
+    }
+
+    /// Permanently turns this bar into a no-op.
+    pub fn disable(&mut self) {
+        self.state = None;
+    }
+
+    /// Whether this bar will actually draw anything.
+    pub fn is_enabled(&self) -> bool {
+        self.state.is_some()
+    }
+
+    /// Creates a percentage-style progress bar (see `with_style`).
+    pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> {
+        Self::with_style(name, ProgressStyle::Percentage, cfg)
+    }
+
+    /// Updates the bar to `cur` out of `max`, subject to throttling.
+    pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> {
+        let s = match &mut self.state {
+            Some(s) => s,
+            None => return Ok(()),
+        };
+
+        // Don't update too often as it can cause excessive performance loss
+        // just putting stuff onto the terminal. We also want to avoid
+        // flickering by not drawing anything that goes away too quickly. As a
+        // result we've got two branches here:
+        //
+        // 1. If we haven't drawn anything, we wait for a period of time to
+        //    actually start drawing to the console. This ensures that
+        //    short-lived operations don't flicker on the console. Currently
+        //    there's a 500ms delay to when we first draw something.
+        // 2. If we've drawn something, then we rate limit ourselves to only
+        //    draw to the console every so often. Currently there's a 100ms
+        //    delay between updates.
+        if !s.throttle.allowed() {
+            return Ok(())
+        }
+
+        s.tick(cur, max, "")
+    }
+
+    /// Like `tick`, but bypasses throttling and appends `msg` after the bar.
+    pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
+        match self.state {
+            Some(ref mut s) => s.tick(cur, max, msg),
+            None => Ok(()),
+        }
+    }
+
+    /// Returns whether the throttle would permit a draw right now. Note this
+    /// consumes the throttle slot (calls `Throttle::allowed`).
+    pub fn update_allowed(&mut self) -> bool {
+        match &mut self.state {
+            Some(s) => s.throttle.allowed(),
+            None => false,
+        }
+    }
+
+    /// Immediately prints the status header followed by `msg` (no bar).
+    pub fn print_now(&mut self, msg: &str) -> CargoResult<()> {
+        match &mut self.state {
+            Some(s) => s.print("", msg),
+            None => Ok(()),
+        }
+    }
+
+    /// Blanks out the current progress line on the terminal.
+    pub fn clear(&mut self) {
+        if let Some(ref mut s) = self.state {
+            s.clear();
+        }
+    }
+}
+
+impl Throttle {
+    fn new() -> Throttle {
+        Throttle {
+            first: true,
+            last_update: Instant::now(),
+        }
+    }
+
+    /// Returns `true` when enough time has passed to permit another draw,
+    /// and resets the timer. The first draw is delayed 500ms (so short
+    /// operations never show a bar); afterwards draws are limited to one
+    /// every 100ms.
+    fn allowed(&mut self) -> bool {
+        if self.first {
+            let delay = Duration::from_millis(500);
+            if self.last_update.elapsed() < delay {
+                return false
+            }
+        } else {
+            let interval = Duration::from_millis(100);
+            if self.last_update.elapsed() < interval {
+                return false
+            }
+        }
+        self.update();
+        true
+    }
+
+    /// Marks a draw as having just happened, restarting the interval clock.
+    fn update(&mut self) {
+        self.first = false;
+        self.last_update = Instant::now();
+    }
+}
+
+impl<'cfg> State<'cfg> {
+    /// Renders and prints the bar for `cur`/`max` with trailing `msg`.
+    /// Once `cur == max` (for nonzero `max`) the bar is marked done and all
+    /// further ticks are ignored.
+    fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
+        if self.done {
+            return Ok(());
+        }
+
+        if max > 0 && cur == max {
+            self.done = true;
+        }
+
+        // Write out a pretty header, then the progress bar itself, and then
+        // return back to the beginning of the line for the next print.
+        self.try_update_max_width();
+        if let Some(pbar) = self.format.progress(cur, max) {
+            self.print(&pbar, msg)?;
+        }
+        Ok(())
+    }
+
+    /// Prints the status header, then `prefix` + rendered `msg`, padded to
+    /// the terminal width and ending with `\r` so the next print overwrites
+    /// this line in place.
+    fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> {
+        self.throttle.update();
+        self.try_update_max_width();
+
+        // make sure we have enough room for the header
+        if self.format.max_width < 15 {
+            return Ok(())
+        }
+        self.config.shell().status_header(&self.name)?;
+        let mut line = prefix.to_string();
+        self.format.render(&mut line, msg);
+
+        // Pad with spaces so leftovers from a previous, longer line are
+        // erased when we overwrite it.
+        while line.len() < self.format.max_width - 15 {
+            line.push(' ');
+        }
+
+        write!(self.config.shell().err(), "{}\r", line)?;
+        Ok(())
+    }
+
+    /// Overwrites the current line with spaces. Write errors are ignored
+    /// (`drop`) since clearing is best-effort.
+    fn clear(&mut self) {
+        self.try_update_max_width();
+        let blank = " ".repeat(self.format.max_width);
+        drop(write!(self.config.shell().err(), "{}\r", blank));
+    }
+
+    /// Re-reads the terminal width in case the window was resized.
+    fn try_update_max_width(&mut self) {
+        if let Some(n) = self.config.shell().err_width() {
+            self.format.max_width = n;
+        }
+    }
+}
+
+impl Format {
+    /// Renders the `[===>   ]`-style bar plus trailing stats for `cur`/`max`.
+    /// Returns `None` when the terminal is too narrow to fit the bar.
+    fn progress(&self, cur: usize, max: usize) -> Option<String> {
+        // Render the percentage at the far right and then figure how long the
+        // progress bar is
+        let pct = (cur as f64) / (max as f64);
+        // Guard against max == 0 (0/0 is NaN): treat as 0% progress.
+        let pct = if !pct.is_finite() { 0.0 } else { pct };
+        let stats = match self.style {
+            ProgressStyle::Percentage => format!(" {:6.02}%", pct * 100.0),
+            ProgressStyle::Ratio => format!(" {}/{}", cur, max),
+        };
+        let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */;
+        let display_width = match self.width().checked_sub(extra_len) {
+            Some(n) => n,
+            None => return None,
+        };
+
+        let mut string = String::with_capacity(self.max_width);
+        string.push('[');
+        let hashes = display_width as f64 * pct;
+        let hashes = hashes as usize;
+
+        // Draw the `===>`
+        if hashes > 0 {
+            for _ in 0..hashes - 1 {
+                string.push_str("=");
+            }
+            // The arrow head becomes `=` only when complete.
+            if cur == max {
+                string.push_str("=");
+            } else {
+                string.push_str(">");
+            }
+        }
+
+        // Draw the empty space we have left to do
+        for _ in 0..(display_width - hashes) {
+            string.push_str(" ");
+        }
+        string.push_str("]");
+        string.push_str(&stats);
+
+        Some(string)
+    }
+
+    /// Appends `msg` to `string`, truncating with `...` if it would exceed
+    /// the available width. Uses Unicode display widths so wide (CJK) and
+    /// zero-width (combining) characters are measured correctly.
+    // NOTE(review): `self.max_width - string.len() - 15` is an unchecked
+    // usize subtraction; callers appear to guarantee string fits within
+    // max_width (progress() subtracts the same 15-col header) — confirm.
+    fn render(&self, string: &mut String, msg: &str) {
+        let mut avail_msg_len = self.max_width - string.len() - 15;
+        // Last position at which an ellipsis would still fit.
+        let mut ellipsis_pos = 0;
+        if avail_msg_len <= 3 {
+            return
+        }
+        for c in msg.chars() {
+            // Control/zero-width characters count as width 0.
+            let display_width = c.width().unwrap_or(0);
+            if avail_msg_len >= display_width {
+                avail_msg_len -= display_width;
+                string.push(c);
+                if avail_msg_len >= 3 {
+                    ellipsis_pos = string.len();
+                }
+            } else {
+                // Out of room: rewind to where "..." still fits and stop.
+                string.truncate(ellipsis_pos);
+                string.push_str("...");
+                break;
+            }
+        }
+    }
+
+    /// Test helper: full bar + rendered message in one string.
+    #[cfg(test)]
+    fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> {
+        let mut ret = self.progress(cur, max)?;
+        self.render(&mut ret, msg);
+        Some(ret)
+    }
+
+    /// Effective drawing width: terminal width capped at `max_print`.
+    fn width(&self) -> usize {
+        cmp::min(self.max_width, self.max_print)
+    }
+}
+
+// Ensure the progress line is wiped from the terminal when the bar goes away.
+impl<'cfg> Drop for State<'cfg> {
+    fn drop(&mut self) {
+        self.clear();
+    }
+}
+
+// Ratio-style rendering: bar shape, truncation with `...`, and Unicode
+// width handling (zero-width combining marks, double-width CJK).
+#[test]
+fn test_progress_status() {
+    let format = Format {
+        style: ProgressStyle::Ratio,
+        max_print: 40,
+        max_width: 60,
+    };
+    assert_eq!(
+        format.progress_status(0, 4, ""),
+        Some("[                   ] 0/4".to_string())
+    );
+    assert_eq!(
+        format.progress_status(1, 4, ""),
+        Some("[===>               ] 1/4".to_string())
+    );
+    assert_eq!(
+        format.progress_status(2, 4, ""),
+        Some("[========>          ] 2/4".to_string())
+    );
+    assert_eq!(
+        format.progress_status(3, 4, ""),
+        Some("[=============>     ] 3/4".to_string())
+    );
+    assert_eq!(
+        format.progress_status(4, 4, ""),
+        Some("[===================] 4/4".to_string())
+    );
+
+    // Wider stats (4-digit ratio) shrink the bar itself.
+    assert_eq!(
+        format.progress_status(3999, 4000, ""),
+        Some("[===========> ] 3999/4000".to_string())
+    );
+    assert_eq!(
+        format.progress_status(4000, 4000, ""),
+        Some("[=============] 4000/4000".to_string())
+    );
+
+    assert_eq!(
+        format.progress_status(3, 4, ": short message"),
+        Some("[=============>     ] 3/4: short message".to_string())
+    );
+    assert_eq!(
+        format.progress_status(3, 4, ": msg thats just fit"),
+        Some("[=============>     ] 3/4: msg thats just fit".to_string())
+    );
+    // One character over the limit triggers the `...` ellipsis.
+    assert_eq!(
+        format.progress_status(3, 4, ": msg that's just fit"),
+        Some("[=============>     ] 3/4: msg that's just...".to_string())
+    );
+
+    // combining diacritics have width zero and thus can fit max_width.
+    let zalgo_msg = "z̸̧̢̗͉̝̦͍̱ͧͦͨ̑̅̌ͥ́͢a̢ͬͨ̽ͯ̅̑ͥ͋̏̑ͫ̄͢͏̫̝̪̤͎̱̣͍̭̞̙̱͙͍̘̭͚l̶̡̛̥̝̰̭̹̯̯̞̪͇̱̦͙͔̘̼͇͓̈ͨ͗ͧ̓͒ͦ̀̇ͣ̈ͭ͊͛̃̑͒̿̕͜g̸̷̢̩̻̻͚̠͓̞̥͐ͩ͌̑ͥ̊̽͋͐̐͌͛̐̇̑ͨ́ͅo͙̳̣͔̰̠̜͕͕̞̦̙̭̜̯̹̬̻̓͑ͦ͋̈̉͌̃ͯ̀̂͠ͅ ̸̡͎̦̲̖̤̺̜̮̱̰̥͔̯̅̏ͬ̂ͨ̋̃̽̈́̾̔̇ͣ̚͜͜h̡ͫ̐̅̿̍̀͜҉̛͇̭̹̰̠͙̞ẽ̶̙̹̳̖͉͎̦͂̋̓ͮ̔ͬ̐̀͂̌͑̒͆̚͜͠ ͓͓̟͍̮̬̝̝̰͓͎̼̻ͦ͐̾̔͒̃̓͟͟c̮̦͍̺͈͚̯͕̄̒͐̂͊̊͗͊ͤͣ̀͘̕͝͞o̶͍͚͍̣̮͌ͦ̽̑ͩ̅ͮ̐̽̏͗́͂̅ͪ͠m̷̧͖̻͔̥̪̭͉͉̤̻͖̩̤͖̘ͦ̂͌̆̂ͦ̒͊ͯͬ͊̉̌ͬ͝͡e̵̹̣͍̜̺̤̤̯̫̹̠̮͎͙̯͚̰̼͗͐̀̒͂̉̀̚͝͞s̵̲͍͙͖̪͓͓̺̱̭̩̣͖̣ͤͤ͂̎̈͗͆ͨͪ̆̈͗͝͠";
+    assert_eq!(
+        format.progress_status(3, 4, zalgo_msg),
+        Some("[=============>     ] 3/4".to_string() + zalgo_msg)
+    );
+
+    // some non-ASCII ellipsize test
+    assert_eq!(
+        format.progress_status(3, 4, "_123456789123456e\u{301}\u{301}8\u{301}90a"),
+        Some("[=============>     ] 3/4_123456789123456e\u{301}\u{301}...".to_string())
+    );
+    assert_eq!(
+        format.progress_status(3, 4, ":每個漢字佔據了兩個字元"),
+        Some("[=============>     ] 3/4:每個漢字佔據了...".to_string())
+    );
+}
+
+// Percentage-style rendering: fixed-width ` xx.xx%` stats column.
+#[test]
+fn test_progress_status_percentage() {
+    let format = Format {
+        style: ProgressStyle::Percentage,
+        max_print: 40,
+        max_width: 60,
+    };
+    assert_eq!(
+        format.progress_status(0, 77, ""),
+        Some("[               ]   0.00%".to_string())
+    );
+    assert_eq!(
+        format.progress_status(1, 77, ""),
+        Some("[               ]   1.30%".to_string())
+    );
+    assert_eq!(
+        format.progress_status(76, 77, ""),
+        Some("[=============> ]  98.70%".to_string())
+    );
+    assert_eq!(
+        format.progress_status(77, 77, ""),
+        Some("[===============] 100.00%".to_string())
+    );
+}
+
+// Boundary behavior on narrow terminals: width 25 yields an empty (but
+// present) bar; one column fewer and `progress()` gives up with `None`.
+#[test]
+fn test_progress_status_too_short() {
+    let format = Format {
+        style: ProgressStyle::Percentage,
+        max_print: 25,
+        max_width: 25,
+    };
+    assert_eq!(
+        format.progress_status(1, 1, ""),
+        Some("[] 100.00%".to_string())
+    );
+
+    let format = Format {
+        style: ProgressStyle::Percentage,
+        max_print: 24,
+        max_width: 24,
+    };
+    assert_eq!(
+        format.progress_status(1, 1, ""),
+        None
+    );
+}
diff --git a/src/cargo/util/read2.rs b/src/cargo/util/read2.rs
new file mode 100644 (file)
index 0000000..13a50a7
--- /dev/null
@@ -0,0 +1,185 @@
+pub use self::imp::read2;
+
+// Unix implementation: drain a child's stdout and stderr concurrently
+// without deadlocking, using non-blocking reads driven by poll(2). The
+// `data` callback is invoked after each read with (is_stdout, buffer, done).
+#[cfg(unix)]
+mod imp {
+    use std::io::prelude::*;
+    use std::io;
+    use std::mem;
+    use std::os::unix::prelude::*;
+    use std::process::{ChildStderr, ChildStdout};
+    use libc;
+
+    pub fn read2(
+        mut out_pipe: ChildStdout,
+        mut err_pipe: ChildStderr,
+        data: &mut FnMut(bool, &mut Vec<u8>, bool),
+    ) -> io::Result<()> {
+        // Switch both pipes to non-blocking so `read_to_end` returns with
+        // WouldBlock instead of stalling when a pipe runs dry.
+        unsafe {
+            libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+            libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+        }
+
+        let mut out_done = false;
+        let mut err_done = false;
+        let mut out = Vec::new();
+        let mut err = Vec::new();
+
+        let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
+        fds[0].fd = out_pipe.as_raw_fd();
+        fds[0].events = libc::POLLIN;
+        fds[1].fd = err_pipe.as_raw_fd();
+        fds[1].events = libc::POLLIN;
+        let mut nfds = 2;
+        // Index of the stderr entry in `fds`; shifts to 0 once stdout is done
+        // and its slot is reused for stderr.
+        let mut errfd = 1;
+
+        while nfds > 0 {
+            // wait for either pipe to become readable using `poll`
+            let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) };
+            if r == -1 {
+                let err = io::Error::last_os_error();
+                // EINTR (e.g. a signal arrived) is not fatal; retry.
+                if err.kind() == io::ErrorKind::Interrupted {
+                    continue;
+                }
+                return Err(err);
+            }
+
+            // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
+            // EAGAIN. If we hit EOF, then this will happen because the underlying
+            // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
+            // this case we flip the other fd back into blocking mode and read
+            // whatever's leftover on that file descriptor.
+            let handle = |res: io::Result<_>| match res {
+                Ok(_) => Ok(true),  // EOF reached for this pipe
+                Err(e) => {
+                    if e.kind() == io::ErrorKind::WouldBlock {
+                        Ok(false)   // pipe merely drained for now
+                    } else {
+                        Err(e)
+                    }
+                }
+            };
+            if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
+                err_done = true;
+                nfds -= 1;
+            }
+            data(false, &mut err, err_done);
+            if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
+                out_done = true;
+                // Reuse slot 0 for stderr so poll only watches live fds.
+                fds[0].fd = err_pipe.as_raw_fd();
+                errfd = 0;
+                nfds -= 1;
+            }
+            data(true, &mut out, out_done);
+        }
+        Ok(())
+    }
+}
+
+// Windows implementation: the same contract as the Unix version, but built
+// on overlapped (asynchronous) reads of the child's pipes dispatched through
+// an I/O completion port (token 0 = stdout, token 1 = stderr).
+#[cfg(windows)]
+mod imp {
+    extern crate miow;
+    extern crate winapi;
+
+    use std::io;
+    use std::os::windows::prelude::*;
+    use std::process::{ChildStderr, ChildStdout};
+    use std::slice;
+
+    use self::miow::iocp::{CompletionPort, CompletionStatus};
+    use self::miow::pipe::NamedPipe;
+    use self::miow::Overlapped;
+    use self::winapi::shared::winerror::ERROR_BROKEN_PIPE;
+
+    /// One child pipe with its destination buffer and in-flight overlapped
+    /// read state.
+    struct Pipe<'a> {
+        dst: &'a mut Vec<u8>,
+        overlapped: Overlapped,
+        pipe: NamedPipe,
+        done: bool,
+    }
+
+    pub fn read2(
+        out_pipe: ChildStdout,
+        err_pipe: ChildStderr,
+        data: &mut FnMut(bool, &mut Vec<u8>, bool),
+    ) -> io::Result<()> {
+        let mut out = Vec::new();
+        let mut err = Vec::new();
+
+        let port = CompletionPort::new(1)?;
+        // Token 0 identifies stdout completions, token 1 stderr.
+        port.add_handle(0, &out_pipe)?;
+        port.add_handle(1, &err_pipe)?;
+
+        unsafe {
+            let mut out_pipe = Pipe::new(out_pipe, &mut out);
+            let mut err_pipe = Pipe::new(err_pipe, &mut err);
+
+            // Kick off the initial overlapped read on each pipe.
+            out_pipe.read()?;
+            err_pipe.read()?;
+
+            let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
+
+            // Loop: wait for completions, commit the bytes, notify the
+            // callback, and re-arm the read until both pipes hit EOF.
+            while !out_pipe.done || !err_pipe.done {
+                for status in port.get_many(&mut status, None)? {
+                    if status.token() == 0 {
+                        out_pipe.complete(status);
+                        data(true, out_pipe.dst, out_pipe.done);
+                        out_pipe.read()?;
+                    } else {
+                        err_pipe.complete(status);
+                        data(false, err_pipe.dst, err_pipe.done);
+                        err_pipe.read()?;
+                    }
+                }
+            }
+
+            Ok(())
+        }
+    }
+
+    impl<'a> Pipe<'a> {
+        unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
+            Pipe {
+                dst,
+                pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
+                overlapped: Overlapped::zero(),
+                done: false,
+            }
+        }
+
+        /// Starts an overlapped read into the spare capacity of `dst`.
+        /// A broken pipe means the child closed its end: that's EOF, not
+        /// an error.
+        unsafe fn read(&mut self) -> io::Result<()> {
+            let dst = slice_to_end(self.dst);
+            match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
+                Ok(_) => Ok(()),
+                Err(e) => {
+                    if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+                        self.done = true;
+                        Ok(())
+                    } else {
+                        Err(e)
+                    }
+                }
+            }
+        }
+
+        /// Commits the bytes a completed read wrote into `dst`'s spare
+        /// capacity by extending its length; zero bytes transferred = EOF.
+        unsafe fn complete(&mut self, status: &CompletionStatus) {
+            let prev = self.dst.len();
+            self.dst.set_len(prev + status.bytes_transferred() as usize);
+            if status.bytes_transferred() == 0 {
+                self.done = true;
+            }
+        }
+    }
+
+    /// Returns the uninitialized spare capacity of `v` as a mutable slice,
+    /// reserving a little room first so the slice is never empty.
+    unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
+        if v.capacity() == 0 {
+            v.reserve(16);
+        }
+        if v.capacity() == v.len() {
+            v.reserve(1);
+        }
+        slice::from_raw_parts_mut(
+            v.as_mut_ptr().offset(v.len() as isize),
+            v.capacity() - v.len(),
+        )
+    }
+}
diff --git a/src/cargo/util/rustc.rs b/src/cargo/util/rustc.rs
new file mode 100644 (file)
index 0000000..0852d63
--- /dev/null
@@ -0,0 +1,277 @@
+#![allow(deprecated)] // for SipHasher
+
+use std::path::{Path, PathBuf};
+use std::hash::{Hash, Hasher, SipHasher};
+use std::collections::hash_map::{Entry, HashMap};
+use std::sync::Mutex;
+use std::process::Stdio;
+use std::env;
+
+use serde_json;
+
+use util::{self, internal, profile, CargoResult, ProcessBuilder};
+use util::paths;
+
+/// Information on the `rustc` executable
+#[derive(Debug)]
+pub struct Rustc {
+    /// The location of the exe
+    pub path: PathBuf,
+    /// An optional program that will be passed the path of the rust exe as its first argument, and
+    /// rustc args following this.
+    pub wrapper: Option<PathBuf>,
+    /// Verbose version information (the output of `rustc -vV`)
+    pub verbose_version: String,
+    /// The host triple (arch-platform-OS), this comes from verbose_version.
+    pub host: String,
+    /// On-disk cache of rustc invocation outputs, keyed by a fingerprint of
+    /// the compiler; behind a `Mutex` so `&self` methods can use it.
+    cache: Mutex<Cache>,
+}
+
+impl Rustc {
+    /// Run the compiler at `path` to learn various pieces of information about
+    /// it, with an optional wrapper.
+    ///
+    /// If successful this function returns a description of the compiler along
+    /// with a list of its capabilities.
+    pub fn new(
+        path: PathBuf,
+        wrapper: Option<PathBuf>,
+        rustup_rustc: &Path,
+        cache_location: Option<PathBuf>,
+    ) -> CargoResult<Rustc> {
+        let _p = profile::start("Rustc::new");
+
+        let mut cache = Cache::load(&path, rustup_rustc, cache_location);
+
+        // `rustc -vV` is cached: it costs ~100ms per run otherwise.
+        let mut cmd = util::process(&path);
+        cmd.arg("-vV");
+        let verbose_version = cache.cached_output(&cmd)?.0;
+
+        // Extract the host triple from the `host: <triple>` line.
+        let host = {
+            let triple = verbose_version
+                .lines()
+                .find(|l| l.starts_with("host: "))
+                .map(|l| &l[6..])
+                .ok_or_else(|| internal("rustc -v didn't have a line for `host:`"))?;
+            triple.to_string()
+        };
+
+        Ok(Rustc {
+            path,
+            wrapper,
+            verbose_version,
+            host,
+            cache: Mutex::new(cache),
+        })
+    }
+
+    /// Get a process builder set up to use the found rustc version, with a wrapper if Some
+    pub fn process(&self) -> ProcessBuilder {
+        match self.wrapper {
+            // An empty wrapper path is treated the same as no wrapper.
+            Some(ref wrapper) if !wrapper.as_os_str().is_empty() => {
+                let mut cmd = util::process(wrapper);
+                cmd.arg(&self.path);
+                cmd
+            }
+            _ => self.process_no_wrapper()
+        }
+    }
+
+    /// Process builder for rustc itself, ignoring any configured wrapper.
+    pub fn process_no_wrapper(&self) -> ProcessBuilder {
+        util::process(&self.path)
+    }
+
+    /// Runs `cmd` (or returns its memoized result): (stdout, stderr).
+    pub fn cached_output(&self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> {
+        self.cache.lock().unwrap().cached_output(cmd)
+    }
+
+    /// Runs `cmd` (or returns its memoized result): whether it succeeded.
+    pub fn cached_success(&self, cmd: &ProcessBuilder) -> CargoResult<bool> {
+        self.cache.lock().unwrap().cached_success(cmd)
+    }
+}
+
+/// It is a well known that `rustc` is not the fastest compiler in the world.
+/// What is less known is that even `rustc --version --verbose` takes about a
+/// hundred milliseconds! Because we need compiler version info even for no-op
+/// builds, we cache it here, based on compiler's mtime and rustup's current
+/// toolchain.
+///
+/// https://github.com/rust-lang/cargo/issues/5315
+/// https://github.com/rust-lang/rust/issues/49761
+#[derive(Debug)]
+struct Cache {
+    /// Where the cache file lives; `None` disables persistence entirely.
+    cache_location: Option<PathBuf>,
+    /// Whether `data` diverged from disk and must be rewritten on drop.
+    dirty: bool,
+    data: CacheData,
+}
+
+/// The JSON-serialized contents of the cache file.
+#[derive(Serialize, Deserialize, Debug, Default)]
+struct CacheData {
+    /// Fingerprint of the compiler these entries were recorded with; a
+    /// mismatch invalidates the whole cache.
+    rustc_fingerprint: u64,
+    /// Command fingerprint -> (stdout, stderr).
+    outputs: HashMap<u64, (String, String)>,
+    /// Command fingerprint -> exit success.
+    successes: HashMap<u64, bool>,
+}
+
+impl Cache {
+    /// Loads the cache from `cache_location`, discarding it if the compiler
+    /// fingerprint changed or the file is unreadable. If no location is
+    /// given or the fingerprint can't be computed, returns a disabled
+    /// (in-memory only, never persisted) cache.
+    fn load(rustc: &Path, rustup_rustc: &Path, cache_location: Option<PathBuf>) -> Cache {
+        match (cache_location, rustc_fingerprint(rustc, rustup_rustc)) {
+            (Some(cache_location), Ok(rustc_fingerprint)) => {
+                let empty = CacheData {
+                    rustc_fingerprint,
+                    outputs: HashMap::new(),
+                    successes: HashMap::new(),
+                };
+                // Assume dirty (needs a write-back) unless we successfully
+                // reuse the on-disk data unchanged.
+                let mut dirty = true;
+                let data = match read(&cache_location) {
+                    Ok(data) => {
+                        if data.rustc_fingerprint == rustc_fingerprint {
+                            info!("reusing existing rustc info cache");
+                            dirty = false;
+                            data
+                        } else {
+                            info!("different compiler, creating new rustc info cache");
+                            empty
+                        }
+                    }
+                    Err(e) => {
+                        info!("failed to read rustc info cache: {}", e);
+                        empty
+                    }
+                };
+                return Cache {
+                    cache_location: Some(cache_location),
+                    dirty,
+                    data,
+                };
+
+                // Parse the cache file as JSON.
+                fn read(path: &Path) -> CargoResult<CacheData> {
+                    let json = paths::read(path)?;
+                    Ok(serde_json::from_str(&json)?)
+                }
+            }
+            (_, fingerprint) => {
+                if let Err(e) = fingerprint {
+                    warn!("failed to calculate rustc fingerprint: {}", e);
+                }
+                info!("rustc info cache disabled");
+                Cache {
+                    cache_location: None,
+                    dirty: false,
+                    data: CacheData::default(),
+                }
+            }
+        }
+    }
+
+    /// Returns the memoized (stdout, stderr) for `cmd`, running it on a
+    /// cache miss. Errors if the command fails or emits non-UTF-8 output.
+    fn cached_output(&mut self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> {
+        let key = process_fingerprint(cmd);
+        match self.data.outputs.entry(key) {
+            Entry::Occupied(entry) => {
+                info!("rustc info cache hit");
+                Ok(entry.get().clone())
+            }
+            Entry::Vacant(entry) => {
+                info!("rustc info cache miss");
+                let output = cmd.exec_with_output()?;
+                let stdout = String::from_utf8(output.stdout)
+                    .map_err(|_| internal("rustc didn't return utf8 output"))?;
+                let stderr = String::from_utf8(output.stderr)
+                    .map_err(|_| internal("rustc didn't return utf8 output"))?;
+                let output = (stdout, stderr);
+                entry.insert(output.clone());
+                self.dirty = true;
+                Ok(output)
+            }
+        }
+    }
+
+    /// Returns the memoized success/failure of `cmd`, running it (with
+    /// stdout/stderr discarded) on a cache miss.
+    fn cached_success(&mut self, cmd: &ProcessBuilder) -> CargoResult<bool> {
+        let key = process_fingerprint(cmd);
+        match self.data.successes.entry(key) {
+            Entry::Occupied(entry) => {
+                info!("rustc info cache hit");
+                Ok(*entry.get())
+            }
+            Entry::Vacant(entry) => {
+                info!("rustc info cache miss");
+                let success = cmd
+                    .build_command()
+                    .stdout(Stdio::null())
+                    .stderr(Stdio::null())
+                    .status()?
+                    .success();
+                entry.insert(success);
+                self.dirty = true;
+                Ok(success)
+            }
+        }
+    }
+}
+
+// Persist the cache back to disk on drop, but only if something changed and
+// a cache location was configured. Write failures are logged, not fatal.
+impl Drop for Cache {
+    fn drop(&mut self) {
+        if !self.dirty {
+            return;
+        }
+        if let Some(ref path) = self.cache_location {
+            let json = serde_json::to_string(&self.data).unwrap();
+            match paths::write(path, json.as_bytes()) {
+                Ok(()) => info!("updated rustc info cache"),
+                Err(e) => warn!("failed to update rustc info cache: {}", e),
+            }
+        }
+    }
+}
+
+/// Computes a fingerprint identifying the effective compiler: a hash of the
+/// resolved rustc path and its mtime, mixed with rustup's toolchain info
+/// when rustc is managed by rustup (so `rustup update` or switching
+/// toolchains invalidates the cache). Errors if rustc looks rustup-managed
+/// but rustup's env vars are absent, since the fingerprint would be
+/// unreliable then.
+fn rustc_fingerprint(path: &Path, rustup_rustc: &Path) -> CargoResult<u64> {
+    // Fixed keys: this hash only needs stability within one machine/user,
+    // not DoS resistance.
+    let mut hasher = SipHasher::new_with_keys(0, 0);
+
+    let path = paths::resolve_executable(path)?;
+    path.hash(&mut hasher);
+
+    paths::mtime(&path)?.hash(&mut hasher);
+
+    // Rustup can change the effective compiler without touching
+    // the `rustc` binary, so we try to account for this here.
+    // If we see rustup's env vars, we mix them into the fingerprint,
+    // but we also mix in the mtime of the actual compiler (and not
+    // the rustup shim at `~/.cargo/bin/rustup`), because `RUSTUP_TOOLCHAIN`
+    // could be just `stable-x86_64-unknown-linux-gnu`, i.e, it could
+    // not mention the version of Rust at all, which changes after
+    // `rustup update`.
+    //
+    // If we don't see rustup env vars, but it looks like the compiler
+    // is managed by rustup, we conservatively bail out.
+    let maybe_rustup = rustup_rustc == path;
+    match (
+        maybe_rustup,
+        env::var("RUSTUP_HOME"),
+        env::var("RUSTUP_TOOLCHAIN"),
+    ) {
+        (_, Ok(rustup_home), Ok(rustup_toolchain)) => {
+            debug!("adding rustup info to rustc fingerprint");
+            rustup_toolchain.hash(&mut hasher);
+            rustup_home.hash(&mut hasher);
+            // Hash the mtime of the real toolchain binary, not the shim.
+            let real_rustc = Path::new(&rustup_home)
+                .join("toolchains")
+                .join(rustup_toolchain)
+                .join("bin")
+                .join("rustc")
+                .with_extension(env::consts::EXE_EXTENSION);
+            paths::mtime(&real_rustc)?.hash(&mut hasher);
+        }
+        (true, _, _) => bail!("probably rustup rustc, but without rustup's env vars"),
+        _ => (),
+    }
+
+    Ok(hasher.finish())
+}
+
+/// Hashes a command's arguments and environment (env sorted so ordering
+/// doesn't matter) into a cache key. The program path is intentionally
+/// excluded — the compiler identity is covered by `rustc_fingerprint`.
+fn process_fingerprint(cmd: &ProcessBuilder) -> u64 {
+    let mut hasher = SipHasher::new_with_keys(0, 0);
+    cmd.get_args().hash(&mut hasher);
+    let mut env = cmd.get_envs().iter().collect::<Vec<_>>();
+    env.sort_unstable();
+    env.hash(&mut hasher);
+    hasher.finish()
+}
diff --git a/src/cargo/util/sha256.rs b/src/cargo/util/sha256.rs
new file mode 100644 (file)
index 0000000..575d22c
--- /dev/null
@@ -0,0 +1,29 @@
+extern crate crypto_hash;
+use self::crypto_hash::{Algorithm, Hasher};
+use std::io::Write;
+
+/// Thin streaming SHA-256 wrapper around `crypto_hash::Hasher`.
+pub struct Sha256(Hasher);
+
+impl Sha256 {
+    pub fn new() -> Sha256 {
+        let hasher = Hasher::new(Algorithm::SHA256);
+        Sha256(hasher)
+    }
+
+    /// Feeds `bytes` into the hash. Write errors are deliberately ignored
+    /// (hashing into memory is not expected to fail).
+    pub fn update(&mut self, bytes: &[u8]) {
+        let _ = self.0.write_all(bytes);
+    }
+
+    /// Finalizes the hash and returns the 32-byte SHA-256 digest.
+    pub fn finish(&mut self) -> [u8; 32] {
+        let mut ret = [0u8; 32];
+        let data = self.0.finish();
+        ret.copy_from_slice(&data[..]);
+        ret
+    }
+}
+
+impl Default for Sha256 {
+    fn default() -> Self {
+        Self::new()
+    }
+}
diff --git a/src/cargo/util/to_semver.rs b/src/cargo/util/to_semver.rs
new file mode 100644 (file)
index 0000000..4ffd6e3
--- /dev/null
@@ -0,0 +1,33 @@
+use semver::Version;
+use util::errors::CargoResult;
+
+/// Conversion of various version representations into a semver `Version`.
+pub trait ToSemver {
+    fn to_semver(self) -> CargoResult<Version>;
+}
+
+// Already a `Version`: identity conversion.
+impl ToSemver for Version {
+    fn to_semver(self) -> CargoResult<Version> {
+        Ok(self)
+    }
+}
+
+// Parse a string, replacing semver's parse error with a cargo-flavored one.
+impl<'a> ToSemver for &'a str {
+    fn to_semver(self) -> CargoResult<Version> {
+        match Version::parse(self) {
+            Ok(v) => Ok(v),
+            Err(..) => Err(format_err!("cannot parse '{}' as a semver", self)),
+        }
+    }
+}
+
+// Delegate to the `&str` impl via deref.
+impl<'a> ToSemver for &'a String {
+    fn to_semver(self) -> CargoResult<Version> {
+        (**self).to_semver()
+    }
+}
+
+// Borrowed `Version`: clone into an owned one.
+impl<'a> ToSemver for &'a Version {
+    fn to_semver(self) -> CargoResult<Version> {
+        Ok(self.clone())
+    }
+}
diff --git a/src/cargo/util/to_url.rs b/src/cargo/util/to_url.rs
new file mode 100644 (file)
index 0000000..664c256
--- /dev/null
@@ -0,0 +1,23 @@
+use std::path::Path;
+
+use url::Url;
+
+use util::CargoResult;
+
+/// A type that can be converted to a Url
+pub trait ToUrl {
+    /// Performs the conversion
+    fn to_url(self) -> CargoResult<Url>;
+}
+
+// Parse a URL string, wrapping the parse error with the offending input.
+impl<'a> ToUrl for &'a str {
+    fn to_url(self) -> CargoResult<Url> {
+        Url::parse(self).map_err(|s| format_err!("invalid url `{}`: {}", self, s))
+    }
+}
+
+// Convert a filesystem path to a `file://` URL; fails for relative paths
+// (`Url::from_file_path` requires an absolute path).
+impl<'a> ToUrl for &'a Path {
+    fn to_url(self) -> CargoResult<Url> {
+        Url::from_file_path(self).map_err(|()| format_err!("invalid path url `{}`", self.display()))
+    }
+}
diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs
new file mode 100644 (file)
index 0000000..367ef8b
--- /dev/null
@@ -0,0 +1,1491 @@
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+use std::str;
+
+use semver::{self, VersionReq};
+use serde::de::{self, Deserialize};
+use serde::ser;
+use serde_ignored;
+use toml;
+use url::Url;
+
+use core::dependency::{Kind, Platform};
+use core::manifest::{LibKind, ManifestMetadata, TargetSourcePath, Warnings};
+use core::profiles::Profiles;
+use core::{Dependency, Manifest, PackageId, Summary, Target};
+use core::{Edition, EitherManifest, Feature, Features, VirtualManifest};
+use core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig};
+use sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+use util::errors::{CargoError, CargoResult, CargoResultExt, ManifestError};
+use util::paths;
+use util::{self, Config, ToUrl};
+
+mod targets;
+use self::targets::targets;
+
+/// Loads a `Cargo.toml` from disk and parses it into either a real (package)
+/// or virtual (workspace-only) manifest, also returning the nested paths the
+/// manifest references.
+///
+/// All errors are wrapped in a `ManifestError` carrying the manifest's path.
+pub fn read_manifest(
+    path: &Path,
+    source_id: &SourceId,
+    config: &Config,
+) -> Result<(EitherManifest, Vec<PathBuf>), ManifestError> {
+    trace!(
+        "read_manifest; path={}; source-id={}",
+        path.display(),
+        source_id
+    );
+    let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?;
+
+    do_read_manifest(&contents, path, source_id, config)
+        .chain_err(|| format!("failed to parse manifest at `{}`", path.display()))
+        .map_err(|err| ManifestError::new(err, path.into()))
+}
+
+/// Parses manifest contents into either a real or a virtual manifest,
+/// recording a warning for every manifest key serde did not recognize.
+fn do_read_manifest(
+    contents: &str,
+    manifest_file: &Path,
+    source_id: &SourceId,
+    config: &Config,
+) -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
+    let package_root = manifest_file.parent().unwrap();
+
+    let toml = {
+        // Prefer a path relative to the cwd in error messages when possible.
+        let pretty_filename =
+            util::without_prefix(manifest_file, config.cwd()).unwrap_or(manifest_file);
+        parse(contents, pretty_filename, config)?
+    };
+
+    // `serde_ignored` invokes the closure for each key it had to skip;
+    // collect them so they can be reported after deserialization.
+    let mut unused = BTreeSet::new();
+    let manifest: TomlManifest = serde_ignored::deserialize(toml, |path| {
+        let mut key = String::new();
+        stringify(&mut key, &path);
+        unused.insert(key);
+    })?;
+    let add_unused = |warnings: &mut Warnings| {
+        for key in unused {
+            warnings.add_warning(format!("unused manifest key: {}", key));
+            if key == "profile.debug" || key == "profiles.debug" {
+                warnings.add_warning("use `[profile.dev]` to configure debug builds".to_string());
+            }
+        }
+    };
+
+    let manifest = Rc::new(manifest);
+    // A `[package]` (or legacy `[project]`) section means a real manifest;
+    // otherwise this is a virtual, workspace-only manifest.
+    return if manifest.project.is_some() || manifest.package.is_some() {
+        let (mut manifest, paths) =
+            TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?;
+        add_unused(manifest.warnings_mut());
+        if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
+            bail!(
+                "no targets specified in the manifest\n  \
+                 either src/lib.rs, src/main.rs, a [lib] section, or \
+                 [[bin]] section must be present"
+            )
+        }
+        Ok((EitherManifest::Real(manifest), paths))
+    } else {
+        let (mut m, paths) =
+            TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?;
+        add_unused(m.warnings_mut());
+        Ok((EitherManifest::Virtual(m), paths))
+    };
+
+    // Renders a `serde_ignored` path as a dotted key, e.g. `profile.debug`.
+    fn stringify(dst: &mut String, path: &serde_ignored::Path) {
+        use serde_ignored::Path;
+
+        match *path {
+            Path::Root => {}
+            Path::Seq { parent, index } => {
+                stringify(dst, parent);
+                if !dst.is_empty() {
+                    dst.push('.');
+                }
+                dst.push_str(&index.to_string());
+            }
+            Path::Map { parent, ref key } => {
+                stringify(dst, parent);
+                if !dst.is_empty() {
+                    dst.push('.');
+                }
+                dst.push_str(key);
+            }
+            Path::Some { parent }
+            | Path::NewtypeVariant { parent }
+            | Path::NewtypeStruct { parent } => stringify(dst, parent),
+        }
+    }
+}
+
+pub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult<toml::Value> {
+    let first_error = match toml.parse() {
+        Ok(ret) => return Ok(ret),
+        Err(e) => e,
+    };
+
+    let mut second_parser = toml::de::Deserializer::new(toml);
+    second_parser.set_require_newline_after_table(false);
+    if let Ok(ret) = toml::Value::deserialize(&mut second_parser) {
+        let msg = format!(
+            "\
+TOML file found which contains invalid syntax and will soon not parse
+at `{}`.
+
+The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is
+invalid), but this file has a table header which does not have a newline after
+it. A newline needs to be added and this warning will soon become a hard error
+in the future.",
+            file.display()
+        );
+        config.shell().warn(&msg)?;
+        return Ok(ret);
+    }
+
+    let first_error = CargoError::from(first_error);
+    Err(first_error.context("could not parse input as TOML").into())
+}
+
+// All TOML target sections (`[lib]`, `[[bin]]`, `[[example]]`, `[[test]]`,
+// `[[bench]]`) share the same shape; these aliases only document intent.
+type TomlLibTarget = TomlTarget;
+type TomlBinTarget = TomlTarget;
+type TomlExampleTarget = TomlTarget;
+type TomlTestTarget = TomlTarget;
+type TomlBenchTarget = TomlTarget;
+
+/// A dependency as written in `Cargo.toml`: either a bare version string
+/// (`foo = "0.9.8"`) or a detailed table (`foo = { version = "0.9.8" }`).
+#[derive(Debug, Serialize)]
+#[serde(untagged)]
+pub enum TomlDependency {
+    Simple(String),
+    Detailed(DetailedTomlDependency),
+}
+
+impl<'de> de::Deserialize<'de> for TomlDependency {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct TomlDependencyVisitor;
+
+        impl<'de> de::Visitor<'de> for TomlDependencyVisitor {
+            type Value = TomlDependency;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str(
+                    "a version string like \"0.9.8\" or a \
+                     detailed dependency like { version = \"0.9.8\" }",
+                )
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(TomlDependency::Simple(s.to_owned()))
+            }
+
+            fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
+            where
+                V: de::MapAccess<'de>,
+            {
+                let mvd = de::value::MapAccessDeserializer::new(map);
+                DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed)
+            }
+        }
+
+        deserializer.deserialize_any(TomlDependencyVisitor)
+    }
+}
+
+/// The table form of a dependency, e.g. `{ version = "1", features = [..] }`.
+#[derive(Deserialize, Serialize, Clone, Debug, Default)]
+#[serde(rename_all = "kebab-case")]
+pub struct DetailedTomlDependency {
+    version: Option<String>,
+    registry: Option<String>,
+    registry_index: Option<String>,
+    path: Option<String>,
+    git: Option<String>,
+    branch: Option<String>,
+    tag: Option<String>,
+    rev: Option<String>,
+    features: Option<Vec<String>>,
+    optional: Option<bool>,
+    default_features: Option<bool>,
+    // Accepts the historical `default_features` (snake_case) spelling in
+    // addition to the kebab-case `default-features` above.
+    #[serde(rename = "default_features")]
+    default_features2: Option<bool>,
+    package: Option<String>,
+}
+
+/// Deserialized form of a whole `Cargo.toml` file.
+///
+/// The `*2` fields accept historical `snake_case` spellings of keys that are
+/// documented in `kebab-case`.
+#[derive(Debug, Deserialize, Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct TomlManifest {
+    cargo_features: Option<Vec<String>>,
+    // `package` and the legacy alias `project` are both accepted.
+    package: Option<Box<TomlProject>>,
+    project: Option<Box<TomlProject>>,
+    profile: Option<TomlProfiles>,
+    lib: Option<TomlLibTarget>,
+    bin: Option<Vec<TomlBinTarget>>,
+    example: Option<Vec<TomlExampleTarget>>,
+    test: Option<Vec<TomlTestTarget>>,
+    bench: Option<Vec<TomlTestTarget>>,
+    dependencies: Option<BTreeMap<String, TomlDependency>>,
+    dev_dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "dev_dependencies")]
+    dev_dependencies2: Option<BTreeMap<String, TomlDependency>>,
+    build_dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "build_dependencies")]
+    build_dependencies2: Option<BTreeMap<String, TomlDependency>>,
+    features: Option<BTreeMap<String, Vec<String>>>,
+    target: Option<BTreeMap<String, TomlPlatform>>,
+    replace: Option<BTreeMap<String, TomlDependency>>,
+    patch: Option<BTreeMap<String, BTreeMap<String, TomlDependency>>>,
+    workspace: Option<TomlWorkspace>,
+    badges: Option<BTreeMap<String, BTreeMap<String, String>>>,
+}
+
+/// The `[profile.*]` sections of a manifest.
+#[derive(Deserialize, Serialize, Clone, Debug, Default)]
+pub struct TomlProfiles {
+    pub test: Option<TomlProfile>,
+    pub doc: Option<TomlProfile>,
+    pub bench: Option<TomlProfile>,
+    pub dev: Option<TomlProfile>,
+    pub release: Option<TomlProfile>,
+}
+
+impl TomlProfiles {
+    pub fn validate(&self, features: &Features, warnings: &mut Vec<String>) -> CargoResult<()> {
+        if let Some(ref test) = self.test {
+            test.validate("test", features, warnings)?;
+        }
+        if let Some(ref doc) = self.doc {
+            doc.validate("doc", features, warnings)?;
+        }
+        if let Some(ref bench) = self.bench {
+            bench.validate("bench", features, warnings)?;
+        }
+        if let Some(ref dev) = self.dev {
+            dev.validate("dev", features, warnings)?;
+        }
+        if let Some(ref release) = self.release {
+            release.validate("release", features, warnings)?;
+        }
+        Ok(())
+    }
+}
+
+/// An `opt-level` value, stored as a string so both numeric levels and the
+/// size-optimizing levels `"s"`/`"z"` are represented uniformly.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct TomlOptLevel(pub String);
+
+impl<'de> de::Deserialize<'de> for TomlOptLevel {
+    fn deserialize<D>(d: D) -> Result<TomlOptLevel, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = TomlOptLevel;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("an optimization level")
+            }
+
+            fn visit_i64<E>(self, value: i64) -> Result<TomlOptLevel, E>
+            where
+                E: de::Error,
+            {
+                Ok(TomlOptLevel(value.to_string()))
+            }
+
+            fn visit_str<E>(self, value: &str) -> Result<TomlOptLevel, E>
+            where
+                E: de::Error,
+            {
+                if value == "s" || value == "z" {
+                    Ok(TomlOptLevel(value.to_string()))
+                } else {
+                    Err(E::custom(format!(
+                        "must be an integer, `z`, or `s`, \
+                         but found: {}",
+                        value
+                    )))
+                }
+            }
+        }
+
+        d.deserialize_any(Visitor)
+    }
+}
+
+impl ser::Serialize for TomlOptLevel {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        match self.0.parse::<u32>() {
+            Ok(n) => n.serialize(serializer),
+            Err(_) => self.0.serialize(serializer),
+        }
+    }
+}
+
+/// A profile setting that may be either an integer or a boolean,
+/// e.g. `debug = 1` or `debug = true`.
+#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
+#[serde(untagged)]
+pub enum U32OrBool {
+    U32(u32),
+    Bool(bool),
+}
+
+impl<'de> de::Deserialize<'de> for U32OrBool {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = U32OrBool;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("a boolean or an integer")
+            }
+
+            fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(U32OrBool::Bool(b))
+            }
+
+            fn visit_i64<E>(self, u: i64) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(U32OrBool::U32(u as u32))
+            }
+
+            fn visit_u64<E>(self, u: u64) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(U32OrBool::U32(u as u32))
+            }
+        }
+
+        deserializer.deserialize_any(Visitor)
+    }
+}
+
+/// A single `[profile.*]` table from the manifest.
+#[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)]
+#[serde(rename_all = "kebab-case")]
+pub struct TomlProfile {
+    pub opt_level: Option<TomlOptLevel>,
+    pub lto: Option<StringOrBool>,
+    pub codegen_units: Option<u32>,
+    pub debug: Option<U32OrBool>,
+    pub debug_assertions: Option<bool>,
+    pub rpath: Option<bool>,
+    pub panic: Option<String>,
+    pub overflow_checks: Option<bool>,
+    pub incremental: Option<bool>,
+    // Per-package overrides and the build-script override; both are gated
+    // behind the `profile-overrides` feature (see `validate`).
+    pub overrides: Option<BTreeMap<ProfilePackageSpec, TomlProfile>>,
+    pub build_override: Option<Box<TomlProfile>>,
+}
+
+/// A key in `[profile.*.overrides]`: either a concrete package-id spec or
+/// `"*"`, meaning all packages.
+#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub enum ProfilePackageSpec {
+    Spec(PackageIdSpec),
+    All,
+}
+
+impl ser::Serialize for ProfilePackageSpec {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        match *self {
+            ProfilePackageSpec::Spec(ref spec) => spec.serialize(s),
+            ProfilePackageSpec::All => "*".serialize(s),
+        }
+    }
+}
+
+impl<'de> de::Deserialize<'de> for ProfilePackageSpec {
+    fn deserialize<D>(d: D) -> Result<ProfilePackageSpec, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        let string = String::deserialize(d)?;
+        if string == "*" {
+            Ok(ProfilePackageSpec::All)
+        } else {
+            PackageIdSpec::parse(&string)
+                .map_err(de::Error::custom)
+                .map(ProfilePackageSpec::Spec)
+        }
+    }
+}
+
+impl TomlProfile {
+    /// Validates this profile for the named `[profile.*]` section: gates
+    /// override tables behind the `profile-overrides` feature, restricts
+    /// overrides to the `dev`/`release` profiles, and warns about settings
+    /// that have no effect.
+    pub fn validate(
+        &self,
+        name: &str,
+        features: &Features,
+        warnings: &mut Vec<String>,
+    ) -> CargoResult<()> {
+        if let Some(ref profile) = self.build_override {
+            features.require(Feature::profile_overrides())?;
+            profile.validate_override()?;
+        }
+        if let Some(ref override_map) = self.overrides {
+            features.require(Feature::profile_overrides())?;
+            for profile in override_map.values() {
+                profile.validate_override()?;
+            }
+        }
+
+        // Overrides are only meaningful on the dev and release profiles.
+        match name {
+            "dev" | "release" => {}
+            _ => {
+                if self.overrides.is_some() || self.build_override.is_some() {
+                    bail!(
+                        "Profile overrides may only be specified for \
+                         `dev` or `release` profile, not `{}`.",
+                        name
+                    );
+                }
+            }
+        }
+
+        match name {
+            "doc" => {
+                warnings.push("profile `doc` is deprecated and has no effect".to_string());
+            }
+            "test" | "bench" => {
+                if self.panic.is_some() {
+                    warnings.push(format!("`panic` setting is ignored for `{}` profile", name))
+                }
+            }
+            _ => {}
+        }
+        Ok(())
+    }
+
+    /// Checks restrictions that apply only inside an override table:
+    /// overrides cannot nest and cannot set `panic`, `lto`, or `rpath`.
+    fn validate_override(&self) -> CargoResult<()> {
+        if self.overrides.is_some() || self.build_override.is_some() {
+            bail!("Profile overrides cannot be nested.");
+        }
+        if self.panic.is_some() {
+            bail!("`panic` may not be specified in a profile override.")
+        }
+        if self.lto.is_some() {
+            bail!("`lto` may not be specified in a profile override.")
+        }
+        if self.rpath.is_some() {
+            bail!("`rpath` may not be specified in a profile override.")
+        }
+        Ok(())
+    }
+}
+
+/// A value that may be written as a single string or a list of strings;
+/// a lone string is normalized to a one-element vector.
+#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
+pub struct StringOrVec(Vec<String>);
+
+impl<'de> de::Deserialize<'de> for StringOrVec {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = StringOrVec;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("string or list of strings")
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(StringOrVec(vec![s.to_string()]))
+            }
+
+            fn visit_seq<V>(self, v: V) -> Result<Self::Value, V::Error>
+            where
+                V: de::SeqAccess<'de>,
+            {
+                let seq = de::value::SeqAccessDeserializer::new(v);
+                Vec::deserialize(seq).map(StringOrVec)
+
+            }
+        }
+
+        deserializer.deserialize_any(Visitor)
+    }
+}
+
+/// A value that may be written as a string or a boolean,
+/// e.g. `lto = true` or `lto = "thin"`.
+#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
+#[serde(untagged)]
+pub enum StringOrBool {
+    String(String),
+    Bool(bool),
+}
+
+impl<'de> de::Deserialize<'de> for StringOrBool {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = StringOrBool;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("a boolean or a string")
+            }
+
+            fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(StringOrBool::Bool(b))
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(StringOrBool::String(s.to_string()))
+            }
+        }
+
+        deserializer.deserialize_any(Visitor)
+    }
+}
+
+/// A value that may be written as a list of strings or a boolean,
+/// e.g. `publish = false` or `publish = ["my-registry"]`.
+#[derive(Clone, Debug, Serialize)]
+#[serde(untagged)]
+pub enum VecStringOrBool {
+    VecString(Vec<String>),
+    Bool(bool),
+}
+
+impl<'de> de::Deserialize<'de> for VecStringOrBool {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = VecStringOrBool;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("a boolean or vector of strings")
+            }
+
+            fn visit_seq<V>(self, v: V) -> Result<Self::Value, V::Error>
+            where
+                V: de::SeqAccess<'de>,
+            {
+                let seq = de::value::SeqAccessDeserializer::new(v);
+                Vec::deserialize(seq).map(VecStringOrBool::VecString)
+            }
+
+            fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(VecStringOrBool::Bool(b))
+            }
+        }
+
+        deserializer.deserialize_any(Visitor)
+    }
+}
+
+/// Represents the `package`/`project` sections of a `Cargo.toml`.
+///
+/// Note that the order of the fields matters, since this is the order they
+/// are serialized to a TOML file.  For example, you cannot have values after
+/// the field `metadata`, since it is a table and values cannot appear after
+/// tables.
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct TomlProject {
+    edition: Option<String>,
+    name: String,
+    version: semver::Version,
+    authors: Option<Vec<String>>,
+    // `build` can be a path string or `false` to disable build-script probing.
+    build: Option<StringOrBool>,
+    metabuild: Option<StringOrVec>,
+    links: Option<String>,
+    exclude: Option<Vec<String>>,
+    include: Option<Vec<String>>,
+    publish: Option<VecStringOrBool>,
+    #[serde(rename = "publish-lockfile")]
+    publish_lockfile: Option<bool>,
+    workspace: Option<String>,
+    #[serde(rename = "im-a-teapot")]
+    im_a_teapot: Option<bool>,
+    // Auto-discovery toggles for target kinds inferred from the file layout.
+    autobins: Option<bool>,
+    autoexamples: Option<bool>,
+    autotests: Option<bool>,
+    autobenches: Option<bool>,
+    #[serde(rename = "namespaced-features")]
+    namespaced_features: Option<bool>,
+    #[serde(rename = "default-run")]
+    default_run: Option<String>,
+
+    // package metadata
+    description: Option<String>,
+    homepage: Option<String>,
+    documentation: Option<String>,
+    readme: Option<String>,
+    keywords: Option<Vec<String>>,
+    categories: Option<Vec<String>>,
+    license: Option<String>,
+    #[serde(rename = "license-file")]
+    license_file: Option<String>,
+    repository: Option<String>,
+    // Free-form `[package.metadata]` table; must stay last (see note above).
+    metadata: Option<toml::Value>,
+}
+
+/// The `[workspace]` section of a manifest.
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TomlWorkspace {
+    members: Option<Vec<String>>,
+    #[serde(rename = "default-members")]
+    default_members: Option<Vec<String>>,
+    exclude: Option<Vec<String>>,
+}
+
+impl TomlProject {
+    /// Builds the `PackageId` (name + version + source) for this package.
+    pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult<PackageId> {
+        PackageId::new(&self.name, self.version.clone(), source_id)
+    }
+}
+
+/// Mutable state threaded through manifest conversion while dependencies
+/// are being resolved from their TOML form.
+struct Context<'a, 'b> {
+    pkgid: Option<&'a PackageId>,
+    deps: &'a mut Vec<Dependency>,
+    source_id: &'a SourceId,
+    nested_paths: &'a mut Vec<PathBuf>,
+    config: &'b Config,
+    warnings: &'a mut Vec<String>,
+    // Set to the `[target.'cfg(..)']` key while processing platform tables.
+    platform: Option<Platform>,
+    root: &'a Path,
+    features: &'a Features,
+}
+
+impl TomlManifest {
+    /// Produces a sanitized copy of this manifest suitable for publishing to
+    /// a registry: `[workspace]`/`package.workspace`, `[replace]`, and
+    /// `[patch]` are dropped, path dependencies are stripped to registry
+    /// form, and named registries are elaborated to their index URLs.
+    pub fn prepare_for_publish(&self, config: &Config) -> CargoResult<TomlManifest> {
+        let mut package = self
+            .package
+            .as_ref()
+            .or_else(|| self.project.as_ref())
+            .unwrap()
+            .clone();
+        package.workspace = None;
+        return Ok(TomlManifest {
+            package: Some(package),
+            project: None,
+            profile: self.profile.clone(),
+            lib: self.lib.clone(),
+            bin: self.bin.clone(),
+            example: self.example.clone(),
+            test: self.test.clone(),
+            bench: self.bench.clone(),
+            dependencies: map_deps(config, self.dependencies.as_ref())?,
+            // The `*2` variants are folded into the canonical spelling here.
+            dev_dependencies: map_deps(
+                config,
+                self.dev_dependencies
+                    .as_ref()
+                    .or_else(|| self.dev_dependencies2.as_ref()),
+            )?,
+            dev_dependencies2: None,
+            build_dependencies: map_deps(
+                config,
+                self.build_dependencies
+                    .as_ref()
+                    .or_else(|| self.build_dependencies2.as_ref()),
+            )?,
+            build_dependencies2: None,
+            features: self.features.clone(),
+            // Apply the same dependency mapping to every `[target.*]` table.
+            target: match self.target.as_ref().map(|target_map| {
+                target_map
+                    .iter()
+                    .map(|(k, v)| {
+                        Ok((
+                            k.clone(),
+                            TomlPlatform {
+                                dependencies: map_deps(config, v.dependencies.as_ref())?,
+                                dev_dependencies: map_deps(
+                                    config,
+                                    v.dev_dependencies
+                                        .as_ref()
+                                        .or_else(|| v.dev_dependencies2.as_ref()),
+                                )?,
+                                dev_dependencies2: None,
+                                build_dependencies: map_deps(
+                                    config,
+                                    v.build_dependencies
+                                        .as_ref()
+                                        .or_else(|| v.build_dependencies2.as_ref()),
+                                )?,
+                                build_dependencies2: None,
+                            },
+                        ))
+                    })
+                    .collect()
+            }) {
+                Some(Ok(v)) => Some(v),
+                Some(Err(e)) => return Err(e),
+                None => None,
+            },
+            replace: None,
+            patch: None,
+            workspace: None,
+            badges: self.badges.clone(),
+            cargo_features: self.cargo_features.clone(),
+        });
+
+        // Maps every dependency in a table through `map_dependency`.
+        fn map_deps(
+            config: &Config,
+            deps: Option<&BTreeMap<String, TomlDependency>>,
+        ) -> CargoResult<Option<BTreeMap<String, TomlDependency>>> {
+            let deps = match deps {
+                Some(deps) => deps,
+                None => return Ok(None),
+            };
+            let deps = deps
+                .iter()
+                .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?)))
+                .collect::<CargoResult<BTreeMap<_, _>>>()?;
+            Ok(Some(deps))
+        }
+
+        // Normalizes one dependency for publishing: drops `path`, resolves a
+        // named registry to its index URL, and expands the simple form.
+        fn map_dependency(config: &Config, dep: &TomlDependency) -> CargoResult<TomlDependency> {
+            match *dep {
+                TomlDependency::Detailed(ref d) => {
+                    let mut d = d.clone();
+                    d.path.take(); // path dependencies become crates.io deps
+                                   // registry specifications are elaborated to the index URL
+                    if let Some(registry) = d.registry.take() {
+                        let src = SourceId::alt_registry(config, &registry)?;
+                        d.registry_index = Some(src.url().to_string());
+                    }
+                    Ok(TomlDependency::Detailed(d))
+                }
+                TomlDependency::Simple(ref s) => {
+                    Ok(TomlDependency::Detailed(DetailedTomlDependency {
+                        version: Some(s.clone()),
+                        ..Default::default()
+                    }))
+                }
+            }
+        }
+    }
+
+    fn to_real_manifest(
+        me: &Rc<TomlManifest>,
+        source_id: &SourceId,
+        package_root: &Path,
+        config: &Config,
+    ) -> CargoResult<(Manifest, Vec<PathBuf>)> {
+        let mut nested_paths = vec![];
+        let mut warnings = vec![];
+        let mut errors = vec![];
+
+        // Parse features first so they will be available when parsing other parts of the toml
+        let empty = Vec::new();
+        let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
+        let features = Features::new(&cargo_features, &mut warnings)?;
+
+        let project = me.project.as_ref().or_else(|| me.package.as_ref());
+        let project = project.ok_or_else(|| format_err!("no `package` section found"))?;
+
+        let package_name = project.name.trim();
+        if package_name.is_empty() {
+            bail!("package name cannot be an empty string")
+        }
+
+        for c in package_name.chars() {
+            if c.is_alphanumeric() {
+                continue;
+            }
+            if c == '_' || c == '-' {
+                continue;
+            }
+            bail!("Invalid character `{}` in package name: `{}`", c, package_name)
+        }
+
+        let pkgid = project.to_package_id(source_id)?;
+
+        let edition = if let Some(ref edition) = project.edition {
+            features
+                .require(Feature::edition())
+                .chain_err(|| "editions are unstable")?;
+            edition
+                .parse()
+                .chain_err(|| "failed to parse the `edition` key")?
+        } else {
+            Edition::Edition2015
+        };
+
+        if project.metabuild.is_some() {
+            features.require(Feature::metabuild())?;
+        }
+
+        // If we have no lib at all, use the inferred lib if available
+        // If we have a lib with a path, we're done
+        // If we have a lib with no path, use the inferred lib or_else package name
+        let targets = targets(
+            &features,
+            me,
+            package_name,
+            package_root,
+            edition,
+            &project.build,
+            &project.metabuild,
+            &mut warnings,
+            &mut errors,
+        )?;
+
+        if targets.is_empty() {
+            debug!("manifest has no build targets");
+        }
+
+        if let Err(e) = unique_build_targets(&targets, package_root) {
+            warnings.push(format!(
+                "file found to be present in multiple \
+                 build targets: {}",
+                e
+            ));
+        }
+
+        let mut deps = Vec::new();
+        let replace;
+        let patch;
+
+        {
+            let mut cx = Context {
+                pkgid: Some(&pkgid),
+                deps: &mut deps,
+                source_id,
+                nested_paths: &mut nested_paths,
+                config,
+                warnings: &mut warnings,
+                features: &features,
+                platform: None,
+                root: package_root,
+            };
+
+            fn process_dependencies(
+                cx: &mut Context,
+                new_deps: Option<&BTreeMap<String, TomlDependency>>,
+                kind: Option<Kind>,
+            ) -> CargoResult<()> {
+                let dependencies = match new_deps {
+                    Some(dependencies) => dependencies,
+                    None => return Ok(()),
+                };
+                for (n, v) in dependencies.iter() {
+                    let dep = v.to_dependency(n, cx, kind)?;
+                    cx.deps.push(dep);
+                }
+
+                Ok(())
+            }
+
+            // Collect the deps
+            process_dependencies(&mut cx, me.dependencies.as_ref(), None)?;
+            let dev_deps = me
+                .dev_dependencies
+                .as_ref()
+                .or_else(|| me.dev_dependencies2.as_ref());
+            process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?;
+            let build_deps = me
+                .build_dependencies
+                .as_ref()
+                .or_else(|| me.build_dependencies2.as_ref());
+            process_dependencies(&mut cx, build_deps, Some(Kind::Build))?;
+
+            for (name, platform) in me.target.iter().flat_map(|t| t) {
+                cx.platform = Some(name.parse()?);
+                process_dependencies(&mut cx, platform.dependencies.as_ref(), None)?;
+                let build_deps = platform
+                    .build_dependencies
+                    .as_ref()
+                    .or_else(|| platform.build_dependencies2.as_ref());
+                process_dependencies(&mut cx, build_deps, Some(Kind::Build))?;
+                let dev_deps = platform
+                    .dev_dependencies
+                    .as_ref()
+                    .or_else(|| platform.dev_dependencies2.as_ref());
+                process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?;
+            }
+
+            replace = me.replace(&mut cx)?;
+            patch = me.patch(&mut cx)?;
+        }
+
+        {
+            let mut names_sources = BTreeMap::new();
+            for dep in &deps {
+                let name = dep.name_in_toml();
+                let prev = names_sources.insert(name.to_string(), dep.source_id());
+                if prev.is_some() && prev != Some(dep.source_id()) {
+                    bail!(
+                        "Dependency '{}' has different source paths depending on the build \
+                         target. Each dependency must have a single canonical source path \
+                         irrespective of build target.",
+                        name
+                    );
+                }
+            }
+        }
+
+        let exclude = project.exclude.clone().unwrap_or_default();
+        let include = project.include.clone().unwrap_or_default();
+        if project.namespaced_features.is_some() {
+            features.require(Feature::namespaced_features())?;
+        }
+
+        let summary = Summary::new(
+            pkgid,
+            deps,
+            &me.features
+                .as_ref()
+                .map(|x| {
+                    x.iter()
+                        .map(|(k, v)| (k.as_str(), v.iter().collect()))
+                        .collect()
+                })
+                .unwrap_or_else(BTreeMap::new),
+            project.links.as_ref().map(|x| x.as_str()),
+            project.namespaced_features.unwrap_or(false),
+        )?;
+        let metadata = ManifestMetadata {
+            description: project.description.clone(),
+            homepage: project.homepage.clone(),
+            documentation: project.documentation.clone(),
+            readme: project.readme.clone(),
+            authors: project.authors.clone().unwrap_or_default(),
+            license: project.license.clone(),
+            license_file: project.license_file.clone(),
+            repository: project.repository.clone(),
+            keywords: project.keywords.clone().unwrap_or_default(),
+            categories: project.categories.clone().unwrap_or_default(),
+            badges: me.badges.clone().unwrap_or_default(),
+            links: project.links.clone(),
+        };
+
+        let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) {
+            (Some(config), None) => WorkspaceConfig::Root(WorkspaceRootConfig::new(
+                &package_root,
+                &config.members,
+                &config.default_members,
+                &config.exclude,
+            )),
+            (None, root) => WorkspaceConfig::Member {
+                root: root.cloned(),
+            },
+            (Some(..), Some(..)) => bail!(
+                "cannot configure both `package.workspace` and \
+                 `[workspace]`, only one can be specified"
+            ),
+        };
+        let profiles = Profiles::new(me.profile.as_ref(), config, &features, &mut warnings)?;
+        let publish = match project.publish {
+            Some(VecStringOrBool::VecString(ref vecstring)) => {
+                features
+                    .require(Feature::alternative_registries())
+                    .chain_err(|| {
+                        "the `publish` manifest key is unstable for anything other than a value of true or false"
+                    })?;
+                Some(vecstring.clone())
+            }
+            Some(VecStringOrBool::Bool(false)) => Some(vec![]),
+            None | Some(VecStringOrBool::Bool(true)) => None,
+        };
+
+        let publish_lockfile = match project.publish_lockfile {
+            Some(b) => {
+                features.require(Feature::publish_lockfile())?;
+                b
+            }
+            None => false,
+        };
+
+        let custom_metadata = project.metadata.clone();
+        let mut manifest = Manifest::new(
+            summary,
+            targets,
+            exclude,
+            include,
+            project.links.clone(),
+            metadata,
+            custom_metadata,
+            profiles,
+            publish,
+            publish_lockfile,
+            replace,
+            patch,
+            workspace_config,
+            features,
+            edition,
+            project.im_a_teapot,
+            project.default_run.clone(),
+            Rc::clone(me),
+            project.metabuild.clone().map(|sov| sov.0),
+        );
+        if project.license_file.is_some() && project.license.is_some() {
+            manifest.warnings_mut().add_warning(
+                "only one of `license` or \
+                 `license-file` is necessary"
+                    .to_string(),
+            );
+        }
+        for warning in warnings {
+            manifest.warnings_mut().add_warning(warning);
+        }
+        for error in errors {
+            manifest.warnings_mut().add_critical_warning(error);
+        }
+
+        manifest.feature_gate()?;
+
+        Ok((manifest, nested_paths))
+    }
+
+    /// Builds a `VirtualManifest` — a workspace root that defines no
+    /// package of its own — from this parsed TOML.
+    ///
+    /// Returns the manifest together with any nested paths collected
+    /// while processing its `[replace]`/`[patch]` sections.
+    fn to_virtual_manifest(
+        me: &Rc<TomlManifest>,
+        source_id: &SourceId,
+        root: &Path,
+        config: &Config,
+    ) -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
+        // A virtual manifest may not declare any package-defining sections.
+        if me.project.is_some() {
+            bail!("virtual manifests do not define [project]");
+        }
+        if me.package.is_some() {
+            bail!("virtual manifests do not define [package]");
+        }
+        if me.lib.is_some() {
+            bail!("virtual manifests do not specify [lib]");
+        }
+        if me.bin.is_some() {
+            bail!("virtual manifests do not specify [[bin]]");
+        }
+        if me.example.is_some() {
+            bail!("virtual manifests do not specify [[example]]");
+        }
+        if me.test.is_some() {
+            bail!("virtual manifests do not specify [[test]]");
+        }
+        if me.bench.is_some() {
+            bail!("virtual manifests do not specify [[bench]]");
+        }
+
+        let mut nested_paths = Vec::new();
+        let mut warnings = Vec::new();
+        let mut deps = Vec::new();
+        let empty = Vec::new();
+        let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty);
+        let features = Features::new(&cargo_features, &mut warnings)?;
+
+        // Only [replace]/[patch] can contribute dependencies here, so the
+        // context carries no package id (`pkgid: None`).
+        let (replace, patch) = {
+            let mut cx = Context {
+                pkgid: None,
+                deps: &mut deps,
+                source_id,
+                nested_paths: &mut nested_paths,
+                config,
+                warnings: &mut warnings,
+                platform: None,
+                features: &features,
+                root,
+            };
+            (me.replace(&mut cx)?, me.patch(&mut cx)?)
+        };
+        let profiles = Profiles::new(me.profile.as_ref(), config, &features, &mut warnings)?;
+        // [workspace] is mandatory: it is the whole point of a virtual manifest.
+        let workspace_config = match me.workspace {
+            Some(ref config) => WorkspaceConfig::Root(WorkspaceRootConfig::new(
+                &root,
+                &config.members,
+                &config.default_members,
+                &config.exclude,
+            )),
+            None => {
+                bail!("virtual manifests must be configured with [workspace]");
+            }
+        };
+        Ok((
+            VirtualManifest::new(replace, patch, workspace_config, profiles),
+            nested_paths,
+        ))
+    }
+
+    /// Parses the `[replace]` section into `(spec, replacement)` pairs.
+    ///
+    /// Each key must be a package-id spec naming the exact version being
+    /// replaced (a spec without a URL defaults to the crates.io index), and
+    /// the replacement dependency must not itself carry a version
+    /// requirement. The resulting dependency is pinned to exactly the
+    /// version named by the spec.
+    fn replace(&self, cx: &mut Context) -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
+        // [replace] and [patch] are mutually exclusive.
+        if self.patch.is_some() && self.replace.is_some() {
+            bail!("cannot specify both [replace] and [patch]");
+        }
+        let mut replace = Vec::new();
+        for (spec, replacement) in self.replace.iter().flat_map(|x| x) {
+            let mut spec = PackageIdSpec::parse(spec).chain_err(|| {
+                format!(
+                    "replacements must specify a valid semver \
+                     version to replace, but `{}` does not",
+                    spec
+                )
+            })?;
+            if spec.url().is_none() {
+                spec.set_url(CRATES_IO_INDEX.parse().unwrap());
+            }
+
+            let version_specified = match *replacement {
+                TomlDependency::Detailed(ref d) => d.version.is_some(),
+                // The simple `"x.y.z"` string form *is* a version requirement.
+                TomlDependency::Simple(..) => true,
+            };
+            if version_specified {
+                bail!(
+                    "replacements cannot specify a version \
+                     requirement, but found one for `{}`",
+                    spec
+                );
+            }
+
+            let mut dep = replacement.to_dependency(spec.name(), cx, None)?;
+            {
+                // Pin the replacement to exactly the version being replaced.
+                let version = spec.version().ok_or_else(|| {
+                    format_err!(
+                        "replacements must specify a version \
+                         to replace, but `{}` does not",
+                        spec
+                    )
+                })?;
+                dep.set_version_req(VersionReq::exact(version));
+            }
+            replace.push((spec, dep));
+        }
+        Ok(replace)
+    }
+
+    /// Parses the `[patch]` section into a map from source URL to the
+    /// dependencies patching that source. The special `crates-io` key is
+    /// shorthand for the crates.io index URL.
+    fn patch(&self, cx: &mut Context) -> CargoResult<HashMap<Url, Vec<Dependency>>> {
+        let mut patch = HashMap::new();
+        for (url, deps) in self.patch.iter().flat_map(|x| x) {
+            let url = match &url[..] {
+                CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(),
+                _ => url.to_url()?,
+            };
+            patch.insert(
+                url,
+                deps.iter()
+                    .map(|(name, dep)| dep.to_dependency(name, cx, None))
+                    .collect::<CargoResult<Vec<_>>>()?,
+            );
+        }
+        Ok(patch)
+    }
+
+    fn maybe_custom_build(
+        &self,
+        build: &Option<StringOrBool>,
+        package_root: &Path,
+    ) -> Option<PathBuf> {
+        let build_rs = package_root.join("build.rs");
+        match *build {
+            Some(StringOrBool::Bool(false)) => None, // explicitly no build script
+            Some(StringOrBool::Bool(true)) => Some(build_rs),
+            Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)),
+            None => {
+                match fs::metadata(&build_rs) {
+                    // If there is a build.rs file next to the Cargo.toml, assume it is
+                    // a build script
+                    Ok(ref e) if e.is_file() => Some(build_rs),
+                    Ok(_) | Err(_) => None,
+                }
+            }
+        }
+    }
+
+    /// Whether this manifest contains a `[profile]` section.
+    pub fn has_profiles(&self) -> bool {
+        self.profile.is_some()
+    }
+}
+
+/// Will check a list of build targets, and make sure the target names are unique within a vector.
+/// If not, the name of the offending build target is returned.
+fn unique_build_targets(targets: &[Target], package_root: &Path) -> Result<(), String> {
+    let mut seen = HashSet::new();
+    for target in targets {
+        if let TargetSourcePath::Path(path) = target.src_path() {
+            let full = package_root.join(path);
+            if !seen.insert(full.clone()) {
+                return Err(full.display().to_string());
+            }
+        }
+    }
+    Ok(())
+}
+
+impl TomlDependency {
+    fn to_dependency(
+        &self,
+        name: &str,
+        cx: &mut Context,
+        kind: Option<Kind>,
+    ) -> CargoResult<Dependency> {
+        match *self {
+            TomlDependency::Simple(ref version) => DetailedTomlDependency {
+                version: Some(version.clone()),
+                ..Default::default()
+            }.to_dependency(name, cx, kind),
+            TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind),
+        }
+    }
+}
+
+impl DetailedTomlDependency {
+    /// Converts a fully spelled-out dependency table into a `Dependency`.
+    ///
+    /// Emits warnings (not yet hard errors) for underspecified or
+    /// ambiguous declarations, resolves the dependency's source
+    /// (git / path / alternative registry / registry index / crates.io),
+    /// and applies features, optionality, platform, kind and any
+    /// feature-gated rename.
+    fn to_dependency(
+        &self,
+        name_in_toml: &str,
+        cx: &mut Context,
+        kind: Option<Kind>,
+    ) -> CargoResult<Dependency> {
+        // A dependency must come from *somewhere*; warn when none of the
+        // three location keys is present.
+        if self.version.is_none() && self.path.is_none() && self.git.is_none() {
+            let msg = format!(
+                "dependency ({}) specified without \
+                 providing a local path, Git repository, or \
+                 version to use. This will be considered an \
+                 error in future versions",
+                name_in_toml
+            );
+            cx.warnings.push(msg);
+        }
+
+        // `branch`/`tag`/`rev` only make sense alongside `git`.
+        if self.git.is_none() {
+            let git_only_keys = [
+                (&self.branch, "branch"),
+                (&self.tag, "tag"),
+                (&self.rev, "rev"),
+            ];
+
+            for &(key, key_name) in &git_only_keys {
+                if key.is_some() {
+                    let msg = format!(
+                        "key `{}` is ignored for dependency ({}). \
+                         This will be considered an error in future versions",
+                        key_name, name_in_toml
+                    );
+                    cx.warnings.push(msg)
+                }
+            }
+        }
+
+        // Registry named by the `registry` key (unstable feature);
+        // defaults to crates.io when absent.
+        let registry_id = match self.registry {
+            Some(ref registry) => {
+                cx.features.require(Feature::alternative_registries())?;
+                SourceId::alt_registry(cx.config, registry)?
+            }
+            None => SourceId::crates_io(cx.config)?,
+        };
+
+        // Resolve the dependency's source from the combination of `git`,
+        // `path`, `registry` and `registry-index`, rejecting ambiguous mixes.
+        let new_source_id = match (
+            self.git.as_ref(),
+            self.path.as_ref(),
+            self.registry.as_ref(),
+            self.registry_index.as_ref(),
+        ) {
+            (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!(
+                "dependency ({}) specification is ambiguous. \
+                 Only one of `git` or `registry` is allowed.",
+                name_in_toml
+            ),
+            (_, _, Some(_), Some(_)) => bail!(
+                "dependency ({}) specification is ambiguous. \
+                 Only one of `registry` or `registry-index` is allowed.",
+                name_in_toml
+            ),
+            (Some(git), maybe_path, _, _) => {
+                if maybe_path.is_some() {
+                    let msg = format!(
+                        "dependency ({}) specification is ambiguous. \
+                         Only one of `git` or `path` is allowed. \
+                         This will be considered an error in future versions",
+                        name_in_toml
+                    );
+                    cx.warnings.push(msg)
+                }
+
+                // At most one git reference may be given; warn otherwise.
+                let n_details = [&self.branch, &self.tag, &self.rev]
+                    .iter()
+                    .filter(|d| d.is_some())
+                    .count();
+
+                if n_details > 1 {
+                    let msg = format!(
+                        "dependency ({}) specification is ambiguous. \
+                         Only one of `branch`, `tag` or `rev` is allowed. \
+                         This will be considered an error in future versions",
+                        name_in_toml
+                    );
+                    cx.warnings.push(msg)
+                }
+
+                // Precedence: branch, then tag, then rev; default branch
+                // is `master`.
+                let reference = self
+                    .branch
+                    .clone()
+                    .map(GitReference::Branch)
+                    .or_else(|| self.tag.clone().map(GitReference::Tag))
+                    .or_else(|| self.rev.clone().map(GitReference::Rev))
+                    .unwrap_or_else(|| GitReference::Branch("master".to_string()));
+                let loc = git.to_url()?;
+                SourceId::for_git(&loc, reference)?
+            }
+            (None, Some(path), _, _) => {
+                cx.nested_paths.push(PathBuf::from(path));
+                // If the source id for the package we're parsing is a path
+                // source, then we normalize the path here to get rid of
+                // components like `..`.
+                //
+                // The purpose of this is to get a canonical id for the package
+                // that we're depending on to ensure that builds of this package
+                // always end up hashing to the same value no matter where it's
+                // built from.
+                if cx.source_id.is_path() {
+                    let path = cx.root.join(path);
+                    let path = util::normalize_path(&path);
+                    SourceId::for_path(&path)?
+                } else {
+                    cx.source_id.clone()
+                }
+            }
+            (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?,
+            (None, None, None, Some(registry_index)) => {
+                let url = registry_index.to_url()?;
+                SourceId::for_registry(&url)?
+            }
+            (None, None, None, None) => SourceId::crates_io(cx.config)?,
+        };
+
+        // With `package = "..."` the actual crate is `package`, while the
+        // TOML key becomes an explicit rename (feature-gated below).
+        let (pkg_name, explicit_name_in_toml) = match self.package {
+            Some(ref s) => (&s[..], Some(name_in_toml)),
+            None => (name_in_toml, None),
+        };
+
+        let version = self.version.as_ref().map(|v| &v[..]);
+        let mut dep = match cx.pkgid {
+            Some(id) => Dependency::parse(pkg_name, version, &new_source_id, id, cx.config)?,
+            None => Dependency::parse_no_deprecated(pkg_name, version, &new_source_id)?,
+        };
+        dep.set_features(self.features.iter().flat_map(|x| x))
+            .set_default_features(
+                self.default_features
+                    .or(self.default_features2)
+                    .unwrap_or(true),
+            )
+            .set_optional(self.optional.unwrap_or(false))
+            .set_platform(cx.platform.clone())
+            .set_registry_id(&registry_id);
+        if let Some(kind) = kind {
+            dep.set_kind(kind);
+        }
+        if let Some(name_in_toml) = explicit_name_in_toml {
+            cx.features.require(Feature::rename_dependency())?;
+            dep.set_explicit_name_in_toml(name_in_toml);
+        }
+        Ok(dep)
+    }
+}
+
+/// A single target section from `Cargo.toml` (`[lib]`, `[[bin]]`,
+/// `[[example]]`, `[[test]]` or `[[bench]]`). All fields are optional;
+/// missing values are filled in by target inference elsewhere.
+#[derive(Default, Serialize, Deserialize, Debug, Clone)]
+struct TomlTarget {
+    name: Option<String>,
+
+    // The intention was to only accept `crate-type` here but historical
+    // versions of Cargo also accepted `crate_type`, so look for both.
+    #[serde(rename = "crate-type")]
+    crate_type: Option<Vec<String>>,
+    #[serde(rename = "crate_type")]
+    crate_type2: Option<Vec<String>>,
+
+    path: Option<PathValue>,
+    test: Option<bool>,
+    doctest: Option<bool>,
+    bench: Option<bool>,
+    doc: Option<bool>,
+    plugin: Option<bool>,
+    // Like `crate-type` above, both spellings of `proc-macro` are accepted.
+    #[serde(rename = "proc-macro")]
+    proc_macro: Option<bool>,
+    #[serde(rename = "proc_macro")]
+    proc_macro2: Option<bool>,
+    harness: Option<bool>,
+    #[serde(rename = "required-features")]
+    required_features: Option<Vec<String>>,
+    edition: Option<String>,
+}
+
+/// Newtype around `PathBuf` so manifest paths (de)serialize as plain
+/// strings (see the serde impls for `PathValue`).
+#[derive(Clone)]
+struct PathValue(PathBuf);
+
+impl<'de> de::Deserialize<'de> for PathValue {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        Ok(PathValue(String::deserialize(deserializer)?.into()))
+    }
+}
+
+impl ser::Serialize for PathValue {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        self.0.serialize(serializer)
+    }
+}
+
+/// Corresponds to a `target` entry, but `TomlTarget` is already used.
+///
+/// Each dependency table accepts both the dashed and the underscored key
+/// spelling; the `*2` field holds the underscored variant.
+#[derive(Serialize, Deserialize, Debug)]
+struct TomlPlatform {
+    dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "build-dependencies")]
+    build_dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "build_dependencies")]
+    build_dependencies2: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "dev-dependencies")]
+    dev_dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "dev_dependencies")]
+    dev_dependencies2: Option<BTreeMap<String, TomlDependency>>,
+}
+
+impl TomlTarget {
+    fn new() -> TomlTarget {
+        TomlTarget::default()
+    }
+
+    fn name(&self) -> String {
+        match self.name {
+            Some(ref name) => name.clone(),
+            None => panic!("target name is required"),
+        }
+    }
+
+    fn proc_macro(&self) -> Option<bool> {
+        self.proc_macro.or(self.proc_macro2)
+    }
+
+    fn crate_types(&self) -> Option<&Vec<String>> {
+        self.crate_type
+            .as_ref()
+            .or_else(|| self.crate_type2.as_ref())
+    }
+}
+
+impl fmt::Debug for PathValue {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
diff --git a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs
new file mode 100644 (file)
index 0000000..cc40b53
--- /dev/null
@@ -0,0 +1,797 @@
+//! This module implements Cargo conventions for directory layout:
+//!
+//!  * `src/lib.rs` is a library
+//!  * `src/main.rs` is a binary
+//!  * `src/bin/*.rs` are binaries
+//!  * `examples/*.rs` are examples
+//!  * `tests/*.rs` are integration tests
+//!  * `benches/*.rs` are benchmarks
+//!
+//! It is a bit tricky because we need to match explicit information from `Cargo.toml`
+//! with implicit info in directory layout.
+
+use std::path::{Path, PathBuf};
+use std::fs::{self, DirEntry};
+use std::collections::HashSet;
+
+use core::{compiler, Edition, Feature, Features, Target};
+use util::errors::{CargoResult, CargoResultExt};
+use super::{
+    LibKind, PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget,
+    TomlExampleTarget, TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget,
+};
+
+/// Collects every build target of a package: the library, binaries,
+/// examples, tests and benchmarks declared in the manifest or inferred
+/// from the directory layout, plus a build-script or metabuild target
+/// when one is configured.
+pub fn targets(
+    features: &Features,
+    manifest: &TomlManifest,
+    package_name: &str,
+    package_root: &Path,
+    edition: Edition,
+    custom_build: &Option<StringOrBool>,
+    metabuild: &Option<StringOrVec>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+    let mut targets = Vec::new();
+
+    // The library is processed first; binaries need to know whether one
+    // exists (it affects which legacy bin paths are accepted).
+    let has_lib;
+
+    if let Some(target) = clean_lib(
+        features,
+        manifest.lib.as_ref(),
+        package_root,
+        package_name,
+        edition,
+        warnings,
+    )? {
+        targets.push(target);
+        has_lib = true;
+    } else {
+        has_lib = false;
+    }
+
+    let package = manifest
+        .package
+        .as_ref()
+        .or_else(|| manifest.project.as_ref())
+        .ok_or_else(|| format_err!("manifest has no `package` (or `project`)"))?;
+
+    targets.extend(clean_bins(
+        features,
+        manifest.bin.as_ref(),
+        package_root,
+        package_name,
+        edition,
+        package.autobins,
+        warnings,
+        errors,
+        has_lib,
+    )?);
+
+    targets.extend(clean_examples(
+        features,
+        manifest.example.as_ref(),
+        package_root,
+        edition,
+        package.autoexamples,
+        warnings,
+        errors,
+    )?);
+
+    targets.extend(clean_tests(
+        features,
+        manifest.test.as_ref(),
+        package_root,
+        edition,
+        package.autotests,
+        warnings,
+        errors,
+    )?);
+
+    targets.extend(clean_benches(
+        features,
+        manifest.bench.as_ref(),
+        package_root,
+        edition,
+        package.autobenches,
+        warnings,
+        errors,
+    )?);
+
+    // processing the custom build script
+    if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) {
+        // `metabuild` and a `build` script are mutually exclusive.
+        if metabuild.is_some() {
+            bail!("cannot specify both `metabuild` and `build`");
+        }
+        let name = format!(
+            "build-script-{}",
+            custom_build
+                .file_stem()
+                .and_then(|s| s.to_str())
+                .unwrap_or("")
+        );
+        targets.push(Target::custom_build_target(
+            &name,
+            package_root.join(custom_build),
+            edition,
+        ));
+    }
+    if let Some(metabuild) = metabuild {
+        // Verify names match available build deps.
+        let bdeps = manifest.build_dependencies.as_ref();
+        for name in &metabuild.0 {
+            if !bdeps.map_or(false, |bd| bd.contains_key(name)) {
+                bail!(
+                    "metabuild package `{}` must be specified in `build-dependencies`",
+                    name
+                );
+            }
+        }
+
+        targets.push(Target::metabuild_target(&format!(
+            "metabuild-{}",
+            package.name
+        )));
+    }
+
+    Ok(targets)
+}
+
+/// Resolves the package's (at most one) library target, merging an
+/// explicit `[lib]` section with the inferred layout convention.
+/// Returns `Ok(None)` when the package has no library at all.
+fn clean_lib(
+    features: &Features,
+    toml_lib: Option<&TomlLibTarget>,
+    package_root: &Path,
+    package_name: &str,
+    edition: Edition,
+    warnings: &mut Vec<String>,
+) -> CargoResult<Option<Target>> {
+    let inferred = inferred_lib(package_root);
+    let lib = match toml_lib {
+        Some(lib) => {
+            if let Some(ref name) = lib.name {
+                // XXX: other code paths dodge this validation
+                if name.contains('-') {
+                    bail!("library target names cannot contain hyphens: {}", name)
+                }
+            }
+            // An explicit [lib] without a name defaults to the package name.
+            Some(TomlTarget {
+                name: lib.name.clone().or_else(|| Some(package_name.to_owned())),
+                ..lib.clone()
+            })
+        }
+        None => inferred.as_ref().map(|lib| TomlTarget {
+            name: Some(package_name.to_string()),
+            path: Some(PathValue(lib.clone())),
+            ..TomlTarget::new()
+        }),
+    };
+
+    let lib = match lib {
+        Some(ref lib) => lib,
+        None => return Ok(None),
+    };
+
+    validate_has_name(lib, "library", "lib")?;
+
+    // Pick the source file: explicit `path`, then the inferred one, then a
+    // pre-2018 legacy fallback `src/<name>.rs` (accepted with a warning).
+    let path = match (lib.path.as_ref(), inferred) {
+        (Some(path), _) => package_root.join(&path.0),
+        (None, Some(path)) => path,
+        (None, None) => {
+            let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name()));
+            if edition < Edition::Edition2018 && legacy_path.exists() {
+                warnings.push(format!(
+                    "path `{}` was erroneously implicitly accepted for library `{}`,\n\
+                     please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+                    legacy_path.display(),
+                    lib.name()
+                ));
+                legacy_path
+            } else {
+                bail!(
+                    "can't find library `{}`, \
+                     rename file to `src/lib.rs` or specify lib.path",
+                    lib.name()
+                )
+            }
+        }
+    };
+
+    // Per the Macros 1.1 RFC:
+    //
+    // > Initially if a crate is compiled with the proc-macro crate type
+    // > (and possibly others) it will forbid exporting any items in the
+    // > crate other than those functions tagged #[proc_macro_derive] and
+    // > those functions must also be placed at the crate root.
+    //
+    // A plugin requires exporting plugin_registrar so a crate cannot be
+    // both at once.
+    let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) {
+        (_, Some(true), Some(true)) => bail!("lib.plugin and lib.proc-macro cannot both be true"),
+        (Some(kinds), _, _) => kinds.iter().map(|s| s.into()).collect(),
+        (None, Some(true), _) => vec![LibKind::Dylib],
+        (None, _, Some(true)) => vec![LibKind::ProcMacro],
+        (None, _, _) => vec![LibKind::Lib],
+    };
+
+    let mut target = Target::lib_target(&lib.name(), crate_types, path, edition);
+    configure(features, lib, &mut target)?;
+    Ok(Some(target))
+}
+
+/// Resolves binary targets from explicit `[[bin]]` sections combined with
+/// inferred candidates. Validates names and crate-type usage, then maps
+/// each binary to a `Target`, accepting some legacy paths with a warning.
+fn clean_bins(
+    features: &Features,
+    toml_bins: Option<&Vec<TomlBinTarget>>,
+    package_root: &Path,
+    package_name: &str,
+    edition: Edition,
+    autodiscover: Option<bool>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+    has_lib: bool,
+) -> CargoResult<Vec<Target>> {
+    let inferred = inferred_bins(package_root, package_name);
+
+    let bins = toml_targets_and_inferred(
+        toml_bins,
+        &inferred,
+        package_root,
+        autodiscover,
+        edition,
+        warnings,
+        "binary",
+        "bin",
+        "autobins",
+    );
+
+    // First pass: validation only. `crate-type`/`proc-macro` are
+    // meaningless on binaries and are collected as (non-fatal) errors.
+    for bin in &bins {
+        validate_has_name(bin, "binary", "bin")?;
+
+        let name = bin.name();
+
+        if let Some(crate_types) = bin.crate_types() {
+            if !crate_types.is_empty() {
+                errors.push(format!(
+                    "the target `{}` is a binary and can't have any \
+                     crate-types set (currently \"{}\")",
+                    name,
+                    crate_types.join(", ")
+                ));
+            }
+        }
+
+        if bin.proc_macro() == Some(true) {
+            errors.push(format!(
+                "the target `{}` is a binary and can't have `proc-macro` \
+                 set `true`",
+                name
+            ));
+        }
+
+        if compiler::is_bad_artifact_name(&name) {
+            bail!("the binary target name `{}` is forbidden", name)
+        }
+    }
+
+    validate_unique_names(&bins, "binary")?;
+
+    // Second pass: resolve each binary's source path and build the target.
+    let mut result = Vec::new();
+    for bin in &bins {
+        let path = target_path(bin, &inferred, "bin", package_root, edition, &mut |_| {
+            if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) {
+                warnings.push(format!(
+                    "path `{}` was erroneously implicitly accepted for binary `{}`,\n\
+                     please set bin.path in Cargo.toml",
+                    legacy_path.display(),
+                    bin.name()
+                ));
+                Some(legacy_path)
+            } else {
+                None
+            }
+        });
+        let path = match path {
+            Ok(path) => path,
+            Err(e) => bail!("{}", e),
+        };
+
+        let mut target = Target::bin_target(
+            &bin.name(),
+            path,
+            bin.required_features.clone(),
+            edition,
+        );
+        configure(features, bin, &mut target)?;
+        result.push(target);
+    }
+    return Ok(result);
+
+    // Historical fallback locations for a binary's source file:
+    // `src/<name>.rs` (only when there is no library), then `src/main.rs`,
+    // then `src/bin/main.rs`.
+    fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option<PathBuf> {
+        if !has_lib {
+            let path = package_root.join("src").join(format!("{}.rs", name));
+            if path.exists() {
+                return Some(path);
+            }
+        }
+        let path = package_root.join("src").join("main.rs");
+        if path.exists() {
+            return Some(path);
+        }
+
+        let path = package_root.join("src").join("bin").join("main.rs");
+        if path.exists() {
+            return Some(path);
+        }
+        None
+    }
+}
+
+fn clean_examples(
+    features: &Features,
+    toml_examples: Option<&Vec<TomlExampleTarget>>,
+    package_root: &Path,
+    edition: Edition,
+    autodiscover: Option<bool>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+    let inferred = infer_from_directory(&package_root.join("examples"));
+
+    let targets = clean_targets(
+        "example",
+        "example",
+        toml_examples,
+        &inferred,
+        package_root,
+        edition,
+        autodiscover,
+        warnings,
+        errors,
+        "autoexamples",
+    )?;
+
+    let mut result = Vec::new();
+    for (path, toml) in targets {
+        let crate_types = match toml.crate_types() {
+            Some(kinds) => kinds.iter().map(|s| s.into()).collect(),
+            None => Vec::new(),
+        };
+
+        let mut target = Target::example_target(
+            &toml.name(),
+            crate_types,
+            path,
+            toml.required_features.clone(),
+            edition,
+        );
+        configure(features, &toml, &mut target)?;
+        result.push(target);
+    }
+
+    Ok(result)
+}
+
+/// Discovers the `[[test]]` targets for a package.
+///
+/// Targets declared explicitly in `Cargo.toml` are merged with targets
+/// inferred from files under `tests/` (subject to the `autotests` flag and
+/// edition defaults), then each one is turned into a `Target`.  Path
+/// resolution failures are collected into `errors` instead of aborting so
+/// that every problem can be reported in one pass.
+fn clean_tests(
+    features: &Features,
+    toml_tests: Option<&Vec<TomlTestTarget>>,
+    package_root: &Path,
+    edition: Edition,
+    autodiscover: Option<bool>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+    let inferred = infer_from_directory(&package_root.join("tests"));
+
+    let targets = clean_targets(
+        "test",
+        "test",
+        toml_tests,
+        &inferred,
+        package_root,
+        edition,
+        autodiscover,
+        warnings,
+        errors,
+        "autotests",
+    )?;
+
+    let mut result = Vec::new();
+    for (path, toml) in targets {
+        let mut target = Target::test_target(
+            &toml.name(),
+            path,
+            toml.required_features.clone(),
+            edition,
+        );
+        // Apply per-target knobs (test/doc/bench/harness/...) from the toml.
+        configure(features, &toml, &mut target)?;
+        result.push(target);
+    }
+    Ok(result)
+}
+
+/// Discovers the `[[bench]]` targets for a package.
+///
+/// Same flow as the other target kinds, with one wrinkle: on pre-2018
+/// editions a bench named `bench` is also accepted at the historical
+/// location `src/bench.rs`, with a warning asking for an explicit
+/// `bench.path` in Cargo.toml.
+fn clean_benches(
+    features: &Features,
+    toml_benches: Option<&Vec<TomlBenchTarget>>,
+    package_root: &Path,
+    edition: Edition,
+    autodiscover: Option<bool>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+) -> CargoResult<Vec<Target>> {
+    // Collected separately: the fallback closure below can't also capture
+    // `warnings`, which is passed to `clean_targets_with_legacy_path` as a
+    // distinct `&mut` argument.
+    let mut legacy_warnings = vec![];
+
+    let targets = {
+        // Only fires for a bench literally named `bench` when the legacy
+        // `src/bench.rs` file exists.
+        let mut legacy_bench_path = |bench: &TomlTarget| {
+            let legacy_path = package_root.join("src").join("bench.rs");
+            if !(bench.name() == "bench" && legacy_path.exists()) {
+                return None;
+            }
+            legacy_warnings.push(format!(
+                "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\
+                 please set bench.path in Cargo.toml",
+                legacy_path.display(),
+                bench.name()
+            ));
+            Some(legacy_path)
+        };
+
+        let inferred = infer_from_directory(&package_root.join("benches"));
+
+        clean_targets_with_legacy_path(
+            "benchmark",
+            "bench",
+            toml_benches,
+            &inferred,
+            package_root,
+            edition,
+            autodiscover,
+            warnings,
+            errors,
+            &mut legacy_bench_path,
+            "autobenches",
+        )?
+    };
+
+    warnings.append(&mut legacy_warnings);
+
+    let mut result = Vec::new();
+    for (path, toml) in targets {
+        let mut target = Target::bench_target(
+            &toml.name(),
+            path,
+            toml.required_features.clone(),
+            edition,
+        );
+        configure(features, &toml, &mut target)?;
+        result.push(target);
+    }
+
+    Ok(result)
+}
+
+/// Shared driver for discovering targets of one kind when no legacy
+/// fallback location applies: a thin wrapper around
+/// `clean_targets_with_legacy_path` with a no-op `legacy_path` closure.
+fn clean_targets(
+    target_kind_human: &str,
+    target_kind: &str,
+    toml_targets: Option<&Vec<TomlTarget>>,
+    inferred: &[(String, PathBuf)],
+    package_root: &Path,
+    edition: Edition,
+    autodiscover: Option<bool>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+    autodiscover_flag_name: &str,
+) -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+    clean_targets_with_legacy_path(
+        target_kind_human,
+        target_kind,
+        toml_targets,
+        inferred,
+        package_root,
+        edition,
+        autodiscover,
+        warnings,
+        errors,
+        &mut |_| None,
+        autodiscover_flag_name,
+    )
+}
+
+/// Merges declared and inferred targets, validates names, and resolves each
+/// target's source path.
+///
+/// `legacy_path` is consulted by `target_path` only when a path can't be
+/// resolved normally (and only on pre-2018 editions).  Missing or duplicate
+/// names abort with an error; path-resolution failures are accumulated into
+/// `errors` so the caller can report them all.
+fn clean_targets_with_legacy_path(
+    target_kind_human: &str,
+    target_kind: &str,
+    toml_targets: Option<&Vec<TomlTarget>>,
+    inferred: &[(String, PathBuf)],
+    package_root: &Path,
+    edition: Edition,
+    autodiscover: Option<bool>,
+    warnings: &mut Vec<String>,
+    errors: &mut Vec<String>,
+    legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>,
+    autodiscover_flag_name: &str,
+) -> CargoResult<Vec<(PathBuf, TomlTarget)>> {
+    let toml_targets = toml_targets_and_inferred(
+        toml_targets,
+        inferred,
+        package_root,
+        autodiscover,
+        edition,
+        warnings,
+        target_kind_human,
+        target_kind,
+        autodiscover_flag_name,
+    );
+
+    for target in &toml_targets {
+        validate_has_name(target, target_kind_human, target_kind)?;
+    }
+
+    validate_unique_names(&toml_targets, target_kind)?;
+    let mut result = Vec::new();
+    for target in toml_targets {
+        let path = target_path(&target, inferred, target_kind, package_root, edition, legacy_path);
+        let path = match path {
+            Ok(path) => path,
+            Err(e) => {
+                // Record and keep going; one bad target shouldn't hide others.
+                errors.push(e);
+                continue;
+            }
+        };
+        result.push((path, target));
+    }
+    Ok(result)
+}
+
+/// Returns `src/lib.rs` if it exists: the conventional library target.
+fn inferred_lib(package_root: &Path) -> Option<PathBuf> {
+    let lib = package_root.join("src").join("lib.rs");
+    if fs::metadata(&lib).is_ok() {
+        Some(lib)
+    } else {
+        None
+    }
+}
+
+/// Infers binary targets: `src/main.rs` (named after the package) plus
+/// everything discoverable under `src/bin/`.
+fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> {
+    let main = package_root.join("src").join("main.rs");
+    let mut result = Vec::new();
+    if main.exists() {
+        result.push((package_name.to_string(), main));
+    }
+    result.extend(infer_from_directory(&package_root.join("src").join("bin")));
+
+    result
+}
+
+/// Lists candidate `(name, path)` targets in `directory`: top-level `*.rs`
+/// files plus subdirectories containing a `main.rs`.  A missing or
+/// unreadable directory yields an empty list rather than an error.
+fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> {
+    let entries = match fs::read_dir(directory) {
+        Err(_) => return Vec::new(),
+        Ok(dir) => dir,
+    };
+
+    entries
+        .filter_map(|e| e.ok())
+        .filter(is_not_dotfile)
+        .filter_map(|d| infer_any(&d))
+        .collect()
+}
+
+/// Classifies a directory entry as a file target (`foo.rs`) or a
+/// subdirectory target (`foo/main.rs`), or neither.
+fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> {
+    if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") {
+        infer_file(entry)
+    } else if entry.file_type().map(|t| t.is_dir()).ok() == Some(true) {
+        infer_subdirectory(entry)
+    } else {
+        None
+    }
+}
+
+/// A `foo.rs` file becomes a target named after its stem (`foo`);
+/// entries whose stem is not valid UTF-8 are skipped.
+fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> {
+    let path = entry.path();
+    path.file_stem()
+        .and_then(|p| p.to_str())
+        .map(|p| (p.to_owned(), path.clone()))
+}
+
+/// A subdirectory is a target only if it contains a `main.rs`; the target
+/// is named after the directory.
+fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> {
+    let path = entry.path();
+    let main = path.join("main.rs");
+    let name = path.file_name().and_then(|n| n.to_str());
+    match (name, main.exists()) {
+        (Some(name), true) => Some((name.to_owned(), main)),
+        _ => None,
+    }
+}
+
+/// True for entries whose name is valid UTF-8 and does not start with `.`.
+/// Non-UTF-8 names yield `None`, which fails the `== Some(false)` test and
+/// so are filtered out as well.
+fn is_not_dotfile(entry: &DirEntry) -> bool {
+    entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false)
+}
+
+/// Combines targets declared in `Cargo.toml` with filesystem-inferred ones,
+/// honoring the per-kind autodiscovery flag (`autoexamples`, `autotests`, ...).
+///
+/// * No explicit targets: the inferred set is used verbatim.
+/// * Explicit targets present: inferred targets whose name or path is
+///   already declared are dropped; the remainder is appended only when
+///   autodiscovery is enabled (defaulting to on for the 2018 edition, and
+///   off -- with a migration warning listing the skipped files -- for 2015).
+fn toml_targets_and_inferred(
+    toml_targets: Option<&Vec<TomlTarget>>,
+    inferred: &[(String, PathBuf)],
+    package_root: &Path,
+    autodiscover: Option<bool>,
+    edition: Edition,
+    warnings: &mut Vec<String>,
+    target_kind_human: &str,
+    target_kind: &str,
+    autodiscover_flag_name: &str,
+) -> Vec<TomlTarget> {
+    let inferred_targets = inferred_to_toml_targets(inferred);
+    match toml_targets {
+        None => inferred_targets,
+        Some(targets) => {
+            let mut targets = targets.clone();
+
+            // Declared paths are compared after joining onto the package root.
+            let target_path =
+                |target: &TomlTarget| target.path.clone().map(|p| package_root.join(p.0));
+
+            let mut seen_names = HashSet::new();
+            let mut seen_paths = HashSet::new();
+            for target in targets.iter() {
+                seen_names.insert(target.name.clone());
+                seen_paths.insert(target_path(target));
+            }
+
+            // Inferred targets not shadowed by an explicit declaration.
+            let mut rem_targets = vec![];
+            for target in inferred_targets {
+                if !seen_names.contains(&target.name) && !seen_paths.contains(&target_path(&target))
+                {
+                    rem_targets.push(target);
+                }
+            }
+
+            let autodiscover = match autodiscover {
+                Some(autodiscover) => autodiscover,
+                None => match edition {
+                    Edition::Edition2018 => true,
+                    Edition::Edition2015 => {
+                        // 2015 keeps the old "an explicit section disables
+                        // inference" behavior, but warns about files the
+                        // 2018 edition would pick up.
+                        if !rem_targets.is_empty() {
+                            let mut rem_targets_str = String::new();
+                            for t in rem_targets.iter() {
+                                if let Some(p) = t.path.clone() {
+                                    rem_targets_str.push_str(&format!("* {}\n", p.0.display()))
+                                }
+                            }
+                            warnings.push(format!(
+                                "\
+An explicit [[{section}]] section is specified in Cargo.toml which currently
+disables Cargo from automatically inferring other {target_kind_human} targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a {target_kind_human} target:
+
+{rem_targets_str}
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a {target_kind_human} target today. You can future-proof yourself
+and disable this warning by adding `{autodiscover_flag_name} = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330",
+                                section = target_kind,
+                                target_kind_human = target_kind_human,
+                                rem_targets_str = rem_targets_str,
+                                autodiscover_flag_name = autodiscover_flag_name,
+                            ));
+                        };
+                        false
+                    }
+                },
+            };
+
+            if autodiscover {
+                targets.append(&mut rem_targets);
+            }
+
+            targets
+        }
+    }
+}
+
+/// Lifts inferred `(name, path)` pairs into minimal `TomlTarget`s so both
+/// sources flow through the same validation code.
+fn inferred_to_toml_targets(inferred: &[(String, PathBuf)]) -> Vec<TomlTarget> {
+    inferred
+        .iter()
+        .map(|&(ref name, ref path)| TomlTarget {
+            name: Some(name.clone()),
+            path: Some(PathValue(path.clone())),
+            ..TomlTarget::new()
+        })
+        .collect()
+}
+
+/// Ensures a declared target has a non-blank `name`.  `target_kind` is the
+/// Cargo.toml key (e.g. `bench`) used to phrase the "name is required" error.
+fn validate_has_name(
+    target: &TomlTarget,
+    target_kind_human: &str,
+    target_kind: &str,
+) -> CargoResult<()> {
+    match target.name {
+        Some(ref name) => if name.trim().is_empty() {
+            bail!("{} target names cannot be empty", target_kind_human)
+        },
+        None => bail!(
+            "{} target {}.name is required",
+            target_kind_human,
+            target_kind
+        ),
+    }
+
+    Ok(())
+}
+
+/// Will check a list of toml targets, and make sure the target names are unique within a vector.
+fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> {
+    let mut seen = HashSet::new();
+    for name in targets.iter().map(|e| e.name()) {
+        // `insert` returns false when the name was already present.
+        if !seen.insert(name.clone()) {
+            bail!(
+                "found duplicate {target_kind} name {name}, \
+                 but all {target_kind} targets must have a unique name",
+                target_kind = target_kind,
+                name = name
+            );
+        }
+    }
+    Ok(())
+}
+
+/// Copies per-target flags from the toml declaration onto `target`, keeping
+/// the target's own defaults for anything left unset in Cargo.toml.
+fn configure(
+    features: &Features,
+    toml: &TomlTarget,
+    target: &mut Target,
+) -> CargoResult<()> {
+    // Snapshot the defaults first, since the setters below mutate `target`.
+    let t2 = target.clone();
+    target
+        .set_tested(toml.test.unwrap_or_else(|| t2.tested()))
+        .set_doc(toml.doc.unwrap_or_else(|| t2.documented()))
+        .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested()))
+        .set_benched(toml.bench.unwrap_or_else(|| t2.benched()))
+        .set_harness(toml.harness.unwrap_or_else(|| t2.harness()))
+        // Either `plugin = true` or `proc-macro = true` forces a host build.
+        .set_for_host(match (toml.plugin, toml.proc_macro()) {
+            (None, None) => t2.for_host(),
+            (Some(true), _) | (_, Some(true)) => true,
+            (Some(false), _) | (_, Some(false)) => false,
+        });
+    if let Some(edition) = toml.edition.clone() {
+        // Per-target `edition` keys are feature-gated at this point.
+        features.require(Feature::edition()).chain_err(|| "editions are unstable")?;
+        target.set_edition(edition.parse().chain_err(|| "failed to parse the `edition` key")?);
+    }
+    Ok(())
+}
+
+/// Resolves the source path for `target`, in order of preference:
+/// 1. an explicit `path` key (joined onto `package_root`),
+/// 2. a unique filesystem-inferred match by name,
+/// 3. on pre-2018 editions only, the kind-specific `legacy_path` fallback.
+/// Zero inferred matches (without a legacy hit) or multiple matches is an
+/// error, returned as a plain message for the caller to collect.
+fn target_path(
+    target: &TomlTarget,
+    inferred: &[(String, PathBuf)],
+    target_kind: &str,
+    package_root: &Path,
+    edition: Edition,
+    legacy_path: &mut FnMut(&TomlTarget) -> Option<PathBuf>,
+) -> Result<PathBuf, String> {
+    if let Some(ref path) = target.path {
+        // Should we verify that this path exists here?
+        return Ok(package_root.join(&path.0));
+    }
+    let name = target.name();
+
+    let mut matching = inferred
+        .iter()
+        .filter(|&&(ref n, _)| n == &name)
+        .map(|&(_, ref p)| p.clone());
+
+    // Pull at most two matches to distinguish "unique" from "ambiguous".
+    let first = matching.next();
+    let second = matching.next();
+    match (first, second) {
+        (Some(path), None) => Ok(path),
+        (None, None) | (Some(_), Some(_)) => {
+            if edition < Edition::Edition2018 {
+                if let Some(path) = legacy_path(target) {
+                    return Ok(path);
+                }
+            }
+            Err(format!(
+                "can't find `{name}` {target_kind}, specify {target_kind}.path",
+                name = name,
+                target_kind = target_kind
+            ))
+        }
+        // An iterator can't produce a second item without a first.
+        (None, Some(_)) => unreachable!(),
+    }
+}
diff --git a/src/cargo/util/vcs.rs b/src/cargo/util/vcs.rs
new file mode 100644 (file)
index 0000000..a5c047d
--- /dev/null
@@ -0,0 +1,94 @@
+use std::path::Path;
+use std::fs::create_dir;
+
+use git2;
+
+use util::{process, CargoResult};
+
+// Check if we are in an existing repo. We define that to be true if either:
+//
+// 1. We are in a git repo and the path to the new package is not an ignored
+//    path in that repo.
+// 2. We are in an HG repo.
+pub fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool {
+    fn in_git_repo(path: &Path, cwd: &Path) -> bool {
+        if let Ok(repo) = GitRepo::discover(path, cwd) {
+            // If the ignore check itself fails we conservatively treat the
+            // path as inside the repo (`unwrap_or(true)`).
+            repo.is_path_ignored(path).map(|ignored| !ignored).unwrap_or(true)
+        } else { false }
+    }
+
+    in_git_repo(path, cwd) || HgRepo::discover(path, cwd).is_ok()
+}
+
+// Unit marker types: a value of one of these proves the corresponding VCS
+// operation (init/discover) succeeded; they carry no state.
+pub struct HgRepo;
+pub struct GitRepo;
+pub struct PijulRepo;
+pub struct FossilRepo;
+
+impl GitRepo {
+    /// Creates a new git repository at `path` via libgit2 (the second
+    /// argument, a cwd, is unused here).
+    pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> {
+        git2::Repository::init(path)?;
+        Ok(GitRepo)
+    }
+    /// Walks up from `path` looking for an enclosing git repository.
+    /// Note: returns the `git2::Repository` itself, not the marker type.
+    pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> {
+        git2::Repository::discover(path)
+    }
+}
+
+impl HgRepo {
+    /// Runs `hg init <path>` from `cwd`.
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+        process("hg").cwd(cwd).arg("init").arg(path).exec()?;
+        Ok(HgRepo)
+    }
+    /// Detects an enclosing Mercurial repo by running `hg --cwd <path> root`;
+    /// success of the command is the signal, its output is discarded.
+    pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+        process("hg")
+            .cwd(cwd)
+            .arg("--cwd")
+            .arg(path)
+            .arg("root")
+            .exec_with_output()?;
+        Ok(HgRepo)
+    }
+}
+
+impl PijulRepo {
+    /// Runs `pijul init <path>` from `cwd`.
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<PijulRepo> {
+        process("pijul").cwd(cwd).arg("init").arg(path).exec()?;
+        Ok(PijulRepo)
+    }
+}
+
+impl FossilRepo {
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<FossilRepo> {
+        // fossil doesn't create the directory so we'll do that first
+        create_dir(path)?;
+
+        // set up the paths we'll use
+        let db_fname = ".fossil";
+        let mut db_path = path.to_owned();
+        db_path.push(db_fname);
+
+        // then create the fossil DB in that location
+        process("fossil").cwd(cwd).arg("init").arg(&db_path).exec()?;
+
+        // open it in that new directory
+        process("fossil")
+            .cwd(&path)
+            .arg("open")
+            .arg(db_fname)
+            .exec()?;
+
+        // set `target` as ignoreable and cleanable
+        process("fossil")
+            .cwd(cwd)
+            .arg("settings")
+            .arg("ignore-glob")
+            .arg("target");
+        process("fossil")
+            .cwd(cwd)
+            .arg("settings")
+            .arg("clean-glob")
+            .arg("target");
+        Ok(FossilRepo)
+    }
+}
diff --git a/src/crates-io/Cargo.toml b/src/crates-io/Cargo.toml
new file mode 100644 (file)
index 0000000..8b0bd9c
--- /dev/null
@@ -0,0 +1,21 @@
+[package]
+name = "crates-io"
+version = "0.20.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/cargo"
+description = """
+Helpers for interacting with crates.io
+"""
+
+[lib]
+name = "crates_io"
+path = "lib.rs"
+
+[dependencies]
+curl = "0.4"
+failure = "0.1.1"
+serde = "1.0"
+serde_derive = "1.0"
+serde_json = "1.0"
+url = "1.0"
diff --git a/src/crates-io/LICENSE-APACHE b/src/crates-io/LICENSE-APACHE
new file mode 120000 (symlink)
index 0000000..1cd601d
--- /dev/null
@@ -0,0 +1 @@
+../../LICENSE-APACHE
\ No newline at end of file
diff --git a/src/crates-io/LICENSE-MIT b/src/crates-io/LICENSE-MIT
new file mode 120000 (symlink)
index 0000000..b2cfbdc
--- /dev/null
@@ -0,0 +1 @@
+../../LICENSE-MIT
\ No newline at end of file
diff --git a/src/crates-io/lib.rs b/src/crates-io/lib.rs
new file mode 100644 (file)
index 0000000..6de2f30
--- /dev/null
@@ -0,0 +1,339 @@
+#![allow(unknown_lints)]
+#![cfg_attr(feature = "cargo-clippy", allow(identity_op))] // used for vertical alignment
+
+extern crate curl;
+#[macro_use]
+extern crate failure;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde_json;
+extern crate url;
+
+use std::collections::BTreeMap;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::Cursor;
+
+use curl::easy::{Easy, List};
+use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
+
+/// Result alias used throughout this crate; errors are `failure::Error`.
+pub type Result<T> = std::result::Result<T, failure::Error>;
+
+/// Client for a crates.io-compatible registry HTTP API.
+pub struct Registry {
+    /// Base host URL, e.g. `https://crates.io` (paths add `/api/v1` below).
+    host: String,
+    /// API token; required for authorized endpoints.
+    token: Option<String>,
+    /// Reusable curl handle for all requests.
+    handle: Easy,
+}
+
+/// Whether a request must carry the `Authorization` header.
+#[derive(PartialEq, Clone, Copy)]
+pub enum Auth {
+    Authorized,
+    Unauthorized,
+}
+
+/// A crate summary as returned by the search endpoint.
+#[derive(Deserialize)]
+pub struct Crate {
+    pub name: String,
+    pub description: Option<String>,
+    pub max_version: String,
+}
+
+/// Metadata JSON sent alongside the tarball when publishing a new version.
+#[derive(Serialize)]
+pub struct NewCrate {
+    pub name: String,
+    pub vers: String,
+    pub deps: Vec<NewCrateDependency>,
+    pub features: BTreeMap<String, Vec<String>>,
+    pub authors: Vec<String>,
+    pub description: Option<String>,
+    pub documentation: Option<String>,
+    pub homepage: Option<String>,
+    pub readme: Option<String>,
+    pub readme_file: Option<String>,
+    pub keywords: Vec<String>,
+    pub categories: Vec<String>,
+    pub license: Option<String>,
+    pub license_file: Option<String>,
+    pub repository: Option<String>,
+    pub badges: BTreeMap<String, BTreeMap<String, String>>,
+    #[serde(default)] pub links: Option<String>,
+}
+
+/// One dependency entry inside a `NewCrate` publish request.
+#[derive(Serialize)]
+pub struct NewCrateDependency {
+    pub optional: bool,
+    pub default_features: bool,
+    pub name: String,
+    pub features: Vec<String>,
+    pub version_req: String,
+    pub target: Option<String>,
+    pub kind: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub registry: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub explicit_name_in_toml: Option<String>,
+}
+
+/// A registry user, as returned by the owners endpoints.
+#[derive(Deserialize)]
+pub struct User {
+    pub id: u32,
+    pub login: String,
+    pub avatar: Option<String>,
+    pub email: Option<String>,
+    pub name: Option<String>,
+}
+
+/// Non-fatal problems reported by the server after a publish.
+pub struct Warnings {
+    pub invalid_categories: Vec<String>,
+    pub invalid_badges: Vec<String>,
+}
+
+// Private wire-format types mirroring the server's JSON responses.
+#[derive(Deserialize)]
+struct R {
+    ok: bool,
+}
+#[derive(Deserialize)]
+struct OwnerResponse {
+    ok: bool,
+    msg: String,
+}
+#[derive(Deserialize)]
+struct ApiErrorList {
+    errors: Vec<ApiError>,
+}
+#[derive(Deserialize)]
+struct ApiError {
+    detail: String,
+}
+#[derive(Serialize)]
+struct OwnersReq<'a> {
+    users: &'a [&'a str],
+}
+#[derive(Deserialize)]
+struct Users {
+    users: Vec<User>,
+}
+#[derive(Deserialize)]
+struct TotalCrates {
+    total: u32,
+}
+#[derive(Deserialize)]
+struct Crates {
+    crates: Vec<Crate>,
+    meta: TotalCrates,
+}
+impl Registry {
+    /// Creates a client for `host` with a fresh curl handle.
+    pub fn new(host: String, token: Option<String>) -> Registry {
+        Registry::new_handle(host, token, Easy::new())
+    }
+
+    /// Creates a client reusing an already-configured curl `handle`.
+    pub fn new_handle(host: String, token: Option<String>, handle: Easy) -> Registry {
+        Registry {
+            host,
+            token,
+            handle,
+        }
+    }
+
+    /// Adds `owners` as owners of `krate`; returns the server's message.
+    pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<String> {
+        let body = serde_json::to_string(&OwnersReq { users: owners })?;
+        let body = self.put(&format!("/crates/{}/owners", krate), body.as_bytes())?;
+        // NOTE(review): the response is deserialized twice here; parsing once
+        // into a local would avoid the duplicate work.
+        assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
+        Ok(serde_json::from_str::<OwnerResponse>(&body)?.msg)
+    }
+
+    /// Removes `owners` as owners of `krate`.
+    pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
+        let body = serde_json::to_string(&OwnersReq { users: owners })?;
+        let body = self.delete(&format!("/crates/{}/owners", krate), Some(body.as_bytes()))?;
+        assert!(serde_json::from_str::<OwnerResponse>(&body)?.ok);
+        Ok(())
+    }
+
+    /// Lists the current owners of `krate`.
+    pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
+        let body = self.get(&format!("/crates/{}/owners", krate))?;
+        Ok(serde_json::from_str::<Users>(&body)?.users)
+    }
+
+    /// Publishes a new crate version: streams the metadata JSON and the
+    /// `.crate` tarball to `/api/v1/crates/new` and returns any server-side
+    /// warnings.  Requires a login token.
+    pub fn publish(&mut self, krate: &NewCrate, tarball: &File) -> Result<Warnings> {
+        let json = serde_json::to_string(krate)?;
+        // Prepare the body. The format of the upload request is:
+        //
+        //      <le u32 of json>
+        //      <json request> (metadata for the package)
+        //      <le u32 of tarball>
+        //      <source tarball>
+        let stat = tarball.metadata()?;
+        let header = {
+            let mut w = Vec::new();
+            // Little-endian u32 length prefix, packed byte by byte.
+            w.extend(
+                [
+                    (json.len() >> 0) as u8,
+                    (json.len() >> 8) as u8,
+                    (json.len() >> 16) as u8,
+                    (json.len() >> 24) as u8,
+                ].iter().cloned(),
+            );
+            w.extend(json.as_bytes().iter().cloned());
+            w.extend(
+                [
+                    (stat.len() >> 0) as u8,
+                    (stat.len() >> 8) as u8,
+                    (stat.len() >> 16) as u8,
+                    (stat.len() >> 24) as u8,
+                ].iter().cloned(),
+            );
+            w
+        };
+        let size = stat.len() as usize + header.len();
+        let mut body = Cursor::new(header).chain(tarball);
+
+        let url = format!("{}/api/v1/crates/new", self.host);
+
+        let token = match self.token.as_ref() {
+            Some(s) => s,
+            None => bail!("no upload token found, please run `cargo login`"),
+        };
+        self.handle.put(true)?;
+        self.handle.url(&url)?;
+        self.handle.in_filesize(size as u64)?;
+        let mut headers = List::new();
+        headers.append("Accept: application/json")?;
+        headers.append(&format!("Authorization: {}", token))?;
+        self.handle.http_headers(headers)?;
+
+        let body = handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0))?;
+
+        // An empty response body is treated as an empty JSON object.
+        let response = if body.is_empty() {
+            "{}".parse()?
+        } else {
+            body.parse::<serde_json::Value>()?
+        };
+
+        // Optional warning lists; absent or malformed fields become empty.
+        let invalid_categories: Vec<String> = response
+            .get("warnings")
+            .and_then(|j| j.get("invalid_categories"))
+            .and_then(|j| j.as_array())
+            .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+            .unwrap_or_else(Vec::new);
+
+        let invalid_badges: Vec<String> = response
+            .get("warnings")
+            .and_then(|j| j.get("invalid_badges"))
+            .and_then(|j| j.as_array())
+            .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect())
+            .unwrap_or_else(Vec::new);
+
+        Ok(Warnings {
+            invalid_categories,
+            invalid_badges,
+        })
+    }
+
+    /// Searches the registry; returns up to `limit` matching crates and the
+    /// total number of matches.  The query is percent-encoded; no auth.
+    pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec<Crate>, u32)> {
+        let formatted_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET);
+        let body = self.req(
+            &format!("/crates?q={}&per_page={}", formatted_query, limit),
+            None,
+            Auth::Unauthorized,
+        )?;
+
+        let crates = serde_json::from_str::<Crates>(&body)?;
+        Ok((crates.crates, crates.meta.total))
+    }
+
+    /// Yanks `version` of `krate` so new projects can't depend on it.
+    pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
+        let body = self.delete(&format!("/crates/{}/{}/yank", krate, version), None)?;
+        assert!(serde_json::from_str::<R>(&body)?.ok);
+        Ok(())
+    }
+
+    /// Reverses a previous yank of `version` of `krate`.
+    pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
+        let body = self.put(&format!("/crates/{}/{}/unyank", krate, version), &[])?;
+        assert!(serde_json::from_str::<R>(&body)?.ok);
+        Ok(())
+    }
+
+    /// Authorized PUT with body `b`.
+    fn put(&mut self, path: &str, b: &[u8]) -> Result<String> {
+        self.handle.put(true)?;
+        self.req(path, Some(b), Auth::Authorized)
+    }
+
+    /// Authorized GET.
+    fn get(&mut self, path: &str) -> Result<String> {
+        self.handle.get(true)?;
+        self.req(path, None, Auth::Authorized)
+    }
+
+    /// Authorized DELETE with optional body.
+    fn delete(&mut self, path: &str, b: Option<&[u8]>) -> Result<String> {
+        self.handle.custom_request("DELETE")?;
+        self.req(path, b, Auth::Authorized)
+    }
+
+    /// Shared request plumbing: builds the `/api/v1` URL and headers,
+    /// attaches the token when `authorized`, then performs the transfer.
+    fn req(&mut self, path: &str, body: Option<&[u8]>, authorized: Auth) -> Result<String> {
+        self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
+        let mut headers = List::new();
+        headers.append("Accept: application/json")?;
+        headers.append("Content-Type: application/json")?;
+
+        if authorized == Auth::Authorized {
+            let token = match self.token.as_ref() {
+                Some(s) => s,
+                None => bail!("no upload token found, please run `cargo login`"),
+            };
+            headers.append(&format!("Authorization: {}", token))?;
+        }
+        self.handle.http_headers(headers)?;
+        match body {
+            Some(mut body) => {
+                self.handle.upload(true)?;
+                self.handle.in_filesize(body.len() as u64)?;
+                handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0))
+            }
+            // No body: the read callback immediately signals EOF.
+            None => handle(&mut self.handle, &mut |_| 0),
+        }
+    }
+}
+
+/// Performs the configured request on `handle`, streaming the request body
+/// from `read` and buffering response headers and body.
+///
+/// Status 0 (no HTTP status; seen on some upload URLs) and 200 are success;
+/// 403/404 get short messages; any other code dumps headers and body.  Even
+/// a successful response is rejected if its body decodes as an API error
+/// list.
+fn handle(handle: &mut Easy, read: &mut FnMut(&mut [u8]) -> usize) -> Result<String> {
+    let mut headers = Vec::new();
+    let mut body = Vec::new();
+    {
+        // `transfer()` scopes the callback borrows so they end before we
+        // query the response code below.
+        let mut handle = handle.transfer();
+        handle.read_function(|buf| Ok(read(buf)))?;
+        handle.write_function(|data| {
+            body.extend_from_slice(data);
+            Ok(data.len())
+        })?;
+        handle.header_function(|data| {
+            headers.push(String::from_utf8_lossy(data).into_owned());
+            true
+        })?;
+        handle.perform()?;
+    }
+
+    match handle.response_code()? {
+        0 => {} // file upload url sometimes
+        200 => {}
+        403 => bail!("received 403 unauthorized response code"),
+        404 => bail!("received 404 not found response code"),
+        code => bail!(
+            "failed to get a 200 OK response, got {}\n\
+             headers:\n\
+             \t{}\n\
+             body:\n\
+             {}",
+            code,
+            headers.join("\n\t"),
+            String::from_utf8_lossy(&body)
+        ),
+    }
+
+    let body = match String::from_utf8(body) {
+        Ok(body) => body,
+        Err(..) => bail!("response body was not valid utf-8"),
+    };
+    // Structured errors can arrive with a success status; surface them.
+    if let Ok(errors) = serde_json::from_str::<ApiErrorList>(&body) {
+        let errors = errors.errors.into_iter().map(|s| s.detail);
+        bail!("api errors: {}", errors.collect::<Vec<_>>().join(", "));
+    }
+    Ok(body)
+}
diff --git a/src/doc/README.md b/src/doc/README.md
new file mode 100644 (file)
index 0000000..983c966
--- /dev/null
@@ -0,0 +1,47 @@
+# The Cargo Book
+
+
+### Requirements
+
+Building the book requires [mdBook]. To get it:
+
+[mdBook]: https://github.com/azerupi/mdBook
+
+```console
+$ cargo install mdbook
+```
+
+### Building
+
+To build the book:
+
+```console
+$ mdbook build
+```
+
+The output will be in the `book` subdirectory. To check it out, open it in
+your web browser.
+
+_Firefox:_
+```console
+$ firefox book/index.html                       # Linux
+$ open -a "Firefox" book/index.html             # OS X
+$ Start-Process "firefox.exe" .\book\index.html # Windows (PowerShell)
+$ start firefox.exe .\book\index.html           # Windows (Cmd)
+```
+
+_Chrome:_
+```console
+$ google-chrome book/index.html                 # Linux
+$ open -a "Google Chrome" book/index.html       # OS X
+$ Start-Process "chrome.exe" .\book\index.html  # Windows (PowerShell)
+$ start chrome.exe .\book\index.html            # Windows (Cmd)
+```
+
+
+## Contributing
+
+Given that the book is still in a draft state, we'd love your help! Please feel free to open
+issues about anything, and send in PRs for things you'd like to fix or change. If your change is
+large, please open an issue first, so we can make sure that it's something we'd accept before you
+go through the work of getting a PR together.
diff --git a/src/doc/book.toml b/src/doc/book.toml
new file mode 100644 (file)
index 0000000..1f21e1e
--- /dev/null
@@ -0,0 +1,2 @@
+title = "The Cargo Book"
+author = "Alex Crichton, Steve Klabnik and Carol Nichols, with Contributions from the Rust Community"
diff --git a/src/doc/src/SUMMARY.md b/src/doc/src/SUMMARY.md
new file mode 100644 (file)
index 0000000..8b3dfac
--- /dev/null
@@ -0,0 +1,32 @@
+# Summary
+
+[Introduction](index.md)
+
+* [Getting Started](getting-started/index.md)
+    * [Installation](getting-started/installation.md)
+    * [First Steps with Cargo](getting-started/first-steps.md)
+
+* [Cargo Guide](guide/index.md)
+    * [Why Cargo Exists](guide/why-cargo-exists.md)
+    * [Creating a New Package](guide/creating-a-new-project.md)
+    * [Working on an Existing Package](guide/working-on-an-existing-project.md)
+    * [Dependencies](guide/dependencies.md)
+    * [Package Layout](guide/project-layout.md)
+    * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md)
+    * [Tests](guide/tests.md)
+    * [Continuous Integration](guide/continuous-integration.md)
+    * [Build Cache](guide/build-cache.md)
+
+* [Cargo Reference](reference/index.md)
+    * [Specifying Dependencies](reference/specifying-dependencies.md)
+    * [The Manifest Format](reference/manifest.md)
+    * [Configuration](reference/config.md)
+    * [Environment Variables](reference/environment-variables.md)
+    * [Build Scripts](reference/build-scripts.md)
+    * [Publishing on crates.io](reference/publishing.md)
+    * [Package ID Specifications](reference/pkgid-spec.md)
+    * [Source Replacement](reference/source-replacement.md)
+    * [External Tools](reference/external-tools.md)
+    * [Unstable Features](reference/unstable.md)
+
+* [FAQ](faq.md)
diff --git a/src/doc/src/faq.md b/src/doc/src/faq.md
new file mode 100644 (file)
index 0000000..108dfc2
--- /dev/null
@@ -0,0 +1,193 @@
+## Frequently Asked Questions
+
+### Is the plan to use GitHub as a package repository?
+
+No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with
+npmjs.org and rubygems.org.
+
+We plan to support git repositories as a source of packages forever,
+because they can be used for early development and temporary patches,
+even when people use the registry as the primary source of packages.
+
+### Why build crates.io rather than use GitHub as a registry?
+
+We think that it’s very important to support multiple ways to download
+packages, including downloading from GitHub and copying packages into
+your package itself.
+
+That said, we think that [crates.io] offers a number of important benefits, and
+will likely become the primary way that people download packages in Cargo.
+
+For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a
+central registry model as well as a Git-based model, and most packages
+are downloaded through the registry in those ecosystems, with an
+important minority of packages making use of git-based packages.
+
+[1]: https://www.npmjs.org
+[2]: https://bundler.io
+
+Some of the advantages that make a central registry popular in other
+languages include:
+
+* **Discoverability**. A central registry provides an easy place to look
+  for existing packages. Combined with tagging, this also makes it
+  possible for a registry to provide ecosystem-wide information, such as a
+  list of the most popular or most-depended-on packages.
+* **Speed**. A central registry makes it possible to easily fetch just
+  the metadata for packages quickly and efficiently, and then to
+  efficiently download just the published package, and not other bloat
+  that happens to exist in the repository. This adds up to a significant
+  improvement in the speed of dependency resolution and fetching. As
+  dependency graphs scale up, downloading all of the git repositories bogs
+  down fast. Also remember that not everybody has a high-speed,
+  low-latency Internet connection.
+
+### Will Cargo work with C code (or other languages)?
+
+Yes!
+
+Cargo handles compiling Rust code, but we know that many Rust packages
+link against C code. We also know that there are decades of tooling
+built up around compiling languages other than Rust.
+
+Our solution: Cargo allows a package to [specify a script](reference/build-scripts.html)
+(written in Rust) to run before invoking `rustc`. Rust is leveraged to
+implement platform-specific configuration and refactor out common build
+functionality among packages.
+
+### Can Cargo be used inside of `make` (or `ninja`, or ...)?
+
+Indeed. While we intend Cargo to be useful as a standalone way to
+compile Rust packages at the top-level, we know that some people will
+want to invoke Cargo from other build tools.
+
+We have designed Cargo to work well in those contexts, paying attention
+to things like error codes and machine-readable output modes. We still
+have some work to do on those fronts, but using Cargo in the context of
+conventional scripts is something we designed for from the beginning and
+will continue to prioritize.
+
+### Does Cargo handle multi-platform packages or cross-compilation?
+
+Rust itself provides facilities for configuring sections of code based
+on the platform. Cargo also supports [platform-specific
+dependencies][target-deps], and we plan to support more per-platform
+configuration in `Cargo.toml` in the future.
+
+[target-deps]: reference/specifying-dependencies.html#platform-specific-dependencies
+
+In the longer-term, we’re looking at ways to conveniently cross-compile
+packages using Cargo.
+
+### Does Cargo support environments, like `production` or `test`?
+
+We support environments through the use of [profiles][profile] to support:
+
+[profile]: reference/manifest.html#the-profile-sections
+
+* environment-specific flags (like `-g --opt-level=0` for development
+  and `--opt-level=3` for production).
+* environment-specific dependencies (like `hamcrest` for test assertions).
+* environment-specific `#[cfg]`
+* a `cargo test` command
+
+### Does Cargo work on Windows?
+
+Yes!
+
+All commits to Cargo are required to pass the local test suite on Windows.
+If, however, you find a Windows issue, we consider it a bug, so [please file an
+issue][3].
+
+[3]: https://github.com/rust-lang/cargo/issues
+
+### Why do binaries have `Cargo.lock` in version control, but not libraries?
+
+The purpose of a `Cargo.lock` is to describe the state of the world at the time
+of a successful build. It is then used to provide deterministic builds across
+whatever machine is building the package by ensuring that the exact same
+dependencies are being compiled.
+
+This property is most desirable for applications and packages which are at the
+very end of the dependency chain (binaries). As a result, it is recommended that
+all binaries check in their `Cargo.lock`.
+
+For libraries the situation is somewhat different. A library is not only used by
+the library developers, but also any downstream consumers of the library. Users
+dependent on the library will not inspect the library’s `Cargo.lock` (even if it
+exists). This is precisely because a library should **not** be deterministically
+recompiled for all users of the library.
+
+If a library ends up being used transitively by several dependencies, it’s
+likely that just a single copy of the library is desired (based on semver
+compatibility). If Cargo used all of the dependencies' `Cargo.lock` files,
+then multiple copies of the library could be used, and perhaps even a version
+conflict.
+
+In other words, libraries specify semver requirements for their dependencies but
+cannot see the full picture. Only end products like binaries have a full
+picture to decide what versions of dependencies should be used.
+
+### Can libraries use `*` as a version for their dependencies?
+
+**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries)
+with wildcard dependency constraints.**
+
+While libraries _can_, strictly speaking, they should not. A version requirement
+of `*` says “This will work with every version ever,” which is never going
+to be true. Libraries should always specify the range that they do work with,
+even if it’s something as general as “every 1.x.y version.”
+
+### Why `Cargo.toml`?
+
+As one of the most frequent interactions with Cargo, the question of why the
+configuration file is named `Cargo.toml` arises from time to time. The leading
+capital-`C` was chosen to ensure that the manifest was grouped with other
+similar configuration files in directory listings. Sorting files often puts
+capital letters before lowercase letters, ensuring files like `Makefile` and
+`Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize
+the fact that the file is in the [TOML configuration
+format](https://github.com/toml-lang/toml).
+
+Cargo does not allow other names such as `cargo.toml` or `Cargofile` to
+emphasize the ease of how a Cargo repository can be identified. An option of
+many possible names has historically led to confusion where one case was handled
+but others were accidentally forgotten.
+
+[crates.io]: https://crates.io/
+
+### How can Cargo work offline?
+
+Cargo is often used in situations with limited or no network access such as
+airplanes, CI environments, or embedded in large production deployments. Users
+are often surprised when Cargo attempts to fetch resources from the network, and
+hence the request for Cargo to work offline comes up frequently.
+
+Cargo, at its heart, will not attempt to access the network unless told to do
+so. That is, if no crates come from crates.io, a git repository, or some other
+network location, Cargo will never attempt to make a network connection. As a
+result, if Cargo attempts to touch the network, then it's because it needs to
+fetch a required resource.
+
+Cargo is also quite aggressive about caching information to minimize the amount
+of network activity. It will guarantee, for example, that if `cargo build` (or
+an equivalent) is run to completion, then the next `cargo build` is guaranteed to
+not touch the network so long as `Cargo.toml` has not been modified in the
+meantime. This avoidance of the network boils down to a `Cargo.lock` existing
+and a populated cache of the crates reflected in the lock file. If either of
+these components is missing, then it's required for the build to succeed and
+must be fetched remotely.
+
+As of Rust 1.11.0 Cargo understands a new flag, `--frozen`, which is an
+assertion that it shouldn't touch the network. When passed, Cargo will
+immediately return an error if it would otherwise attempt a network request.
+The error should include contextual information about why the network request is
+being made in the first place to help debug as well. Note that this flag *does
+not change the behavior of Cargo*, it simply asserts that Cargo shouldn't touch
+the network as a previous command has been run to ensure that network activity
+shouldn't be necessary.
+
+For more information about vendoring, see documentation on [source
+replacement][replace].
+
+[replace]: reference/source-replacement.html
diff --git a/src/doc/src/getting-started/first-steps.md b/src/doc/src/getting-started/first-steps.md
new file mode 100644 (file)
index 0000000..018c1db
--- /dev/null
@@ -0,0 +1,70 @@
+## First Steps with Cargo
+
+To start a new package with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world
+```
+
+Cargo defaults to `--bin` to make a binary program. To make a library, we'd
+pass `--lib`.
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+    └── main.rs
+
+1 directory, 2 files
+```
+
+This is all we need to get started. First, let’s check out `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+```
+
+This is called a **manifest**, and it contains all of the metadata that Cargo
+needs to compile your package.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+    println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” for us. Let’s compile it:
+
+```console
+$ cargo build
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step:
+
+```console
+$ cargo run
+     Fresh hello_world v0.1.0 (file:///path/to/package/hello_world)
+   Running `target/debug/hello_world`
+Hello, world!
+```
+
+### Going further
+
+For more details on using Cargo, check out the [Cargo Guide](guide/index.html).
diff --git a/src/doc/src/getting-started/index.md b/src/doc/src/getting-started/index.md
new file mode 100644 (file)
index 0000000..22a7315
--- /dev/null
@@ -0,0 +1,6 @@
+## Getting Started
+
+To get started with Cargo, install Cargo (and Rust) and set up your first crate.
+
+* [Installation](getting-started/installation.html)
+* [First steps with Cargo](getting-started/first-steps.html)
diff --git a/src/doc/src/getting-started/installation.md b/src/doc/src/getting-started/installation.md
new file mode 100644 (file)
index 0000000..186c9da
--- /dev/null
@@ -0,0 +1,37 @@
+## Installation
+
+### Install Rust and Cargo
+
+The easiest way to get Cargo is to install the current stable release of [Rust]
+by using `rustup`.
+
+On Linux and macOS systems, this is done as follows:
+
+```console
+$ curl -sSf https://static.rust-lang.org/rustup.sh | sh
+```
+
+It will download a script, and start the installation. If everything goes well,
+you’ll see this appear:
+
+```console
+Rust is installed now. Great! 
+```
+
+On Windows, download and run [rustup-init.exe]. It will start the installation
+in a console and present the above message on success.
+
+After this, you can use the `rustup` command to also install `beta` or `nightly`
+channels for Rust and Cargo.
+
+For other installation options and information, visit the
+[install][install-rust] page of the Rust website.
+
+### Build and Install Cargo from Source
+
+Alternatively, you can [build Cargo from source][compiling-from-source].
+
+[rust]: https://www.rust-lang.org/
+[rustup-init.exe]: https://win.rustup.rs/
+[install-rust]: https://www.rust-lang.org/install.html
+[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source
diff --git a/src/doc/src/guide/build-cache.md b/src/doc/src/guide/build-cache.md
new file mode 100644 (file)
index 0000000..d253b8a
--- /dev/null
@@ -0,0 +1,14 @@
+## Build cache
+
+Cargo shares build artifacts among all the packages of a single workspace.
+Today, Cargo does not share build results across different workspaces, but
+a similar result can be achieved by using a third party tool, [sccache].
+
+To set up `sccache`, install it with `cargo install sccache` and set the
+`RUSTC_WRAPPER` environment variable to `sccache` before invoking Cargo.
+If you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to
+`.bashrc`. Refer to the sccache documentation for more details.
+
+[sccache]: https://github.com/mozilla/sccache
+
+
diff --git a/src/doc/src/guide/cargo-toml-vs-cargo-lock.md b/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
new file mode 100644 (file)
index 0000000..b0a8da5
--- /dev/null
@@ -0,0 +1,103 @@
+## Cargo.toml vs Cargo.lock
+
+`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk
+about them, here’s a summary:
+
+* `Cargo.toml` is about describing your dependencies in a broad sense, and is
+  written by you.
+* `Cargo.lock` contains exact information about your dependencies. It is
+  maintained by Cargo and should not be manually edited.
+
+If you’re building a library that other packages will depend on, put
+`Cargo.lock` in your `.gitignore`. If you’re building an executable like a
+command-line tool or an application, check `Cargo.lock` into `git`. If you're
+curious about why that is, see ["Why do binaries have `Cargo.lock` in version
+control, but not libraries?" in the
+FAQ](faq.html#why-do-binaries-have-cargolock-in-version-control-but-not-libraries).
+
+Let’s dig in a little bit more.
+
+`Cargo.toml` is a **manifest** file in which we can specify a bunch of
+different metadata about our package. For example, we can say that we depend
+on another package:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git" }
+```
+
+This package has a single dependency, on the `rand` library. We’ve stated in
+this case that we’re relying on a particular Git repository that lives on
+GitHub. Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our package.
+
+Sound good? Well, there’s one problem: If you build this package today, and
+then you send a copy to me, and I build this package tomorrow, something bad
+could happen. There could be more commits to `rand` in the meantime, and my
+build would include new commits while yours would not. Therefore, we would
+get different builds. This would be bad because we want reproducible builds.
+
+We could fix this problem by putting a `rev` line in our `Cargo.toml`:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git", rev = "9f35b8e" }
+```
+
+Now our builds will be the same. But there’s a big drawback: now we have to
+manually think about SHA-1s every time we want to update our library. This is
+both tedious and error prone.
+
+Enter the `Cargo.lock`. Because of its existence, we don’t need to manually
+keep track of the exact revisions: Cargo will do it for us. When we have a
+manifest like this:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git" }
+```
+
+Cargo will take the latest commit and write that information out into our
+`Cargo.lock` when we build for the first time. That file will look like this:
+
+```toml
+[[package]]
+name = "hello_world"
+version = "0.1.0"
+dependencies = [
+ "rand 0.1.0 (git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9)",
+]
+
+[[package]]
+name = "rand"
+version = "0.1.0"
+source = "git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9"
+```
+
+You can see that there’s a lot more information here, including the exact
+revision we used to build. Now when you give your package to someone else,
+they’ll use the exact same SHA, even though we didn’t specify it in our
+`Cargo.toml`.
+
+When we’re ready to opt in to a new version of the library, Cargo can
+re-calculate the dependencies and update things for us:
+
+```console
+$ cargo update           # updates all dependencies
+$ cargo update -p rand   # updates just “rand”
+```
+
+This will write out a new `Cargo.lock` with the new version information. Note
+that the argument to `cargo update` is actually a
+[Package ID Specification](reference/pkgid-spec.html) and `rand` is just a short
+specification.
diff --git a/src/doc/src/guide/continuous-integration.md b/src/doc/src/guide/continuous-integration.md
new file mode 100644 (file)
index 0000000..2e610ee
--- /dev/null
@@ -0,0 +1,50 @@
+## Continuous Integration
+
+### Travis CI
+
+To test your package on Travis CI, here is a sample `.travis.yml` file:
+
+```yaml
+language: rust
+rust:
+  - stable
+  - beta
+  - nightly
+matrix:
+  allow_failures:
+    - rust: nightly
+```
+
+This will test all three release channels, but any breakage in nightly
+will not fail your overall build. Please see the [Travis CI Rust
+documentation](https://docs.travis-ci.com/user/languages/rust/) for more
+information.
+
+### GitLab CI
+
+To test your package on GitLab CI, here is a sample `.gitlab-ci.yml` file:
+
+```yaml
+stages:
+  - build
+
+rust-latest:
+  stage: build
+  image: rust:latest
+  script:
+    - cargo build --verbose
+    - cargo test --verbose
+
+rust-nightly:
+  stage: build
+  image: rustlang/rust:nightly
+  script:
+    - cargo build --verbose
+    - cargo test --verbose
+  allow_failure: true
+```
+
+This will test on the stable channel and nightly channel, but any
+breakage in nightly will not fail your overall build. Please see the
+[GitLab CI](https://docs.gitlab.com/ce/ci/yaml/README.html) for more
+information.
diff --git a/src/doc/src/guide/creating-a-new-project.md b/src/doc/src/guide/creating-a-new-project.md
new file mode 100644 (file)
index 0000000..5d79c2c
--- /dev/null
@@ -0,0 +1,87 @@
+## Creating a New Package
+
+To start a new package with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world --bin
+```
+
+We’re passing `--bin` because we’re making a binary program: if we
+were making a library, we’d pass `--lib`. This also initializes a new `git`
+repository by default. If you don't want it to do that, pass `--vcs none`.
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+    └── main.rs
+
+1 directory, 2 files
+```
+
+Let’s take a closer look at `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+```
+
+This is called a **manifest**, and it contains all of the metadata that Cargo
+needs to compile your package.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+    println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” for us. Let’s compile it:
+
+```console
+$ cargo build
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step (You
+won't see the `Compiling` line if you have not made any changes since you last
+compiled):
+
+```console
+$ cargo run
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+     Running `target/debug/hello_world`
+Hello, world!
+```
+
+You’ll now notice a new file, `Cargo.lock`. It contains information about our
+dependencies. Since we don’t have any yet, it’s not very interesting.
+
+Once you’re ready for release, you can use `cargo build --release` to compile
+your files with optimizations turned on:
+
+```console
+$ cargo build --release
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+`cargo build --release` puts the resulting binary in `target/release` instead of
+`target/debug`.
+
+Compiling in debug mode is the default for development — compilation time is
+shorter since the compiler doesn't do optimizations, but the code will run
+slower. Release mode takes longer to compile, but the code will run faster.
diff --git a/src/doc/src/guide/dependencies.md b/src/doc/src/guide/dependencies.md
new file mode 100644 (file)
index 0000000..80167eb
--- /dev/null
@@ -0,0 +1,90 @@
+## Dependencies
+
+[crates.io] is the Rust community's central package registry that serves as a
+location to discover and download packages. `cargo` is configured to use it by
+default to find requested packages.
+
+To depend on a library hosted on [crates.io], add it to your `Cargo.toml`.
+
+[crates.io]: https://crates.io/
+
+### Adding a dependency
+
+If your `Cargo.toml` doesn't already have a `[dependencies]` section, add that,
+then list the crate name and version that you would like to use. This example
+adds a dependency of the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The version string is a [semver] version requirement. The [specifying
+dependencies](reference/specifying-dependencies.html) docs have more information about
+the options you have here.
+
+[semver]: https://github.com/steveklabnik/semver#requirements
+
+If we also wanted to add a dependency on the `regex` crate, we would not need
+to add `[dependencies]` for each crate listed. Here's what your whole
+`Cargo.toml` file would look like with dependencies on the `time` and `regex`
+crates:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+time = "0.1.12"
+regex = "0.1.41"
+```
+
+Re-run `cargo build`, and Cargo will fetch the new dependencies and all of
+their dependencies, compile them all, and update the `Cargo.lock`:
+
+```console
+$ cargo build
+      Updating crates.io index
+   Downloading memchr v0.1.5
+   Downloading libc v0.1.10
+   Downloading regex-syntax v0.2.1
+   Downloading memchr v0.1.5
+   Downloading aho-corasick v0.3.0
+   Downloading regex v0.1.41
+     Compiling memchr v0.1.5
+     Compiling libc v0.1.10
+     Compiling regex-syntax v0.2.1
+     Compiling memchr v0.1.5
+     Compiling aho-corasick v0.3.0
+     Compiling regex v0.1.41
+     Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+Our `Cargo.lock` contains the exact information about which revision of all of
+these dependencies we used.
+
+Now, if `regex` gets updated, we will still build with the same revision until
+we choose to `cargo update`.
+
+You can now use the `regex` library using `extern crate` in `main.rs`.
+
+```rust
+extern crate regex;
+
+use regex::Regex;
+
+fn main() {
+    let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+    println!("Did our date match? {}", re.is_match("2014-01-01"));
+}
+```
+
+Running it will show:
+
+```console
+$ cargo run
+   Running `target/debug/hello_world`
+Did our date match? true
+```
diff --git a/src/doc/src/guide/index.md b/src/doc/src/guide/index.md
new file mode 100644 (file)
index 0000000..08fb1c6
--- /dev/null
@@ -0,0 +1,14 @@
+## Cargo Guide
+
+This guide will give you all that you need to know about how to use Cargo to
+develop Rust packages.
+
+* [Why Cargo Exists](guide/why-cargo-exists.html)
+* [Creating a New Package](guide/creating-a-new-project.html)
+* [Working on an Existing Cargo Package](guide/working-on-an-existing-project.html)
+* [Dependencies](guide/dependencies.html)
+* [Package Layout](guide/project-layout.html)
+* [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.html)
+* [Tests](guide/tests.html)
+* [Continuous Integration](guide/continuous-integration.html)
+* [Build Cache](guide/build-cache.html)
diff --git a/src/doc/src/guide/project-layout.md b/src/doc/src/guide/project-layout.md
new file mode 100644 (file)
index 0000000..516f065
--- /dev/null
@@ -0,0 +1,35 @@
+## Package Layout
+
+Cargo uses conventions for file placement to make it easy to dive into a new
+Cargo package:
+
+```
+.
+├── Cargo.lock
+├── Cargo.toml
+├── benches
+│   └── large-input.rs
+├── examples
+│   └── simple.rs
+├── src
+│   ├── bin
+│   │   └── another_executable.rs
+│   ├── lib.rs
+│   └── main.rs
+└── tests
+    └── some-integration-tests.rs
+```
+
+* `Cargo.toml` and `Cargo.lock` are stored in the root of your package (*package
+  root*).
+* Source code goes in the `src` directory.
+* The default library file is `src/lib.rs`.
+* The default executable file is `src/main.rs`.
+* Other executables can be placed in `src/bin/*.rs`.
+* Integration tests go in the `tests` directory (unit tests go in each file
+  they're testing).
+* Examples go in the `examples` directory.
+* Benchmarks go in the `benches` directory.
+
+These are explained in more detail in the [manifest
+description](reference/manifest.html#the-project-layout).
diff --git a/src/doc/src/guide/tests.md b/src/doc/src/guide/tests.md
new file mode 100644 (file)
index 0000000..3ffa4af
--- /dev/null
@@ -0,0 +1,39 @@
+## Tests
+
+Cargo can run your tests with the `cargo test` command. Cargo looks for tests
+to run in two places: in each of your `src` files and any tests in `tests/`.
+Tests in your `src` files should be unit tests, and tests in `tests/` should be
+integration-style tests. As such, you’ll need to import your crates into
+the files in `tests`.
+
+Here's an example of running `cargo test` in our package, which currently has
+no tests:
+
+```console
+$ cargo test
+   Compiling rand v0.1.0 (https://github.com/rust-lang-nursery/rand.git#9f35b8e)
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+     Running target/test/hello_world-9c2b65bbb79eabce
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+```
+
+If our package had tests, we would see more output with the correct number of
+tests.
+
+You can also run a specific test by passing a filter:
+
+```console
+$ cargo test foo
+```
+
+This will run any test with `foo` in its name.
+
+`cargo test` runs additional checks as well. For example, it will compile any
+examples you’ve included and will also test the examples in your
+documentation. Please see the [testing guide][testing] in the Rust
+documentation for more details.
+
+[testing]: https://doc.rust-lang.org/book/testing.html
diff --git a/src/doc/src/guide/why-cargo-exists.md b/src/doc/src/guide/why-cargo-exists.md
new file mode 100644 (file)
index 0000000..215bbce
--- /dev/null
@@ -0,0 +1,12 @@
+## Why Cargo Exists
+
+Cargo is a tool that allows Rust packages to declare their various
+dependencies and ensure that you’ll always get a repeatable build.
+
+To accomplish this goal, Cargo does four things:
+
+* Introduces two metadata files with various bits of package information.
+* Fetches and builds your package’s dependencies.
+* Invokes `rustc` or another build tool with the correct parameters to build
+  your package.
+* Introduces conventions to make working with Rust packages easier.
diff --git a/src/doc/src/guide/working-on-an-existing-project.md b/src/doc/src/guide/working-on-an-existing-project.md
new file mode 100644 (file)
index 0000000..ff5a31f
--- /dev/null
@@ -0,0 +1,22 @@
+## Working on an Existing Cargo Package
+
+If you download an existing package that uses Cargo, it’s really easy
+to get going.
+
+First, get the package from somewhere. In this example, we’ll use `rand`
+cloned from its repository on GitHub:
+
+```console
+$ git clone https://github.com/rust-lang-nursery/rand.git
+$ cd rand
+```
+
+To build, use `cargo build`:
+
+```console
+$ cargo build
+   Compiling rand v0.1.0 (file:///path/to/package/rand)
+```
+
+This will fetch all of the dependencies and then build them, along with the
+package.
diff --git a/src/doc/src/images/Cargo-Logo-Small.png b/src/doc/src/images/Cargo-Logo-Small.png
new file mode 100644 (file)
index 0000000..e3a9920
Binary files /dev/null and b/src/doc/src/images/Cargo-Logo-Small.png differ
diff --git a/src/doc/src/images/auth-level-acl.png b/src/doc/src/images/auth-level-acl.png
new file mode 100644 (file)
index 0000000..e7bc251
Binary files /dev/null and b/src/doc/src/images/auth-level-acl.png differ
diff --git a/src/doc/src/images/org-level-acl.png b/src/doc/src/images/org-level-acl.png
new file mode 100644 (file)
index 0000000..ed5aa88
Binary files /dev/null and b/src/doc/src/images/org-level-acl.png differ
diff --git a/src/doc/src/index.md b/src/doc/src/index.md
new file mode 100644 (file)
index 0000000..d3af532
--- /dev/null
@@ -0,0 +1,30 @@
+# The Cargo Book
+
+![Cargo Logo](images/Cargo-Logo-Small.png)
+
+Cargo is the [Rust] *package manager*. Cargo downloads your Rust package’s
+dependencies, compiles your packages, makes distributable packages, and uploads them to
+[crates.io], the Rust community’s *package registry*. You can contribute
+to this book on [GitHub].
+
+
+### Sections
+
+**[Getting Started](getting-started/index.html)**
+
+To get started with Cargo, install Cargo (and Rust) and set up your first crate.
+
+**[Cargo Guide](guide/index.html)**
+
+The guide will give you all you need to know about how to use Cargo to develop
+Rust packages.
+
+**[Cargo Reference](reference/index.html)**
+
+The reference covers the details of various areas of Cargo.
+
+**[Frequently Asked Questions](faq.html)**
+
+[rust]: https://www.rust-lang.org/
+[crates.io]: https://crates.io/
+[GitHub]: https://github.com/rust-lang/cargo/tree/master/src/doc/src
diff --git a/src/doc/src/reference/build-scripts.md b/src/doc/src/reference/build-scripts.md
new file mode 100644 (file)
index 0000000..e9727e1
--- /dev/null
@@ -0,0 +1,570 @@
+## Build Scripts
+
+Some packages need to compile third-party non-Rust code, for example C
+libraries. Other packages need to link to C libraries which can either be
+located on the system or possibly need to be built from source. Others still
+need facilities for functionality such as code generation before building (think
+parser generators).
+
+Cargo does not aim to replace other tools that are well-optimized for
+these tasks, but it does integrate with them with the `build` configuration
+option.
+
+```toml
+[package]
+# ...
+build = "build.rs"
+```
+
+The Rust file designated by the `build` command (relative to the package root)
+will be compiled and invoked before anything else is compiled in the package,
+allowing your Rust code to depend on the built or generated artifacts.
+By default Cargo looks for a `build.rs` file in the package root (even if you
+do not specify a value for `build`). Use `build = "custom_build_name.rs"` to specify
+a custom build script name, or `build = false` to disable automatic detection of the build script.
+
+Some example use cases of the build command are:
+
+* Building a bundled C library.
+* Finding a C library on the host system.
+* Generating a Rust module from a specification.
+* Performing any platform-specific configuration needed for the crate.
+
+Each of these use cases will be detailed in full below to give examples of how
+the build command works.
+
+### Inputs to the Build Script
+
+When the build script is run, there are a number of inputs to the build script,
+all passed in the form of [environment variables][env].
+
+In addition to environment variables, the build script’s current directory is
+the source directory of the build script’s package.
+
+[env]: reference/environment-variables.html
+
+### Outputs of the Build Script
+
+All the lines printed to stdout by a build script are written to a file like
+`target/debug/build/<pkg>/output` (the precise location may depend on your
+configuration). If you would like to see such output directly in your terminal,
+invoke cargo as 'very verbose' with the `-vv` flag. Note that if neither the
+build script nor package source files are modified, subsequent calls to
+cargo with `-vv` will **not** print output to the terminal because a
+new build is not executed. Run `cargo clean` before each cargo invocation
+if you want to ensure that output is always displayed on your terminal.
+Any line that starts with `cargo:` is interpreted directly by Cargo.
+This line must be of the form `cargo:key=value`, like the examples below:
+
+```
+# specially recognized by Cargo
+cargo:rustc-link-lib=static=foo
+cargo:rustc-link-search=native=/path/to/foo
+cargo:rustc-cfg=foo
+cargo:rustc-env=FOO=bar
+# arbitrary user-defined metadata
+cargo:root=/path/to/foo
+cargo:libdir=/path/to/foo/lib
+cargo:include=/path/to/foo/include
+```
+
+On the other hand, lines printed to stderr are written to a file like
+`target/debug/build/<pkg>/stderr` but are not interpreted by cargo.
+
+There are a few special keys that Cargo recognizes, some affecting how the
+crate is built:
+
+* `rustc-link-lib=[KIND=]NAME` indicates that the specified value is a library
+  name and should be passed to the compiler as a `-l` flag. The optional `KIND`
+  can be one of `static`, `dylib` (the default), or `framework`, see
+  `rustc --help` for more details.
+* `rustc-link-search=[KIND=]PATH` indicates the specified value is a library
+  search path and should be passed to the compiler as a `-L` flag. The optional
+  `KIND` can be one of `dependency`, `crate`, `native`, `framework` or `all`
+  (the default), see `rustc --help` for more details.
+* `rustc-flags=FLAGS` is a set of flags passed to the compiler, only `-l` and
+  `-L` flags are supported.
+* `rustc-cfg=FEATURE` indicates that the specified feature will be passed as a
+  `--cfg` flag to the compiler. This is often useful for performing compile-time
+  detection of various features.
+* `rustc-env=VAR=VALUE` indicates that the specified environment variable
+  will be added to the environment which the compiler is run within.
+  The value can be then retrieved by the `env!` macro in the compiled crate.
+  This is useful for embedding additional metadata in crate's code,
+  such as the hash of Git HEAD or the unique identifier of a continuous
+  integration server.
+* `rerun-if-changed=PATH` is a path to a file or directory which indicates that
+  the build script should be re-run if it changes (detected by a more-recent
+  last-modified timestamp on the file). Normally build scripts are re-run if
+  any file inside the crate root changes, but this can be used to scope changes
+  to just a small set of files. (If this path points to a directory the entire
+  directory will not be traversed for changes -- only changes to the timestamp
+  of the directory itself (which corresponds to some types of changes within the
+  directory, depending on platform) will trigger a rebuild. To request a re-run
+  on any changes within an entire directory, print a line for the directory and
+  another line for everything inside it, recursively.)
+  Note that if the build script itself (or one of its dependencies) changes,
+  then it's rebuilt and rerun unconditionally, so
+  `cargo:rerun-if-changed=build.rs` is almost always redundant (unless you
+  want to ignore changes in all other files except for `build.rs`).
+* `rerun-if-env-changed=VAR` is the name of an environment variable which
+  indicates that if the environment variable's value changes the build script
+  should be rerun. This basically behaves the same as `rerun-if-changed` except
+  that it works with environment variables instead. Note that the environment
+  variables here are intended for global environment variables like `CC` and
+  such, it's not necessary to use this for env vars like `TARGET` that Cargo
+  sets. Also note that if `rerun-if-env-changed` is printed out then Cargo will
+  *only* rerun the build script if those environment variables change or if
+  files printed out by `rerun-if-changed` change.
+
+* `warning=MESSAGE` is a message that will be printed to the main console after
+  a build script has finished running. Warnings are only shown for path
+  dependencies (that is, those you're working on locally), so for example
+  warnings printed out in crates.io crates are not emitted by default.
+
+Any other element is a user-defined metadata that will be passed to
+dependents. More information about this can be found in the [`links`][links]
+section.
+
+[links]: #the-links-manifest-key
+
+### Build Dependencies
+
+Build scripts are also allowed to have dependencies on other Cargo-based crates.
+Dependencies are declared through the `build-dependencies` section of the
+manifest.
+
+```toml
+[build-dependencies]
+foo = { git = "https://github.com/your-packages/foo" }
+```
+
+The build script **does not** have access to the dependencies listed in the
+`dependencies` or `dev-dependencies` section (they’re not built yet!). All build
+dependencies will also not be available to the package itself unless explicitly
+stated as such.
+
+### The `links` Manifest Key
+
+In addition to the manifest key `build`, Cargo also supports a `links` manifest
+key to declare the name of a native library that is being linked to:
+
+```toml
+[package]
+# ...
+links = "foo"
+build = "build.rs"
+```
+
+This manifest states that the package links to the `libfoo` native library, and
+it also has a build script for locating and/or building the library. Cargo
+requires that a `build` command is specified if a `links` entry is also
+specified.
+
+The purpose of this manifest key is to give Cargo an understanding about the set
+of native dependencies that a package has, as well as providing a principled
+system of passing metadata between package build scripts.
+
+Primarily, Cargo requires that there is at most one package per `links` value.
+In other words, it’s forbidden to have two packages link to the same native
+library. Note, however, that there are [conventions in place][star-sys] to
+alleviate this.
+
+[star-sys]: #a-sys-packages
+
+As mentioned above in the output format, each build script can generate an
+arbitrary set of metadata in the form of key-value pairs. This metadata is
+passed to the build scripts of **dependent** packages. For example, if `libbar`
+depends on `libfoo`, then if `libfoo` generates `key=value` as part of its
+metadata, then the build script of `libbar` will have the environment variables
+`DEP_FOO_KEY=value`.
+
+Note that metadata is only passed to immediate dependents, not transitive
+dependents. The motivation for this metadata passing is outlined in the linking
+to system libraries case study below.
+
+### Overriding Build Scripts
+
+If a manifest contains a `links` key, then Cargo supports overriding the build
+script specified with a custom library. The purpose of this functionality is to
+prevent running the build script in question altogether and instead supply the
+metadata ahead of time.
+
+To override a build script, place the following configuration in any acceptable
+Cargo [configuration location](reference/config.html).
+
+```toml
+[target.x86_64-unknown-linux-gnu.foo]
+rustc-link-search = ["/path/to/foo"]
+rustc-link-lib = ["foo"]
+root = "/path/to/foo"
+key = "value"
+```
+
+This section states that for the target `x86_64-unknown-linux-gnu` the library
+named `foo` has the metadata specified. This metadata is the same as the
+metadata generated as if the build script had run, providing a number of
+key/value pairs where the `rustc-flags`, `rustc-link-search`, and
+`rustc-link-lib` keys are slightly special.
+
+With this configuration, if a package declares that it links to `foo` then the
+build script will **not** be compiled or run, and the metadata specified will
+instead be used.
+
+### Case study: Code generation
+
+Some Cargo packages need to have code generated just before they are compiled
+for various reasons. Here we’ll walk through a simple example which generates a
+library call as part of the build script.
+
+First, let’s take a look at the directory structure of this package:
+
+```
+.
+├── Cargo.toml
+├── build.rs
+└── src
+    └── main.rs
+
+1 directory, 3 files
+```
+
+Here we can see that we have a `build.rs` build script and our binary in
+`main.rs`. Next, let’s take a look at the manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "hello-from-generated-code"
+version = "0.1.0"
+authors = ["you@example.com"]
+build = "build.rs"
+```
+
+Here we can see we’ve got a build script specified which we’ll use to generate
+some code. Let’s see what’s inside the build script:
+
+```rust,no_run
+// build.rs
+
+use std::env;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+
+fn main() {
+    let out_dir = env::var("OUT_DIR").unwrap();
+    let dest_path = Path::new(&out_dir).join("hello.rs");
+    let mut f = File::create(&dest_path).unwrap();
+
+    f.write_all(b"
+        pub fn message() -> &'static str {
+            \"Hello, World!\"
+        }
+    ").unwrap();
+}
+```
+
+There’s a couple of points of note here:
+
+* The script uses the `OUT_DIR` environment variable to discover where the
+  output files should be located. It can use the process’ current working
+  directory to find where the input files should be located, but in this case we
+  don’t have any input files.
+* In general, build scripts should not modify any files outside of `OUT_DIR`.
+  It may seem fine at first blush, but it does cause problems when you use
+  such a crate as a dependency, because there's an *implicit* invariant that
+  sources in `.cargo/registry` should be immutable. `cargo` won't allow such
+  scripts when packaging.
+* This script is relatively simple as it just writes out a small generated file.
+  One could imagine that other more fanciful operations could take place such as
+  generating a Rust module from a C header file or another language definition,
+  for example.
+
+Next, let’s peek at the library itself:
+
+```rust,ignore
+// src/main.rs
+
+include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+fn main() {
+    println!("{}", message());
+}
+```
+
+This is where the real magic happens. The library is using the rustc-defined
+`include!` macro in combination with the `concat!` and `env!` macros to include
+the generated file (`hello.rs`) into the crate’s compilation.
+
+Using the structure shown here, crates can include any number of generated files
+from the build script itself.
+
+### Case study: Building some native code
+
+Sometimes it’s necessary to build some native C or C++ code as part of a
+package. This is another excellent use case of leveraging the build script to
+build a native library before the Rust crate itself. As an example, we’ll create
+a Rust library which calls into C to print “Hello, World!”.
+
+Like above, let’s first take a look at the package layout:
+
+```
+.
+├── Cargo.toml
+├── build.rs
+└── src
+    ├── hello.c
+    └── main.rs
+
+1 directory, 4 files
+```
+
+Pretty similar to before! Next, the manifest:
+
+```toml
+# Cargo.toml
+
+[package]
+name = "hello-world-from-c"
+version = "0.1.0"
+authors = ["you@example.com"]
+build = "build.rs"
+```
+
+For now we’re not going to use any build dependencies, so let’s take a look at
+the build script now:
+
+```rust,no_run
+// build.rs
+
+use std::process::Command;
+use std::env;
+use std::path::Path;
+
+fn main() {
+    let out_dir = env::var("OUT_DIR").unwrap();
+
+    // note that there are a number of downsides to this approach, the comments
+    // below detail how to improve the portability of these commands.
+    Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"])
+                       .arg(&format!("{}/hello.o", out_dir))
+                       .status().unwrap();
+    Command::new("ar").args(&["crus", "libhello.a", "hello.o"])
+                      .current_dir(&Path::new(&out_dir))
+                      .status().unwrap();
+
+    println!("cargo:rustc-link-search=native={}", out_dir);
+    println!("cargo:rustc-link-lib=static=hello");
+}
+```
+
+This build script starts out by compiling our C file into an object file (by
+invoking `gcc`) and then converting this object file into a static library (by
+invoking `ar`). The final step is to give feedback to Cargo itself to say that our
+output was in `out_dir` and the compiler should link the crate to `libhello.a`
+statically via the `-l static=hello` flag.
+
+Note that there are a number of drawbacks to this hardcoded approach:
+
+* The `gcc` command itself is not portable across platforms. For example it’s
+  unlikely that Windows platforms have `gcc`, and not even all Unix platforms
+  may have `gcc`. The `ar` command is also in a similar situation.
+* These commands do not take cross-compilation into account. If we’re cross
+  compiling for a platform such as Android it’s unlikely that `gcc` will produce
+  an ARM executable.
+
+Not to fear, though, this is where a `build-dependencies` entry would help! The
+Cargo ecosystem has a number of packages to make this sort of task much easier,
+portable, and standardized. For example, the build script could be written as:
+
+```rust,ignore
+// build.rs
+
+// Bring in a dependency on an externally maintained `cc` package which manages
+// invoking the C compiler.
+extern crate cc;
+
+fn main() {
+    cc::Build::new()
+        .file("src/hello.c")
+        .compile("hello");
+}
+```
+
+Add a build time dependency on the `cc` crate with the following addition to
+your `Cargo.toml`:
+
+```toml
+[build-dependencies]
+cc = "1.0"
+```
+
+The [`cc` crate](https://crates.io/crates/cc) abstracts a range of build
+script requirements for C code:
+
+* It invokes the appropriate compiler (MSVC for windows, `gcc` for MinGW, `cc`
+  for Unix platforms, etc.).
+* It takes the `TARGET` variable into account by passing appropriate flags to
+  the compiler being used.
+* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all
+  handled automatically.
+* The stdout output and `OUT_DIR` locations are also handled by the `cc`
+  library.
+
+Here we can start to see some of the major benefits of farming as much
+functionality as possible out to common build dependencies rather than
+duplicating logic across all build scripts!
+
+Back to the case study though, let’s take a quick look at the contents of the
+`src` directory:
+
+```c
+// src/hello.c
+
+#include <stdio.h>
+
+void hello() {
+    printf("Hello, World!\n");
+}
+```
+
+```rust,ignore
+// src/main.rs
+
+// Note the lack of the `#[link]` attribute. We’re delegating the responsibility
+// of selecting what to link to over to the build script rather than hardcoding
+// it in the source file.
+extern { fn hello(); }
+
+fn main() {
+    unsafe { hello(); }
+}
+```
+
+And there we go! This should complete our example of building some C code from a
+Cargo package using the build script itself. This also shows why using a build
+dependency can be crucial in many situations and even much more concise!
+
+We’ve also seen a brief example of how a build script can use a crate as a
+dependency purely for the build process and not for the crate itself at runtime.
+
+### Case study: Linking to system libraries
+
+The final case study here will be investigating how a Cargo library links to a
+system library and how the build script is leveraged to support this use case.
+
+Quite frequently a Rust crate wants to link to a native library often provided
+on the system to bind its functionality or just use it as part of an
+implementation detail. This is quite a nuanced problem when it comes to
+performing this in a platform-agnostic fashion, and the purpose of a build
+script is again to farm out as much of this as possible to make this as easy as
+possible for consumers.
+
+As an example to follow, let’s take a look at one of [Cargo’s own
+dependencies][git2-rs], [libgit2][libgit2]. The C library has a number of
+constraints:
+
+[git2-rs]: https://github.com/alexcrichton/git2-rs/tree/master/libgit2-sys
+[libgit2]: https://github.com/libgit2/libgit2
+
+* It has an optional dependency on OpenSSL on Unix to implement the https
+  transport.
+* It has an optional dependency on libssh2 on all platforms to implement the ssh
+  transport.
+* It is often not installed on all systems by default.
+* It can be built from source using `cmake`.
+
+To visualize what’s going on here, let’s take a look at the manifest for the
+relevant Cargo package that links to the native C library.
+
+```toml
+[package]
+name = "libgit2-sys"
+version = "0.1.0"
+authors = ["..."]
+links = "git2"
+build = "build.rs"
+
+[dependencies]
+libssh2-sys = { git = "https://github.com/alexcrichton/ssh2-rs" }
+
+[target.'cfg(unix)'.dependencies]
+openssl-sys = { git = "https://github.com/alexcrichton/openssl-sys" }
+
+# ...
+```
+
+As the above manifests show, we’ve got a `build` script specified, but it’s
+worth noting that this example has a `links` entry which indicates that the
+crate (`libgit2-sys`) links to the `git2` native library.
+
+Here we also see that we chose to have the Rust crate have an unconditional
+dependency on `libssh2` via the `libssh2-sys` crate, as well as a
+platform-specific dependency on `openssl-sys` for \*nix (other variants elided
+for now). It may seem a little counterintuitive to express *C dependencies* in
+the *Cargo manifest*, but this is actually using one of Cargo’s conventions in
+this space.
+
+### `*-sys` Packages
+
+To alleviate linking to system libraries, crates.io has a *convention* of package
+naming and functionality. Any package named `foo-sys` should provide two major
+pieces of functionality:
+
+* The library crate should link to the native library `libfoo`. This will often
+  probe the current system for `libfoo` before resorting to building from
+  source.
+* The library crate should provide **declarations** for functions in `libfoo`,
+  but **not** bindings or higher-level abstractions.
+
+The set of `*-sys` packages provides a common set of dependencies for linking
+to native libraries. There are a number of benefits earned from having this
+convention of native-library-related packages:
+
+* Common dependencies on `foo-sys` alleviates the above rule about one package
+  per value of `links`.
+* A common dependency allows centralizing logic on discovering `libfoo` itself
+  (or building it from source).
+* These dependencies are easily overridable.
+
+### Building libgit2
+
+Now that we’ve got libgit2’s dependencies sorted out, we need to actually write
+the build script. We’re not going to look at specific snippets of code here and
+instead only take a look at the high-level details of the build script of
+`libgit2-sys`. This is not recommending all packages follow this strategy, but
+rather just outlining one specific strategy.
+
+The first thing the build script should do is query whether libgit2 is
+already installed on the host system. To do this we’ll leverage the preexisting
+tool `pkg-config` (when it's available). We’ll also use a `build-dependencies`
+section to refactor out all the `pkg-config` related code (or someone’s already
+done that!).
+
+If `pkg-config` failed to find libgit2, or if `pkg-config` just wasn’t
+installed, the next step is to build libgit2 from bundled source code
+(distributed as part of `libgit2-sys` itself). There are a few nuances when
+doing so that we need to take into account, however:
+
+* The build system of libgit2, `cmake`, needs to be able to find libgit2’s
+  optional dependency of libssh2. We’re sure we’ve already built it (it’s a
+  Cargo dependency), we just need to communicate this information. To do this
+  we leverage the metadata format to communicate information between build
+  scripts. In this example the libssh2 package printed out `cargo:root=...` to
+  tell us where libssh2 is installed at, and we can then pass this along to
+  cmake with the `CMAKE_PREFIX_PATH` environment variable.
+
+* We’ll need to handle some `CFLAGS` values when compiling C code (and tell
+  `cmake` about this). Some flags we may want to pass are `-m64` for 64-bit
+  code, `-m32` for 32-bit code, or `-fPIC` for 64-bit code as well.
+
+* Finally, we’ll invoke `cmake` to place all output into the `OUT_DIR`
+  environment variable, and then we’ll print the necessary metadata to instruct
+  rustc how to link to libgit2.
+
+Most of the functionality of this build script is easily refactorable into
+common dependencies, so our build script isn’t quite as intimidating as these
+descriptions! In reality it’s expected that build scripts are quite succinct by
+farming logic such as above to build dependencies.
diff --git a/src/doc/src/reference/config.md b/src/doc/src/reference/config.md
new file mode 100644 (file)
index 0000000..8a22e95
--- /dev/null
@@ -0,0 +1,162 @@
+## Configuration
+
+This document will explain how Cargo’s configuration system works, as well as
+available keys for configuration.  For configuration of a package through its
+manifest, see the [manifest format](reference/manifest.html).
+
+### Hierarchical structure
+
+
+Cargo allows local configuration for a particular package as well as global
+configuration, like git. Cargo extends this to a hierarchical strategy.
+If, for example, Cargo were invoked in `/projects/foo/bar/baz`, then the
+following configuration files would be probed for and unified in this order:
+
+* `/projects/foo/bar/baz/.cargo/config`
+* `/projects/foo/bar/.cargo/config`
+* `/projects/foo/.cargo/config`
+* `/projects/.cargo/config`
+* `/.cargo/config`
+* `$HOME/.cargo/config`
+
+With this structure, you can specify configuration per-package, and even
+possibly check it into version control. You can also specify personal defaults
+with a configuration file in your home directory.
+
+### Configuration format
+
+All configuration is currently in the [TOML format][toml] (like the manifest),
+with simple key-value pairs inside of sections (tables) which all get merged
+together.
+
+[toml]: https://github.com/toml-lang/toml
+
+### Configuration keys
+
+All of the following keys are optional, and their defaults are listed as their
+value unless otherwise noted.
+
+Key values that specify a tool may be given as an absolute path, a relative path
+or as a pathless tool name. Absolute paths and pathless tool names are used as
+given. Relative paths are resolved relative to the parent directory of the
+`.cargo` directory of the config file that the value resides within.
+
+```toml
+# An array of paths to local repositories which are to be used as overrides for
+# dependencies. For more information see the Specifying Dependencies guide.
+paths = ["/path/to/override"]
+
+[cargo-new]
+# This is your name/email to place in the `authors` section of a new Cargo.toml
+# that is generated. If not present, then `git` will be probed, and if that is
+# not present then `$USER` and `$EMAIL` will be used.
+name = "..."
+email = "..."
+
+# By default `cargo new` will initialize a new Git repository. This key can be
+# set to `hg` to create a Mercurial repository, or `none` to disable this
+# behavior.
+vcs = "none"
+
+# For the following sections, $triple refers to any valid target triple, not the
+# literal string "$triple", and it will apply whenever that target triple is
+# being compiled to. 'cfg(...)' refers to the Rust-like `#[cfg]` syntax for
+# conditional compilation.
+[target.$triple]
+# This is the linker which is passed to rustc (via `-C linker=`) when the `$triple`
+# is being compiled for. By default this flag is not passed to the compiler.
+linker = ".."
+# Same but for the library archiver which is passed to rustc via `-C ar=`.
+ar = ".."
+# If a runner is provided, compiled targets for the `$triple` will be executed
+# by invoking the specified runner executable with actual target as first argument.
+# This applies to `cargo run`, `cargo test` and `cargo bench` commands.
+# By default compiled targets are executed directly.
+runner = ".."
+# custom flags to pass to all compiler invocations that target $triple
+# this value overrides build.rustflags when both are present
+rustflags = ["..", ".."]
+
+[target.'cfg(...)']
+# Similar to the $triple configuration, but using the `cfg` syntax.
+# If several `cfg` and $triple targets are candidates, then the rustflags
+# are concatenated. The `cfg` syntax only applies to rustflags, and not to
+# linker.
+rustflags = ["..", ".."]
+# Similar to the $triple configuration, but using the `cfg` syntax.
+# If one or more `cfg`s, and a $triple target are candidates, then the $triple
+# will be used
+# If several `cfg` are candidates, then the build will error
+runner = ".."
+
+# Configuration keys related to the registry
+[registry]
+index = "..."   # URL of the registry index (defaults to the central repository)
+token = "..."   # Access token (found on the central repo’s website)
+default = "..." # Default alternative registry to use (can be overridden with --registry)
+
+[http]
+proxy = "host:port" # HTTP proxy to use for HTTP requests (defaults to none)
+                    # in libcurl format, e.g. "socks5h://host:port"
+timeout = 30        # Timeout for each HTTP request, in seconds
+cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional)
+check-revoke = true # Indicates whether SSL certs are checked for revocation
+low-speed-limit = 5 # Lower threshold for bytes/sec (10 = default, 0 = disabled)
+multiplexing = false  # whether or not to use HTTP/2 multiplexing where possible
+
+# This setting can be used to help debug what's going on with HTTP requests made
+# by Cargo. When set to `true` then Cargo's normal debug logging will be filled
+# in with HTTP information, which you can extract with
+# `RUST_LOG=cargo::ops::registry=debug` (and `trace` may print more).
+#
+# Be wary when posting these logs elsewhere though, it may be the case that a
+# header has an authentication token in it you don't want leaked! Be sure to
+# briefly review logs before posting them.
+debug = false
+
+[build]
+jobs = 1                  # number of parallel jobs, defaults to # of CPUs
+rustc = "rustc"           # the rust compiler tool
+rustdoc = "rustdoc"       # the doc generator tool
+target = "triple"         # build for the target triple (ignored by `cargo install`)
+target-dir = "target"     # path of where to place all generated artifacts
+rustflags = ["..", ".."]  # custom flags to pass to all compiler invocations
+incremental = true        # whether or not to enable incremental compilation
+dep-info-basedir = ".."   # full path for the base directory for targets in depfiles
+
+[term]
+verbose = false        # whether cargo provides verbose output
+color = 'auto'         # whether cargo colorizes output
+
+# Network configuration
+[net]
+retry = 2 # number of times a network call will automatically be retried
+git-fetch-with-cli = false  # if `true` we'll use `git`-the-CLI to fetch git repos
+
+# Alias cargo commands. The first 3 aliases are built in. If your
+# command requires grouped whitespace use the list format.
+[alias]
+b = "build"
+t = "test"
+r = "run"
+rr = "run --release"
+space_example = ["run", "--release", "--", "\"command list\""]
+```
+
+### Environment variables
+
+Cargo can also be configured through environment variables in addition to the
+TOML syntax above. For each configuration key above of the form `foo.bar` the
+environment variable `CARGO_FOO_BAR` can also be used to define the value. For
+example the `build.jobs` key can also be defined by `CARGO_BUILD_JOBS`.
+
+Environment variables will take precedence over TOML configuration, and currently
+only integer, boolean, and string keys are supported to be defined by
+environment variables. This means that [source replacement][source], which is expressed by
+tables, cannot be configured through environment variables.
+
+In addition to the system above, Cargo recognizes a few other specific
+[environment variables][env].
+
+[env]: reference/environment-variables.html
+[source]: reference/source-replacement.html
diff --git a/src/doc/src/reference/environment-variables.md b/src/doc/src/reference/environment-variables.md
new file mode 100644 (file)
index 0000000..4062b9f
--- /dev/null
@@ -0,0 +1,144 @@
+## Environment Variables
+
+Cargo sets and reads a number of environment variables which your code can detect
+or override. Here is a list of the variables Cargo sets, organized by when it interacts
+with them:
+
+### Environment variables Cargo reads
+
+You can override these environment variables to change Cargo's behavior on your
+system:
+
+* `CARGO_HOME` — Cargo maintains a local cache of the registry index and of git
+  checkouts of crates.  By default these are stored under `$HOME/.cargo`, but
+  this variable overrides the location of this directory. Once a crate is cached
+  it is not removed by the clean command.
+* `CARGO_TARGET_DIR` — Location of where to place all generated artifacts,
+  relative to the current working directory.
+* `RUSTC` — Instead of running `rustc`, Cargo will execute this specified
+  compiler instead.
+* `RUSTC_WRAPPER` — Instead of simply running `rustc`, Cargo will execute this
+  specified wrapper instead, passing as its commandline arguments the rustc
+  invocation, with the first argument being rustc.
+* `RUSTDOC` — Instead of running `rustdoc`, Cargo will execute this specified
+  `rustdoc` instance instead.
+* `RUSTDOCFLAGS` — A space-separated list of custom flags to pass to all `rustdoc`
+  invocations that Cargo performs. In contrast with `cargo rustdoc`, this is
+  useful for passing a flag to *all* `rustdoc` instances.
+* `RUSTFLAGS` — A space-separated list of custom flags to pass to all compiler
+  invocations that Cargo performs. In contrast with `cargo rustc`, this is
+  useful for passing a flag to *all* compiler instances.
+* `CARGO_INCREMENTAL` — If this is set to 1 then Cargo will force incremental
+  compilation to be enabled for the current compilation, and when set to 0 it
+  will force disabling it. If this env var isn't present then cargo's defaults
+  will otherwise be used.
+* `CARGO_CACHE_RUSTC_INFO` — If this is set to 0 then Cargo will not try to cache
+  compiler version information.
+
+Note that Cargo will also read environment variables for `.cargo/config`
+configuration values, as described in [that documentation][config-env]
+
+[config-env]: reference/config.html#environment-variables
+
+### Environment variables Cargo sets for crates
+
+Cargo exposes these environment variables to your crate when it is compiled.
+Note that this applies for test binaries as well.
+To get the value of any of these variables in a Rust program, do this:
+
+```rust
+let version = env!("CARGO_PKG_VERSION");
+```
+
+`version` will now contain the value of `CARGO_PKG_VERSION`.
+
+* `CARGO` - Path to the `cargo` binary performing the build.
+* `CARGO_MANIFEST_DIR` - The directory containing the manifest of your package.
+* `CARGO_PKG_VERSION` - The full version of your package.
+* `CARGO_PKG_VERSION_MAJOR` - The major version of your package.
+* `CARGO_PKG_VERSION_MINOR` - The minor version of your package.
+* `CARGO_PKG_VERSION_PATCH` - The patch version of your package.
+* `CARGO_PKG_VERSION_PRE` - The pre-release version of your package.
+* `CARGO_PKG_AUTHORS` - Colon separated list of authors from the manifest of your package.
+* `CARGO_PKG_NAME` - The name of your package.
+* `CARGO_PKG_DESCRIPTION` - The description from the manifest of your package.
+* `CARGO_PKG_HOMEPAGE` - The home page from the manifest of your package.
+* `CARGO_PKG_REPOSITORY` - The repository from the manifest of your package.
+* `OUT_DIR` - If the package has a build script, this is set to the folder where the build
+              script should place its output.  See below for more information.
+
+### Environment variables Cargo sets for build scripts
+
+Cargo sets several environment variables when build scripts are run. Because these variables
+are not yet set when the build script is compiled, the above example using `env!` won't work
+and instead you'll need to retrieve the values when the build script is run:
+
+```rust
+use std::env;
+let out_dir = env::var("OUT_DIR").unwrap();
+```
+
+`out_dir` will now contain the value of `OUT_DIR`.
+
+* `CARGO` - Path to the `cargo` binary performing the build.
+* `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package
+                         being built (the package containing the build
+                         script). Also note that this is the value of the
+                         current working directory of the build script when it
+                         starts.
+* `CARGO_MANIFEST_LINKS` - the manifest `links` value.
+* `CARGO_FEATURE_<name>` - For each activated feature of the package being
+                           built, this environment variable will be present
+                           where `<name>` is the name of the feature uppercased
+                           and having `-` translated to `_`.
+* `CARGO_CFG_<cfg>` - For each [configuration option][configuration] of the
+                      package being built, this environment variable will
+                      contain the value of the configuration, where `<cfg>` is
+                      the name of the configuration uppercased and having `-`
+                      translated to `_`.
+                      Boolean configurations are present if they are set, and
+                      not present otherwise.
+                      Configurations with multiple values are joined to a
+                      single variable with the values delimited by `,`.
+* `OUT_DIR` - the folder in which all output should be placed. This folder is
+              inside the build directory for the package being built, and it is
+              unique for the package in question.
+* `TARGET` - the target triple that is being compiled for. Native code should be
+             compiled for this triple. Some more information about target
+             triples can be found in [clang’s own documentation][clang].
+* `HOST` - the host triple of the rust compiler.
+* `NUM_JOBS` - the parallelism specified as the top-level parallelism. This can
+               be useful to pass a `-j` parameter to a system like `make`. Note
+               that care should be taken when interpreting this environment
+               variable. For historical purposes this is still provided but
+               recent versions of Cargo, for example, do not need to run `make
+               -j` as it'll automatically happen. Cargo implements its own
+               [jobserver] and will allow build scripts to inherit this
+               information, so programs compatible with GNU make jobservers will
+               already have appropriately configured parallelism.
+* `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the
+                         profile currently being built.
+* `PROFILE` - `release` for release builds, `debug` for other builds.
+* `DEP_<name>_<key>` - For more information about this set of environment
+                       variables, see build script documentation about [`links`][links].
+* `RUSTC`, `RUSTDOC` - the compiler and documentation generator that Cargo has
+                       resolved to use, passed to the build script so it might
+                       use it as well.
+* `RUSTC_LINKER` - The path to the linker binary that Cargo has resolved to use
+                   for the current target, if specified. The linker can be
+                   changed by editing `.cargo/config`; see the documentation
+                   about [cargo configuration][cargo-config] for more
+                   information.
+
+[links]: reference/build-scripts.html#the-links-manifest-key
+[configuration]: https://doc.rust-lang.org/reference/attributes.html#conditional-compilation
+[clang]: http://clang.llvm.org/docs/CrossCompilation.html#target-triple
+[jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html
+[cargo-config]: reference/config.html
+
+### Environment variables Cargo sets for 3rd party subcommands
+
+Cargo exposes this environment variable to 3rd party subcommands
+(ie. programs named `cargo-foobar` placed in `$PATH`):
+
+* `CARGO` - Path to the `cargo` binary performing the build.
diff --git a/src/doc/src/reference/external-tools.md b/src/doc/src/reference/external-tools.md
new file mode 100644 (file)
index 0000000..ea055fc
--- /dev/null
@@ -0,0 +1,114 @@
+## External tools
+
+One of the goals of Cargo is simple integration with third-party tools, like
+IDEs and other build systems. To make integration easier, Cargo has several
+facilities:
+
+* a `cargo metadata` command, which outputs package structure and dependencies
+  information in JSON,
+
+* a `--message-format` flag, which outputs information about a particular build,
+  and
+
+* support for custom subcommands.
+
+
+### Information about package structure
+
+You can use `cargo metadata` command to get information about package structure
+and dependencies. The output of the command looks like this:
+
+```text
+{
+  // Integer version number of the format.
+  "version": integer,
+
+  // List of packages for this workspace, including dependencies.
+  "packages": [
+    {
+      // Opaque package identifier.
+      "id": PackageId,
+
+      "name": string,
+
+      "version": string,
+
+      "source": SourceId,
+
+      // A list of declared dependencies, see `resolve` field for actual dependencies.
+      "dependencies": [ Dependency ],
+
+      "targets": [ Target ],
+
+      // Path to Cargo.toml
+      "manifest_path": string,
+    }
+  ],
+
+  "workspace_members": [ PackageId ],
+
+  // Dependencies graph.
+  "resolve": {
+     "nodes": [
+       {
+         "id": PackageId,
+         "dependencies": [ PackageId ]
+       }
+     ]
+  }
+}
+```
+
+The format is stable and versioned. When calling `cargo metadata`, you should
+pass `--format-version` flag explicitly to avoid forward incompatibility
+hazard.
+
+If you are using Rust, there is [cargo_metadata] crate.
+
+[cargo_metadata]: https://crates.io/crates/cargo_metadata
+
+
+### Information about build
+
+When passing `--message-format=json`, Cargo will output the following
+information during the build:
+
+* compiler errors and warnings,
+
+* produced artifacts,
+
+* results of the build scripts (for example, native dependencies).
+
+The output goes to stdout in the JSON object per line format. The `reason` field
+distinguishes different kinds of messages.
+
+Information about dependencies in the Makefile-compatible format is stored in
+the `.d` files alongside the artifacts.
+
+
+### Custom subcommands
+
+Cargo is designed to be extensible with new subcommands without having to modify
+Cargo itself. This is achieved by translating a cargo invocation of the form
+cargo `(?<command>[^ ]+)` into an invocation of an external tool
+`cargo-${command}`. The external tool must be present in one of the user's
+`$PATH` directories.
+
+When Cargo invokes a custom subcommand, the first argument to the subcommand
+will be the filename of the custom subcommand, as usual. The second argument
+will be the subcommand name itself. For example, the second argument would be
+`${command}` when invoking `cargo-${command}`. Any additional arguments on the
+command line will be forwarded unchanged.
+
+Cargo can also display the help output of a custom subcommand with `cargo help
+${command}`. Cargo assumes that the subcommand will print a help message if its
+third argument is `--help`. So, `cargo help ${command}` would invoke
+`cargo-${command} ${command} --help`.
+
+Custom subcommands may use the `CARGO` environment variable to call back to
+Cargo. Alternatively, it can link to `cargo` crate as a library, but this
+approach has drawbacks:
+
+* Cargo as a library is unstable: the API may change without deprecation
+
+* versions of the linked Cargo library may be different from the Cargo binary
diff --git a/src/doc/src/reference/index.md b/src/doc/src/reference/index.md
new file mode 100644 (file)
index 0000000..c4c46b7
--- /dev/null
@@ -0,0 +1,14 @@
+## Cargo Reference
+
+The reference covers the details of various areas of Cargo.
+
+* [Specifying Dependencies](reference/specifying-dependencies.html)
+* [The Manifest Format](reference/manifest.html)
+* [Configuration](reference/config.html)
+* [Environment Variables](reference/environment-variables.html)
+* [Build Scripts](reference/build-scripts.html)
+* [Publishing on crates.io](reference/publishing.html)
+* [Package ID Specifications](reference/pkgid-spec.html)
+* [Source Replacement](reference/source-replacement.html)
+* [External Tools](reference/external-tools.html)
+* [Unstable Features](reference/unstable.html)
diff --git a/src/doc/src/reference/manifest.md b/src/doc/src/reference/manifest.md
new file mode 100644 (file)
index 0000000..8bc522d
--- /dev/null
@@ -0,0 +1,855 @@
+## The Manifest Format
+
+The `Cargo.toml` file for each package is called its *manifest*. Every manifest
+file consists of one or more sections.
+
+### The `[package]` section
+
+The first section in a `Cargo.toml` is `[package]`.
+
+```toml
+[package]
+name = "hello_world" # the name of the package
+version = "0.1.0"    # the current version, obeying semver
+authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+```
+
+All three of these fields are mandatory.
+
+#### The `version` field
+
+Cargo bakes in the concept of [Semantic
+Versioning](http://semver.org/), so make sure you follow some basic rules:
+
+* Before you reach 1.0.0, anything goes, but if you make breaking changes,
+  increment the minor version. In Rust, breaking changes include adding fields to
+  structs or variants to enums.
+* After 1.0.0, only make breaking changes when you increment the major version.
+  Don’t break the build.
+* After 1.0.0, don’t add any new public API (no new `pub` anything) in patch-level
+  versions. Always increment the minor version if you add any new `pub` structs,
+  traits, fields, types, functions, methods or anything else.
+* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.
+
+#### The `edition` field (optional)
+
+You can opt in to a specific Rust Edition for your package with the
+`edition` key in `Cargo.toml`.  If you don't specify the edition, it will
+default to 2015.
+
+```toml
+[package]
+# ...
+edition = '2018'
+```
+
+The `edition` key affects which edition your package is compiled with. Cargo
+will always generate packages via `cargo new` with the `edition` key set to the
+latest edition. Setting the `edition` key in `[package]` will affect all
+targets/crates in the package, including test suites, benchmarks, binaries,
+examples, etc.
+
+#### The `build` field (optional)
+
+This field specifies a file in the package root which is a [build script][1] for
+building native code. More information can be found in the build script
+[guide][1].
+
+[1]: reference/build-scripts.html
+
+```toml
+[package]
+# ...
+build = "build.rs"
+```
+
+#### The `links` field (optional)
+
+This field specifies the name of a native library that is being linked to.
+More information can be found in the [`links`][links] section of the build
+script guide.
+
+[links]: reference/build-scripts.html#the-links-manifest-key
+
+```toml
+[package]
+# ...
+links = "foo"
+build = "build.rs"
+```
+
+#### The `documentation` field (optional)
+
+This field specifies a URL to a website hosting the crate's documentation.
+If no URL is specified in the manifest file, [crates.io][cratesio] will
+automatically link your crate to the corresponding [docs.rs][docsrs] page.
+
+Documentation links from specific hosts are blacklisted. Hosts are added
+to the blacklist if they are known to not be hosting documentation and are
+possibly of malicious intent e.g. ad tracking networks. URLs from the
+following hosts are blacklisted:
+
+* rust-ci.org
+
+Documentation URLs from blacklisted hosts will not appear on crates.io, and
+may be replaced by docs.rs links.
+
+[docsrs]: https://docs.rs/
+[cratesio]: https://crates.io/
+
+#### The `exclude` and `include` fields (optional)
+
+You can explicitly specify to Cargo that a set of [globs][globs] should be
+ignored or included for the purposes of packaging and rebuilding a package. The
+globs specified in the `exclude` field identify a set of files that are not
+included when a package is published as well as ignored for the purposes of
+detecting when to rebuild a package, and the globs in `include` specify files
+that are explicitly included.
+
+If a VCS is being used for a package, the `exclude` field will be seeded with
+the VCS’ ignore settings (`.gitignore` for git for example).
+
+```toml
+[package]
+# ...
+exclude = ["build/**/*.o", "doc/**/*.html"]
+```
+
+```toml
+[package]
+# ...
+include = ["src/**/*", "Cargo.toml"]
+```
+
+The options are mutually exclusive: setting `include` will override an
+`exclude`. Note that `include` must be an exhaustive list of files as otherwise
+necessary source files may not be included.
+
+[globs]: http://doc.rust-lang.org/glob/glob/struct.Pattern.html
+
+#### Migrating to `gitignore`-like pattern matching
+
+The current interpretation of these configs is based on UNIX Globs, as
+implemented in the [`glob` crate](https://crates.io/crates/glob). We want
+Cargo's `include` and `exclude` configs to work as similar to `gitignore` as
+possible. [The `gitignore` specification](https://git-scm.com/docs/gitignore) is
+also based on Globs, but has a bunch of additional features that enable easier
+pattern writing and more control. Therefore, we are migrating the interpretation
+for the rules of these configs to use the [`ignore`
+crate](https://crates.io/crates/ignore), and treat each rule as a single
+line in a `gitignore` file. See [the tracking
+issue](https://github.com/rust-lang/cargo/issues/4268) for more details on the
+migration.
+
+#### The `publish`  field (optional)
+
+The `publish` field can be used to prevent a package from being published to a
+package registry (like *crates.io*) by mistake.
+
+```toml
+[package]
+# ...
+publish = false
+```
+
+#### The `workspace`  field (optional)
+
+The `workspace` field can be used to configure the workspace that this package
+will be a member of. If not specified this will be inferred as the first
+Cargo.toml with `[workspace]` upwards in the filesystem.
+
+```toml
+[package]
+# ...
+workspace = "path/to/workspace/root"
+```
+
+For more information, see the documentation for the workspace table below.
+
+#### Package metadata
+
+There are a number of optional metadata fields also accepted under the
+`[package]` section:
+
+```toml
+[package]
+# ...
+
+# A short blurb about the package. This is not rendered in any format when
+# uploaded to crates.io (aka this is not markdown).
+description = "..."
+
+# These URLs point to more information about the package. These are
+# intended to be webviews of the relevant data, not necessarily compatible
+# with VCS tools and the like.
+documentation = "..."
+homepage = "..."
+repository = "..."
+
+# This points to a file under the package root (relative to this `Cargo.toml`).
+# The contents of this file are stored and indexed in the registry.
+# crates.io will render this file and place the result on the crate's page.
+readme = "..."
+
+# This is a list of up to five keywords that describe this crate. Keywords
+# are searchable on crates.io, and you may choose any words that would
+# help someone find this crate.
+keywords = ["...", "..."]
+
+# This is a list of up to five categories where this crate would fit.
+# Categories are a fixed list available at crates.io/category_slugs, and
+# they must match exactly.
+categories = ["...", "..."]
+
+# This is an SPDX 2.1 license expression for this package.  Currently
+# crates.io will validate the license provided against a whitelist of
+# known license and exception identifiers from the SPDX license list
+# 2.4.  Parentheses are not currently supported.
+#
+# Multiple licenses can be separated with a `/`, although that usage
+# is deprecated.  Instead, use a license expression with AND and OR
+# operators to get more explicit semantics.
+license = "..."
+
+# If a package is using a nonstandard license, then this key may be specified in
+# lieu of the above key and must point to a file relative to this manifest
+# (similar to the readme key).
+license-file = "..."
+
+# Optional specification of badges to be displayed on crates.io.
+#
+# - The badges pertaining to build status that are currently available are
+#   Appveyor, CircleCI, GitLab, and TravisCI.
+# - Available badges pertaining to code test coverage are Codecov and
+#   Coveralls.
+# - There are also maintenance-related badges based on isitmaintained.com
+#   which state the issue resolution time, percent of open issues, and future
+#   maintenance intentions.
+#
+# If a `repository` key is required, this refers to a repository in
+# `user/repo` format.
+[badges]
+
+# Appveyor: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`; `id` is optional; you can specify the appveyor project id if you
+# want to use that instead. `project_name` is optional; use when the repository
+# name differs from the appveyor project name.
+appveyor = { repository = "...", branch = "master", service = "github" }
+
+# Circle CI: `repository` is required. `branch` is optional; default is `master`
+circle-ci = { repository = "...", branch = "master" }
+
+# GitLab: `repository` is required. `branch` is optional; default is `master`
+gitlab = { repository = "...", branch = "master" }
+
+# Travis CI: `repository` in format "<user>/<project>" is required.
+# `branch` is optional; default is `master`
+travis-ci = { repository = "...", branch = "master" }
+
+# Codecov: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`.
+codecov = { repository = "...", branch = "master", service = "github" }
+
+# Coveralls: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default) and `bitbucket`.
+coveralls = { repository = "...", branch = "master", service = "github" }
+
+# Is it maintained resolution time: `repository` is required.
+is-it-maintained-issue-resolution = { repository = "..." }
+
+# Is it maintained percentage of open issues: `repository` is required.
+is-it-maintained-open-issues = { repository = "..." }
+
+# Maintenance: `status` is required. Available options are `actively-developed`,
+# `passively-maintained`, `as-is`, `experimental`, `looking-for-maintainer`,
+# `deprecated`, and the default `none`, which displays no badge on crates.io.
+maintenance = { status = "..." }
+```
+
+The [crates.io](https://crates.io) registry will render the description, display
+the license, link to the three URLs and categorize by the keywords. These keys
+provide useful information to users of the registry and also influence the
+search ranking of a crate. It is highly discouraged to omit everything in a
+published crate.
+
+SPDX 2.1 license expressions are documented
+[here][spdx-2.1-license-expressions].  The current version of the
+license list is available [here][spdx-license-list], and version 2.4
+is available [here][spdx-license-list-2.4].
+
+#### The `metadata` table (optional)
+
+Cargo by default will warn about unused keys in `Cargo.toml` to assist in
+detecting typos and such. The `package.metadata` table, however, is completely
+ignored by Cargo and will not be warned about. This section can be used for
+tools which would like to store package configuration in `Cargo.toml`. For
+example:
+
+```toml
+[package]
+name = "..."
+# ...
+
+# Metadata used when generating an Android APK, for example.
+[package.metadata.android]
+package-name = "my-awesome-android-app"
+assets = "path/to/static"
+```
+
+### Dependency sections
+
+See the [specifying dependencies page](reference/specifying-dependencies.html) for
+information on the `[dependencies]`, `[dev-dependencies]`,
+`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections.
+
+### The `[profile.*]` sections
+
+Cargo supports custom configuration of how rustc is invoked through profiles at
+the top level. Any manifest may declare a profile, but only the top level
+package’s profiles are actually read. All dependencies’ profiles will be
+overridden. This is done so the top-level package has control over how its
+dependencies are compiled.
+
+There are four currently supported profile names, all of which have the same
+configuration available to them. Listed below is the configuration available,
+along with the defaults for each profile.
+
+```toml
+# The development profile, used for `cargo build`.
+[profile.dev]
+opt-level = 0      # controls the `--opt-level` the compiler builds with.
+                   # 0-1 is good for debugging. 2 is well-optimized. Max is 3.
+                   # 's' attempts to reduce size, 'z' reduces size even more.
+debug = true       # (u32 or bool) Include debug information (debug symbols).
+                   # Equivalent to `-C debuginfo=2` compiler flag.
+rpath = false      # controls whether compiler should set loader paths.
+                   # If true, passes `-C rpath` flag to the compiler.
+lto = false        # Link Time Optimization usually reduces size of binaries
+                   # and static libraries. Increases compilation time.
+                   # If true, passes `-C lto` flag to the compiler, and if a
+                   # string is specified like 'thin' then `-C lto=thin` will
+                   # be passed.
+debug-assertions = true # controls whether debug assertions are enabled
+                   # (e.g. debug_assert!() and arithmetic overflow checks)
+codegen-units = 16 # if > 1 enables parallel code generation which improves
+                   # compile times, but prevents some optimizations.
+                   # Passes `-C codegen-units`.
+panic = 'unwind'   # panic strategy (`-C panic=...`), can also be 'abort'
+incremental = true # whether or not incremental compilation is enabled
+overflow-checks = true # use overflow checks for integer arithmetic.
+                   # Passes the `-C overflow-checks=...` flag to the compiler.
+
+# The release profile, used for `cargo build --release`.
+[profile.release]
+opt-level = 3
+debug = false
+rpath = false
+lto = false
+debug-assertions = false
+codegen-units = 16
+panic = 'unwind'
+incremental = false
+overflow-checks = false
+
+# The testing profile, used for `cargo test`.
+[profile.test]
+opt-level = 0
+debug = 2
+rpath = false
+lto = false
+debug-assertions = true
+codegen-units = 16
+panic = 'unwind'
+incremental = true
+overflow-checks = true
+
+# The benchmarking profile, used for `cargo bench` and `cargo test --release`.
+[profile.bench]
+opt-level = 3
+debug = false
+rpath = false
+lto = false
+debug-assertions = false
+codegen-units = 16
+panic = 'unwind'
+incremental = false
+overflow-checks = false
+```
+
+### The `[features]` section
+
+Cargo supports features to allow expression of:
+
+* conditional compilation options (usable through `cfg` attributes);
+* optional dependencies, which enhance a package, but are not required; and
+* clusters of optional dependencies, such as `postgres`, that would include the
+  `postgres` package, the `postgres-macros` package, and possibly other packages
+  (such as development-time mocking libraries, debugging tools, etc.).
+
+A feature of a package is either an optional dependency, or a set of other
+features. The format for specifying features is:
+
+```toml
+[package]
+name = "awesome"
+
+[features]
+# The default set of optional packages. Most people will want to use these
+# packages, but they are strictly optional. Note that `session` is not a package
+# but rather another feature listed in this manifest.
+default = ["jquery", "uglifier", "session"]
+
+# A feature with no dependencies is used mainly for conditional compilation,
+# like `#[cfg(feature = "go-faster")]`.
+go-faster = []
+
+# The `secure-password` feature depends on the bcrypt package. This aliasing
+# will allow people to talk about the feature in a higher-level way and allow
+# this package to add more requirements to the feature in the future.
+secure-password = ["bcrypt"]
+
+# Features can be used to reexport features of other packages. The `session`
+# feature of package `awesome` will ensure that the `session` feature of the
+# package `cookie` is also enabled.
+session = ["cookie/session"]
+
+[dependencies]
+# These packages are mandatory and form the core of this package’s distribution.
+cookie = "1.2.0"
+oauth = "1.1.0"
+route-recognizer = "=2.1.0"
+
+# A list of all of the optional dependencies, some of which are included in the
+# above `features`. They can be opted into by apps.
+jquery = { version = "1.0.2", optional = true }
+uglifier = { version = "1.5.3", optional = true }
+bcrypt = { version = "*", optional = true }
+civet = { version = "*", optional = true }
+```
+
+To use the package `awesome`:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+#### Rules
+
+The usage of features is subject to a few rules:
+
+* Feature names must not conflict with other package names in the manifest. This
+  is because they are opted into via `features = [...]`, which only has a single
+  namespace.
+* With the exception of the `default` feature, all features are opt-in. To opt
+  out of the default feature, use `default-features = false` and cherry-pick
+  individual features.
+* Feature groups are not allowed to cyclically depend on one another.
+* Dev-dependencies cannot be optional.
+* Feature groups can only reference optional dependencies.
+* When a feature is selected, Cargo will call `rustc` with `--cfg
+  feature="${feature_name}"`. If a feature group is included, it and all of its
+  individual features will be included. This can be tested in code via
+  `#[cfg(feature = "foo")]`.
+
+Note that it is explicitly allowed for features to not actually activate any
+optional dependencies. This allows packages to internally enable/disable
+features without requiring a new dependency.
+
+#### Usage in end products
+
+One major use-case for this feature is specifying optional features in
+end-products. For example, the Servo package may want to include optional
+features that people can enable or disable when they build it.
+
+In that case, Servo will describe features in its `Cargo.toml` and they can be
+enabled using command-line flags:
+
+```console
+$ cargo build --release --features "shumway pdf"
+```
+
+Default features could be excluded using `--no-default-features`.
+
+#### Usage in packages
+
+In most cases, the concept of *optional dependency* in a library is best
+expressed as a separate package that the top-level application depends on.
+
+However, high-level packages, like Iron or Piston, may want the ability to
+curate a number of packages for easy installation. The current Cargo system
+allows them to curate a number of mandatory dependencies into a single package
+for easy installation.
+
+In some cases, packages may want to provide additional curation for optional
+dependencies:
+
+* grouping a number of low-level optional dependencies together into a single
+  high-level feature;
+* specifying packages that are recommended (or suggested) to be included by
+  users of the package; and
+* including a feature (like `secure-password` in the motivating example) that
+  will only work if an optional dependency is available, and would be difficult
+  to implement as a separate package (for example, it may be overly difficult to
+  design an IO package to be completely decoupled from OpenSSL, with opt-in via
+  the inclusion of a separate package).
+
+In almost all cases, it is an antipattern to use these features outside of
+high-level packages that are designed for curation. If a feature is optional, it
+can almost certainly be expressed as a separate package.
+
+### The `[workspace]` section
+
+Packages can define a workspace which is a set of crates that will all share the
+same `Cargo.lock` and output directory. The `[workspace]` table can be defined
+as:
+
+```toml
+[workspace]
+
+# Optional key, inferred from path dependencies if not present.
+# Additional non-path dependencies that should be included must be given here.
+# In particular, for a virtual manifest, all members have to be listed.
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+
+# Optional key, empty if not present.
+exclude = ["path1", "path/to/dir2"]
+```
+
+Workspaces were added to Cargo as part of [RFC 1525] and have a number of
+properties:
+
+* A workspace can contain multiple crates where one of them is the *root crate*.
+* The *root crate*'s `Cargo.toml` contains the `[workspace]` table, but is not
+  required to have other configuration.
+* Whenever any crate in the workspace is compiled, output is placed in the
+  *workspace root*, i.e. next to the *root crate*'s `Cargo.toml`.
+* The lock file for all crates in the workspace resides in the *workspace root*.
+* The `[patch]`, `[replace]` and `[profile.*]` sections in `Cargo.toml`
+  are only recognized
+  in the *root crate*'s manifest, and ignored in member crates' manifests.
+
+[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md
+
+The *root crate* of a workspace, indicated by the presence of `[workspace]` in
+its manifest, is responsible for defining the entire workspace. All `path`
+dependencies residing in the workspace directory become members. You can add
+additional packages to the workspace by listing them in the `members` key. Note
+that members of the workspaces listed explicitly will also have their path
+dependencies included in the workspace. Sometimes a package may have a lot of
+workspace members and it can be onerous to keep up to date. The path dependency
+can also use [globs][globs] to match multiple paths. Finally, the `exclude`
+key can be used to blacklist paths from being included in a workspace. This can
+be useful if some path dependencies aren't desired to be in the workspace at
+all.
+
+The `package.workspace` manifest key (described above) is used in member crates
+to point at a workspace's root crate. If this key is omitted then it is inferred
+to be the first crate whose manifest contains `[workspace]` upwards in the
+filesystem.
+
+A crate may either specify `package.workspace` or specify `[workspace]`. That
+is, a crate cannot both be a root crate in a workspace (contain `[workspace]`)
+and also be a member crate of another workspace (contain `package.workspace`).
+
+Most of the time workspaces will not need to be dealt with as `cargo new` and
+`cargo init` will handle workspace configuration automatically.
+
+#### Virtual Manifest
+
+In workspace manifests, if the `package` table is present, the workspace root
+crate will be treated as a normal package, as well as a workspace. If the
+`package` table is not present in a workspace manifest, it is called a *virtual
+manifest*.
+
+#### Package selection
+
+In a workspace, package-related cargo commands like `cargo build` apply to
+packages selected by `-p` / `--package` or `--all` command-line parameters.
+When neither is specified, the optional `default-members` configuration is used:
+
+```toml
+[workspace]
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+default-members = ["path/to/member2", "path/to/member3/foo"]
+```
+
+When specified, `default-members` must expand to a subset of `members`.
+
+When `default-members` is not specified, the default is the root manifest
+if it is a package, or every member manifest (as if `--all` were specified
+on the command-line) for virtual workspaces.
+
+### The project layout
+
+If your package is an executable, name the main source file `src/main.rs`. If it
+is a library, name the main source file `src/lib.rs`.
+
+Cargo will also treat any files located in `src/bin/*.rs` as executables. If your
+executable consists of more than just one source file, you might also use a directory
+inside `src/bin` containing a `main.rs` file which will be treated as an executable
+with a name of the parent directory.
+Do note, however, once you add a `[[bin]]` section ([see
+below](#configuring-a-target)), Cargo will no longer automatically build files
+located in `src/bin/*.rs`.  Instead you must create a `[[bin]]` section for
+each file you want to build.
+
+Your package can optionally contain folders named `examples`, `tests`, and
+`benches`, which Cargo will treat as containing examples,
+integration tests, and benchmarks respectively. Analogous to `bin` targets, they
+may be composed of single files or directories with a `main.rs` file.
+
+```
+▾ src/           # directory containing source files
+  lib.rs         # the main entry point for libraries and packages
+  main.rs        # the main entry point for packages producing executables
+  ▾ bin/         # (optional) directory containing additional executables
+    *.rs
+  ▾ */           # (optional) directories containing multi-file executables
+    main.rs
+▾ examples/      # (optional) examples
+  *.rs
+  ▾ */           # (optional) directories containing multi-file examples
+    main.rs
+▾ tests/         # (optional) integration tests
+  *.rs
+  ▾ */           # (optional) directories containing multi-file tests
+    main.rs
+▾ benches/       # (optional) benchmarks
+  *.rs
+  ▾ */           # (optional) directories containing multi-file benchmarks
+    main.rs
+```
+
+To structure your code after you've created the files and folders for your
+package, you should remember to use Rust's module system, which you can read
+about in [the book](https://doc.rust-lang.org/book/crates-and-modules.html).
+
+### Examples
+
+Files located under `examples` are example uses of the functionality provided by
+the library. When compiled, they are placed in the `target/examples` directory.
+
+They can compile either as executables (with a `main()` function) or libraries
+and pull in the library by using `extern crate <library-name>`. They are
+compiled when you run your tests to protect them from bitrotting.
+
+You can run individual executable examples with the command `cargo run --example
+<example-name>`.
+
+Specify `crate-type` to make an example be compiled as a library (additional
+information about crate types is available in
+[The Rust Reference](https://doc.rust-lang.org/reference/linkage.html)):
+
+```toml
+[[example]]
+name = "foo"
+crate-type = ["staticlib"]
+```
+
+You can build individual library examples with the command `cargo build
+--example <example-name>`.
+
+### Tests
+
+When you run `cargo test`, Cargo will:
+
+* compile and run your library’s unit tests, which are in the files reachable
+  from `lib.rs` (naturally, any sections marked with `#[cfg(test)]` will be
+  considered at this stage);
+* compile and run your library’s documentation tests, which are embedded inside
+  of documentation blocks;
+* compile and run your library’s [integration tests](#integration-tests); and
+* compile your library’s examples.
+
+#### Integration tests
+
+Each file in `tests/*.rs` is an integration test. When you run `cargo test`,
+Cargo will compile each of these files as a separate crate. The crate can link
+to your library by using `extern crate <library-name>`, like any other code that
+depends on it.
+
+Cargo will not automatically compile files inside subdirectories of `tests`, but
+an integration test can import modules from these directories as usual. For
+example, if you want several integration tests to share some code, you can put
+the shared code in `tests/common/mod.rs` and then put `mod common;` in each of
+the test files.
+
+### Configuring a target
+
+All of the  `[[bin]]`, `[lib]`, `[[bench]]`, `[[test]]`, and `[[example]]`
+sections support similar configuration for specifying how a target should be
+built. The double-bracket sections like `[[bin]]` are array-of-table of
+[TOML](https://github.com/toml-lang/toml#array-of-tables), which means you can
+write more than one `[[bin]]` section to make several executables in your crate.
+
+The example below uses `[lib]`, but it also applies to all other sections
+as well. All values listed are the defaults for that option unless otherwise
+specified.
+
+```toml
+[package]
+# ...
+
+[lib]
+# The name of a target is the name of the library that will be generated. This
+# is defaulted to the name of the package, with any dashes replaced
+# with underscores. (Rust `extern crate` declarations reference this name;
+# therefore the value must be a valid Rust identifier to be usable.)
+name = "foo"
+
+# This field points at where the crate is located, relative to the `Cargo.toml`.
+path = "src/lib.rs"
+
+# A flag for enabling unit tests for this target. This is used by `cargo test`.
+test = true
+
+# A flag for enabling documentation tests for this target. This is only relevant
+# for libraries; it has no effect on other sections. This is used by
+# `cargo test`.
+doctest = true
+
+# A flag for enabling benchmarks for this target. This is used by `cargo bench`.
+bench = true
+
+# A flag for enabling documentation of this target. This is used by `cargo doc`.
+doc = true
+
+# If the target is meant to be a compiler plugin, this field must be set to true
+# for Cargo to correctly compile it and make it available for all dependencies.
+plugin = false
+
+# If the target is meant to be a "macros 1.1" procedural macro, this field must
+# be set to true.
+proc-macro = false
+
+# If set to false, `cargo test` will omit the `--test` flag to rustc, which
+# stops it from generating a test harness. This is useful when the binary being
+# built manages the test runner itself.
+harness = true
+
+# If set then a target can be configured to use a different edition than the
+# `[package]` is configured to use, perhaps only compiling a library with the
+# 2018 edition or only compiling one unit test with the 2015 edition. By default
+# all targets are compiled with the edition specified in `[package]`.
+edition = '2015'
+```
+
+The `[package]` also includes the optional `autobins`, `autoexamples`,
+`autotests`, and `autobenches` keys to explicitly opt-in or opt-out of
+auto-discovering specific target kinds.
+
+#### The `required-features` field (optional)
+
+The `required-features` field specifies which features the target needs in order
+to be built. If any of the required features are not selected, the target will
+be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, `[[test]]`,
+and `[[example]]` sections; it has no effect on `[lib]`.
+
+```toml
+[features]
+# ...
+postgres = []
+sqlite = []
+tools = []
+
+[[bin]]
+# ...
+required-features = ["postgres", "tools"]
+```
+
+#### Building dynamic or static libraries
+
+If your package produces a library, you can specify which kind of library to
+build by explicitly listing the library in your `Cargo.toml`:
+
+```toml
+# ...
+
+[lib]
+name = "..."
+crate-type = ["dylib"] # could be `staticlib` as well
+```
+
+The available options are `dylib`, `rlib`, `staticlib`, `cdylib`, and
+`proc-macro`. You should only use this option in a package. Cargo will always
+compile packages (dependencies) based on the requirements of the package that
+includes them.
+
+You can read more about the different crate types in the
+[Rust Reference Manual](https://doc.rust-lang.org/reference/linkage.html).
+
+### The `[patch]` Section
+
+This section of Cargo.toml can be used to [override dependencies][replace] with
+other copies. The syntax is similar to the `[dependencies]` section:
+
+```toml
+[patch.crates-io]
+foo = { git = 'https://github.com/example/foo' }
+bar = { path = 'my/local/bar' }
+
+[dependencies.baz]
+git = 'https://github.com/example/baz'
+
+[patch.'https://github.com/example/baz']
+baz = { git = 'https://github.com/example/patched-baz', branch = 'my-branch' }
+```
+
+The `[patch]` table is made of dependency-like sub-tables. Each key after
+`[patch]` is a URL of the source that's being patched, or `crates-io` if
+you're modifying the https://crates.io registry. In the example above
+`crates-io` could be replaced with a git URL such as
+`https://github.com/rust-lang-nursery/log`; the second `[patch]`
+section in the example uses this to specify a source called `baz`.
+
+Each entry in these tables is a normal dependency specification, the same as
+found in the `[dependencies]` section of the manifest. The dependencies listed
+in the `[patch]` section are resolved and used to patch the source at the
+URL specified. The above manifest snippet patches the `crates-io` source (e.g.
+crates.io itself) with the `foo` crate and `bar` crate. It also
+patches the `https://github.com/example/baz` source with a `my-branch` that
+comes from elsewhere.
+
+Sources can be patched with versions of crates that do not exist, and they can
+also be patched with versions of crates that already exist. If a source is
+patched with a crate version that already exists in the source, then the
+source's original crate is replaced.
+
+More information about overriding dependencies can be found in the [overriding
+dependencies][replace] section of the documentation and [RFC 1969] for the
+technical specification of this feature.
+
+[RFC 1969]: https://github.com/rust-lang/rfcs/pull/1969
+[replace]: reference/specifying-dependencies.html#overriding-dependencies
+
+### The `[replace]` Section
+
+This section of Cargo.toml can be used to [override dependencies][replace] with
+other copies. The syntax is similar to the `[dependencies]` section:
+
+```toml
+[replace]
+"foo:0.1.0" = { git = 'https://github.com/example/foo' }
+"bar:1.0.2" = { path = 'my/local/bar' }
+```
+
+Each key in the `[replace]` table is a [package id
+specification](reference/pkgid-spec.html) which allows arbitrarily choosing a node in the
+dependency graph to override. The value of each key is the same as the
+`[dependencies]` syntax for specifying dependencies, except that you can't
+specify features. Note that when a crate is overridden the copy it's overridden
+with must have both the same name and version, but it can come from a different
+source (e.g. git or a local path).
+
+More information about overriding dependencies can be found in the [overriding
+dependencies][replace] section of the documentation.
+
+[spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60
+[spdx-license-list]: https://spdx.org/licenses/
+[spdx-license-list-2.4]: https://github.com/spdx/license-list-data/tree/v2.4
diff --git a/src/doc/src/reference/pkgid-spec.md b/src/doc/src/reference/pkgid-spec.md
new file mode 100644 (file)
index 0000000..bd7ac2d
--- /dev/null
@@ -0,0 +1,44 @@
+## Package ID Specifications
+
+### Package ID specifications
+
+Subcommands of Cargo frequently need to refer to a particular package within a
+dependency graph for various operations like updating, cleaning, building, etc.
+To solve this problem, Cargo supports Package ID Specifications. A specification
+is a string which is used to uniquely refer to one package within a graph of
+packages.
+
+#### Specification grammar
+
+The formal grammar for a Package ID Specification is:
+
+```notrust
+pkgid := pkgname
+       | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ]
+pkgname := name [ ":" semver ]
+
+proto := "http" | "git" | ...
+```
+
+Here, brackets indicate that the contents are optional.
+
+#### Example specifications
+
+These could all be references to a package `foo` version `1.2.3` from the
+registry at `crates.io`:
+
+| pkgid                        | name  | version | url                    |
+|:-----------------------------|:-----:|:-------:|:----------------------:|
+| `foo`                        | `foo` | `*`     | `*`                    |
+| `foo:1.2.3`                  | `foo` | `1.2.3` | `*`                    |
+| `crates.io/foo`              | `foo` | `*`     | `*://crates.io/foo`    |
+| `crates.io/foo#1.2.3`        | `foo` | `1.2.3` | `*://crates.io/foo`    |
+| `crates.io/bar#foo:1.2.3`    | `foo` | `1.2.3` | `*://crates.io/bar`    |
+| `http://crates.io/foo#1.2.3` | `foo` | `1.2.3` | `http://crates.io/foo` |
+
+#### Brevity of specifications
+
+The goal of this is to enable both succinct and exhaustive syntaxes for
+referring to packages in a dependency graph. Ambiguous references may refer to
+one or more packages. Most commands generate an error if more than one package
+could be referred to with the same specification.
diff --git a/src/doc/src/reference/publishing.md b/src/doc/src/reference/publishing.md
new file mode 100644 (file)
index 0000000..79cf9c2
--- /dev/null
@@ -0,0 +1,222 @@
+## Publishing on crates.io
+
+Once you've got a library that you'd like to share with the world, it's time to
+publish it on [crates.io]! Publishing a crate is when a specific
+version is uploaded to be hosted on [crates.io].
+
+Take care when publishing a crate, because a publish is **permanent**. The
+version can never be overwritten, and the code cannot be deleted. There is no
+limit to the number of versions which can be published, however.
+
+### Before your first publish
+
+First things first, you’ll need an account on [crates.io] to acquire
+an API token. To do so, [visit the home page][crates.io] and log in via a GitHub
+account (required for now). After this, visit your [Account
+Settings](https://crates.io/me) page and run the `cargo login` command
+specified.
+
+```console
+$ cargo login abcdefghijklmnopqrstuvwxyz012345
+```
+
+This command will inform Cargo of your API token and store it locally in your
+`~/.cargo/credentials` (previously it was `~/.cargo/config`).  Note that this
+token is a **secret** and should not be shared with anyone else. If it leaks for
+any reason, you should regenerate it immediately.
+
+### Before publishing a new crate
+
+Keep in mind that crate names on [crates.io] are allocated on a
+first-come, first-served basis. Once a crate name is taken, it cannot be used
+for another crate.
+
+#### Packaging a crate
+
+The next step is to package up your crate into a format that can be uploaded to
+[crates.io]. For this we’ll use the `cargo package` subcommand. This will take
+our entire crate and package it all up into a `*.crate` file in the
+`target/package` directory.
+
+```console
+$ cargo package
+```
+
+As an added bonus, the `*.crate` will be verified independently of the current
+source tree. After the `*.crate` is created, it’s unpacked into
+`target/package` and then built from scratch to ensure that all necessary files
+are there for the build to succeed. This behavior can be disabled with the
+`--no-verify` flag.
+
+Now’s a good time to take a look at the `*.crate` file to make sure you didn’t
+accidentally package up that 2GB video asset, or large data files used for code
+generation, integration tests, or benchmarking.  There is currently a 10MB
+upload size limit on `*.crate` files. So, if the size of `tests` and `benches`
+directories and their dependencies are up to a couple of MBs, you can keep them
+in your package; otherwise, better to exclude them.
+
+Cargo will automatically ignore files ignored by your version control system
+when packaging, but if you want to specify an extra set of files to ignore you
+can use the `exclude` key in the manifest:
+
+```toml
+[package]
+# ...
+exclude = [
+    "public/assets/*",
+    "videos/*",
+]
+```
+
+The syntax of each element in this array is what
+[rust-lang/glob](https://github.com/rust-lang/glob) accepts. If you’d rather
+roll with a whitelist instead of a blacklist, Cargo also supports an `include`
+key, which if set, overrides the `exclude` key:
+
+```toml
+[package]
+# ...
+include = [
+    "**/*.rs",
+    "Cargo.toml",
+]
+```
+
+### Uploading the crate
+
+Now that we’ve got a `*.crate` file ready to go, it can be uploaded to
+[crates.io] with the `cargo publish` command. And that’s it, you’ve now published
+your first crate!
+
+```console
+$ cargo publish
+```
+
+If you’d like to skip the `cargo package` step, the `cargo publish` subcommand
+will automatically package up the local crate if a copy isn’t found already.
+
+Be sure to check out the [metadata you can
+specify](reference/manifest.html#package-metadata) to ensure your crate can be
+discovered more easily!
+
+### Publishing a new version of an existing crate
+
+In order to release a new version, change the `version` value specified in your
+`Cargo.toml` manifest. Keep in mind [the semver
+rules](reference/manifest.html#the-version-field). Then optionally run `cargo package` if
+you want to inspect the `*.crate` file for the new version before publishing,
+and run `cargo publish` to upload the new version.
+
+### Managing a crates.io-based crate
+
+Management of crates is primarily done through the command line `cargo` tool
+rather than the [crates.io] web interface. For this, there are a few subcommands
+to manage a crate.
+
+#### `cargo yank`
+
+Occasions may arise where you publish a version of a crate that actually ends up
+being broken for one reason or another (syntax error, forgot to include a file,
+etc.). For situations such as this, Cargo supports a “yank” of a version of a
+crate.
+
+```console
+$ cargo yank --vers 1.0.1
+$ cargo yank --vers 1.0.1 --undo
+```
+
+A yank **does not** delete any code. This feature is not intended for deleting
+accidentally uploaded secrets, for example. If that happens, you must reset
+those secrets immediately.
+
+The semantics of a yanked version are that no new dependencies can be created
+against that version, but all existing dependencies continue to work. One of the
+major goals of [crates.io] is to act as a permanent archive of crates that does
+not change over time, and allowing deletion of a version would go against this
+goal. Essentially a yank means that all packages with a `Cargo.lock` will not
+break, while any future `Cargo.lock` files generated will not list the yanked
+version.
+
+#### `cargo owner`
+
+A crate is often developed by more than one person, or the primary maintainer
+may change over time! The owner of a crate is the only person allowed to publish
+new versions of the crate, but an owner may designate additional owners.
+
+```console
+$ cargo owner --add my-buddy
+$ cargo owner --remove my-buddy
+$ cargo owner --add github:rust-lang:owners
+$ cargo owner --remove github:rust-lang:owners
+```
+
+The owner IDs given to these commands must be GitHub user names or GitHub teams.
+
+If a user name is given to `--add`, that user becomes a “named” owner, with
+full rights to the crate. In addition to being able to publish or yank versions
+of the crate, they have the ability to add or remove owners, *including* the
+owner that made *them* an owner. Needless to say, you shouldn’t make people you
+don’t fully trust into a named owner. In order to become a named owner, a user
+must have logged into [crates.io] previously.
+
+If a team name is given to `--add`, that team becomes a “team” owner, with
+restricted right to the crate. While they have permission to publish or yank
+versions of the crate, they *do not* have the ability to add or remove owners.
+In addition to being more convenient for managing groups of owners, teams are
+just a bit more secure against owners becoming malicious.
+
+The syntax for teams is currently `github:org:team` (see examples above).
+In order to add a team as an owner one must be a member of that team. No
+such restriction applies to removing a team as an owner.
+
+### GitHub permissions
+
+Team membership is not something GitHub provides simple public access to, and it
+is likely for you to encounter the following message when working with them:
+
+> It looks like you don’t have permission to query a necessary property from
+GitHub to complete this request. You may need to re-authenticate on [crates.io]
+to grant permission to read GitHub org memberships. Just go to
+https://crates.io/login
+
+This is basically a catch-all for “you tried to query a team, and one of the
+five levels of membership access control denied this”. That is not an
+exaggeration. GitHub’s support for team access control is Enterprise Grade.
+
+The most likely cause of this is simply that you last logged in before this
+feature was added. We originally requested *no* permissions from GitHub when
+authenticating users, because we didn’t actually ever use the user’s token for
+anything other than logging them in. However to query team membership on your
+behalf, we now require [the `read:org` scope][oauth-scopes].
+
+You are free to deny us this scope, and everything that worked before teams
+were introduced will keep working. However you will never be able to add a team
+as an owner, or publish a crate as a team owner. If you ever attempt to do this,
+you will get the error above. You may also see this error if you ever try to
+publish a crate that you don’t own at all, but otherwise happens to have a team.
+
+If you ever change your mind, or just aren’t sure if [crates.io] has sufficient
+permission, you can always go to https://crates.io/login, which will prompt you
+for permission if [crates.io] doesn’t have all the scopes it would like to.
+
+An additional barrier to querying GitHub is that the organization may be
+actively denying third party access. To check this, you can go to:
+
+    https://github.com/organizations/:org/settings/oauth_application_policy
+
+where `:org` is the name of the organization (e.g. rust-lang). You may see
+something like:
+
+![Organization Access Control](images/org-level-acl.png)
+
+Where you may choose to explicitly remove [crates.io] from your organization’s
+blacklist, or simply press the “Remove Restrictions” button to allow all third
+party applications to access this data.
+
+Alternatively, when [crates.io] requested the `read:org` scope, you could have
+explicitly whitelisted [crates.io] querying the org in question by pressing
+the “Grant Access” button next to its name:
+
+![Authentication Access Control](images/auth-level-acl.png)
+
+[crates.io]: https://crates.io/
+[oauth-scopes]: https://developer.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/
diff --git a/src/doc/src/reference/source-replacement.md b/src/doc/src/reference/source-replacement.md
new file mode 100644 (file)
index 0000000..3467330
--- /dev/null
@@ -0,0 +1,139 @@
+## Source Replacement
+
+This document is about replacing the crate index. You can read about overriding
+dependencies in the [overriding dependencies][overriding] section of this
+documentation.
+
+Cargo supports the ability to **replace one source with another** to express
+strategies along the lines of mirrors or vendoring dependencies. Configuration
+is currently done through the [`.cargo/config` configuration][config] mechanism,
+like so:
+
+[config]: reference/config.html
+
+```toml
+# The `source` table is where all keys related to source-replacement
+# are stored.
+[source]
+
+# Under the `source` table are a number of other tables whose keys are a
+# name for the relevant source. For example this section defines a new
+# source, called `my-awesome-source`, which comes from a directory
+# located at `vendor` relative to the directory containing this `.cargo/config`
+# file
+[source.my-awesome-source]
+directory = "vendor"
+
+# Git sources can optionally specify a branch/tag/rev as well
+git = "https://example.com/path/to/repo"
+# branch = "master"
+# tag = "v1.0.1"
+# rev = "313f44e8"
+
+# The crates.io default source for crates is available under the name
+# "crates-io", and here we use the `replace-with` key to indicate that it's
+# replaced with our source above.
+[source.crates-io]
+replace-with = "my-awesome-source"
+```
+
+With this configuration Cargo attempts to look up all crates in the directory
+"vendor" rather than querying the online registry at crates.io. Using source
+replacement Cargo can express:
+
+* Vendoring - custom sources can be defined which represent crates on the local
+  filesystem. These sources are subsets of the source that they're replacing and
+  can be checked into packages if necessary.
+
+* Mirroring - sources can be replaced with an equivalent version which acts as a
+  cache for crates.io itself.
+
+Cargo has a core assumption about source replacement that the source code is
+exactly the same from both sources. In our above example Cargo assumes that all
+of the crates coming from `my-awesome-source` are the exact same as the copies
+from `crates-io`. Note that this also means that `my-awesome-source` is not
+allowed to have crates which are not present in the `crates-io` source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or a private registry. Cargo supports patching
+dependencies through the usage of [the `[replace]` key][replace-section], and
+private registry support is planned for a future version of Cargo.
+
+[replace-section]: reference/manifest.html#the-replace-section
+[overriding]: reference/specifying-dependencies.html#overriding-dependencies
+
+### Configuration
+
+Configuration of replacement sources is done through [`.cargo/config`][config]
+and the full set of available keys are:
+
+```toml
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Available kinds of sources that can be specified (described below)
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+```
+
+The `crates-io` represents the crates.io online registry (default source of
+crates) and can be replaced with:
+
+```toml
+[source.crates-io]
+replace-with = 'another-source'
+```
+
+### Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+### Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). Local registries
+are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index like the normal registry is.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, available on
+crates.io and can be installed with `cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
+
+### Directory Sources
+
+A "directory source" is similar to a local registry source where it contains a
+number of crates available on the local filesystem, suitable for vendoring
+dependencies. Also like local registries, directory sources can primarily be
+managed by an external subcommand, [`cargo-vendor`][cargo-vendor], which can be
+installed with `cargo install cargo-vendor`.
+
+[cargo-vendor]: https://crates.io/crates/cargo-vendor
+
+Directory sources are distinct from local registries though in that they contain
+the unpacked version of `*.crate` files, making it more suitable in some
+situations to check everything into source control. A directory source is just a
+directory containing a number of other directories which contain the source code
+for crates (the unpacked version of `*.crate` files). Currently no restriction
+is placed on the name of each directory.
+
+Each crate in a directory source also has an associated metadata file indicating
+the checksum of each file in the crate to protect against accidental
+modifications.
diff --git a/src/doc/src/reference/specifying-dependencies.md b/src/doc/src/reference/specifying-dependencies.md
new file mode 100644 (file)
index 0000000..293d321
--- /dev/null
@@ -0,0 +1,595 @@
+## Specifying Dependencies
+
+Your crates can depend on other libraries from [crates.io], `git` repositories, or
+subdirectories on your local file system. You can also temporarily override the
+location of a dependency—for example, to be able to test out a bug fix in the
+dependency that you are working on locally. You can have different
+dependencies for different platforms, and dependencies that are only used during
+development. Let's take a look at how to do each of these.
+
+### Specifying dependencies from crates.io
+
+Cargo is configured to look for dependencies on [crates.io] by default. Only
+the name and a version string are required in this case. In [the cargo
+guide](guide/index.html), we specified a dependency on the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The string `"0.1.12"` is a [semver] version requirement. Since this
+string does not have any operators in it, it is interpreted the same way as
+if we had specified `"^0.1.12"`, which is called a caret requirement.
+
+[semver]: https://github.com/steveklabnik/semver#requirements
+
+### Caret requirements
+
+**Caret requirements** allow SemVer compatible updates to a specified version.
+An update is allowed if the new version number does not modify the left-most
+non-zero digit in the major, minor, patch grouping. In this case, if we ran
+`cargo update -p time`, cargo should update us to version `0.1.13` if it is the
+latest `0.1.z` release, but would not update us to `0.2.0`. If instead we had
+specified the version string as `^1.0`, cargo should update to `1.1` if it is
+the latest `1.y` release, but not `2.0`. The version `0.0.x` is not considered
+compatible with any other version.
+
+Here are some more examples of caret requirements and the versions that would
+be allowed with them:
+
+```notrust
+^1.2.3 := >=1.2.3 <2.0.0
+^1.2 := >=1.2.0 <2.0.0
+^1 := >=1.0.0 <2.0.0
+^0.2.3 := >=0.2.3 <0.3.0
+^0.2 := >=0.2.0 <0.3.0
+^0.0.3 := >=0.0.3 <0.0.4
+^0.0 := >=0.0.0 <0.1.0
+^0 := >=0.0.0 <1.0.0
+```
+
+This compatibility convention is different from SemVer in the way it treats
+versions before 1.0.0. While SemVer says there is no compatibility before
+1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y ≥ z`
+and `x > 0`.
+
+### Tilde requirements
+
+**Tilde requirements** specify a minimal version with some ability to update.
+If you specify a major, minor, and patch version or only a major and minor
+version, only patch-level changes are allowed. If you only specify a major
+version, then minor- and patch-level changes are allowed.
+
+`~1.2.3` is an example of a tilde requirement.
+
+```notrust
+~1.2.3 := >=1.2.3 <1.3.0
+~1.2 := >=1.2.0 <1.3.0
+~1 := >=1.0.0 <2.0.0
+```
+
+### Wildcard requirements
+
+**Wildcard requirements** allow for any version where the wildcard is
+positioned.
+
+`*`, `1.*` and `1.2.*` are examples of wildcard requirements.
+
+```notrust
+* := >=0.0.0
+1.* := >=1.0.0 <2.0.0
+1.2.* := >=1.2.0 <1.3.0
+```
+
+### Inequality requirements
+
+**Inequality requirements** allow manually specifying a version range or an
+exact version to depend on.
+
+Here are some examples of inequality requirements:
+
+```notrust
+>= 1.2.0
+> 1
+< 2
+= 1.2.3
+```
+
+### Multiple requirements
+
+Multiple version requirements can also be separated with a comma, e.g. `>= 1.2,
+< 1.5`.
+
+### Specifying dependencies from `git` repositories
+
+To depend on a library located in a `git` repository, the minimum information
+you need to specify is the location of the repository with the `git` key:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand" }
+```
+
+Cargo will fetch the `git` repository at this location then look for a
+`Cargo.toml` for the requested crate anywhere inside the `git` repository
+(not necessarily at the root).
+
+Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our package.
+You can combine the `git` key with the `rev`, `tag`, or `branch` keys to
+specify something else. Here's an example of specifying that you want to use
+the latest commit on a branch named `next`:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand", branch = "next" }
+```
+
+### Specifying path dependencies
+
+Over time, our `hello_world` package from [the guide](guide/index.html) has
+grown significantly in size! It’s gotten to the point that we probably want to
+split out a separate crate for others to use. To do this Cargo supports **path
+dependencies** which are typically sub-crates that live within one repository.
+Let’s start off by making a new crate inside of our `hello_world` package:
+
+```console
+# inside of hello_world/
+$ cargo new hello_utils
+```
+
+This will create a new folder `hello_utils` inside of which a `Cargo.toml` and
+`src` folder are ready to be configured. In order to tell Cargo about this, open
+up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils" }
+```
+
+This tells Cargo that we depend on a crate called `hello_utils` which is found
+in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in).
+
+And that’s it! The next `cargo build` will automatically build `hello_utils` and
+all of its own dependencies, and others can also start using the crate as well.
+However, crates that use dependencies specified with only a path are not
+permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we
+would need to publish a version of `hello_utils` to [crates.io](https://crates.io)
+and specify its version in the dependencies line as well:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils", version = "0.1.0" }
+```
+
+### Overriding dependencies
+
+There are a number of methods in Cargo to support overriding dependencies and
+otherwise controlling the dependency graph. These options are typically, though,
+only available at the workspace level and aren't propagated through
+dependencies. In other words, "applications" have the ability to override
+dependencies but "libraries" do not.
+
+The desire to override a dependency or otherwise alter some dependencies can
+arise through a number of scenarios. Most of them, however, boil down to the
+ability to work with a crate before it's been published to crates.io. For
+example:
+
+* A crate you're working on is also used in a much larger application you're
+  working on, and you'd like to test a bug fix to the library inside of the
+  larger application.
+* An upstream crate you don't work on has a new feature or a bug fix on the
+  master branch of its git repository which you'd like to test out.
+* You're about to publish a new major version of your crate, but you'd like to
+  do integration testing across an entire package to ensure the new major
+  version works.
+* You've submitted a fix to an upstream crate for a bug you found, but you'd
+  like to immediately have your application start depending on the fixed version
+  of the crate to avoid blocking on the bug fix getting merged.
+
+These scenarios are currently all solved with the [`[patch]` manifest
+section][patch-section]. Historically some of these scenarios have been solved
+with [the `[replace]` section][replace-section], but we'll document the `[patch]`
+section here.
+
+[patch-section]: reference/manifest.html#the-patch-section
+[replace-section]: reference/manifest.html#the-replace-section
+
+### Testing a bugfix
+
+Let's say you're working with the [`uuid` crate] but while you're working on it
+you discover a bug. You are, however, quite enterprising so you decide to also
+try to fix the bug! Originally your manifest will look like:
+
+[`uuid` crate]: https://crates.io/crates/uuid
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0"
+```
+
+First thing we'll do is to clone the [`uuid` repository][uuid-repository]
+locally via:
+
+```console
+$ git clone https://github.com/rust-lang-nursery/uuid
+```
+
+Next we'll edit the manifest of `my-library` to contain:
+
+```toml
+[patch.crates-io]
+uuid = { path = "../path/to/uuid" }
+```
+
+Here we declare that we're *patching* the source `crates-io` with a new
+dependency. This will effectively add the local checked out version of `uuid` to
+the crates.io registry for our local package.
+
+Next up we need to ensure that our lock file is updated to use this new version
+of `uuid` so our package uses the locally checked out copy instead of one from
+crates.io. The way `[patch]` works is that it'll load the dependency at
+`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid`
+it'll *also* return the local version.
+
+This means that the version number of the local checkout is significant and will
+affect whether the patch is used. Our manifest declared `uuid = "1.0"` which
+means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution
+algorithm also means that we'll resolve to the maximum version within that
+range. Typically this doesn't matter as the version of the git repository will
+already be greater or match the maximum version published on crates.io, but it's
+important to keep this in mind!
+
+In any case, typically all you need to do now is:
+
+```console
+$ cargo build
+   Compiling uuid v1.0.0 (.../uuid)
+   Compiling my-library v0.1.0 (.../my-library)
+    Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
+```
+
+And that's it! You're now building with the local version of `uuid` (note the
+path in parentheses in the build output). If you don't see the local path version getting
+built then you may need to run `cargo update -p uuid --precise $version` where
+`$version` is the version of the locally checked out copy of `uuid`.
+
+Once you've fixed the bug you originally found the next thing you'll want to do
+is to likely submit that as a pull request to the `uuid` crate itself. Once
+you've done this then you can also update the `[patch]` section. The listing
+inside of `[patch]` is just like the `[dependencies]` section, so once your pull
+request is merged you could change your `path` dependency to:
+
+```toml
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+[uuid-repository]: https://github.com/rust-lang-nursery/uuid
+
+### Working with an unpublished minor version
+
+Let's now shift gears a bit from bug fixes to adding features. While working on
+`my-library` you discover that a whole new feature is needed in the `uuid`
+crate. You've implemented this feature, tested it locally above with `[patch]`,
+and submitted a pull request. Let's go over how you continue to use and test it
+before it's actually published.
+
+Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but
+since then the master branch of the git repository has updated to `1.0.1`. This
+branch includes your new feature you submitted previously. To use this
+repository we'll edit our `Cargo.toml` to look like
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0.1"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Note that our local dependency on `uuid` has been updated to `1.0.1` as it's
+what we'll actually require once the crate is published. This version doesn't
+exist on crates.io, though, so we provide it with the `[patch]` section of the
+manifest.
+
+Now when our library is built it'll fetch `uuid` from the git repository and
+resolve to 1.0.1 inside the repository instead of trying to download a version
+from crates.io. Once 1.0.1 is published on crates.io the `[patch]` section can
+be deleted.
+
+It's also worth noting that `[patch]` applies *transitively*. Let's say you use
+`my-library` in a larger package, such as:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Remember that `[patch]` is applicable *transitively* but can only be defined at
+the *top level*, so consumers of `my-library` have to repeat the `[patch]` section
+if necessary. Here, though, the new `uuid` crate applies to *both* our dependency on
+`uuid` and the `my-library -> uuid` dependency. The `uuid` crate will be resolved to
+one version for this entire crate graph, 1.0.1, and it'll be pulled from the git
+repository.
+
+#### Overriding repository URL
+
+If the dependency you want to override isn't loaded from `crates.io`, you'll have to change how you use `[patch]` slightly:
+
+```toml
+[patch."https://github.com/your/repository"]
+my-library = { path = "../my-library/path" }
+```
+
+And that's it!
+
+### Prepublishing a breaking change
+
+As a final scenario, let's take a look at working with a new major version of a
+crate, typically accompanied with breaking changes. Sticking with our previous
+crates, this means that we're going to be creating version 2.0.0 of the `uuid`
+crate. After we've submitted all changes upstream we can update our manifest for
+`my-library` to look like:
+
+```toml
+[dependencies]
+uuid = "2.0"
+
+[patch.crates-io]
+uuid = { git = "https://github.com/rust-lang-nursery/uuid", branch = "2.0.0" }
+```
+
+And that's it! Like with the previous example the 2.0.0 version doesn't actually
+exist on crates.io but we can still put it in through a git dependency through
+the usage of the `[patch]` section. As a thought exercise let's take another
+look at the `my-binary` manifest from above again as well:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid', branch = '2.0.0' }
+```
+
+Note that this will actually resolve to two versions of the `uuid` crate. The
+`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but
+the `my-library` crate will use the 2.0.0 version of `uuid`. This will allow you
+to gradually roll out breaking changes to a crate through a dependency graph
+without being forced to update everything all at once.
+
+### Overriding with local dependencies
+
+Sometimes you're only temporarily working on a crate and you don't want to have
+to modify `Cargo.toml` like with the `[patch]` section above. For this use
+case Cargo offers a much more limited version of overrides called **path
+overrides**.
+
+Path overrides are specified through `.cargo/config` instead of `Cargo.toml`,
+and you can find [more documentation about this configuration][config-docs].
+Inside of `.cargo/config` you'll specify a key called `paths`:
+
+[config-docs]: reference/config.html
+
+```toml
+paths = ["/path/to/uuid"]
+```
+
+This array should be filled with directories that contain a `Cargo.toml`. In
+this instance, we’re just adding `uuid`, so it will be the only one that’s
+overridden. This path can be either absolute or relative to the directory that
+contains the `.cargo` folder.
+
+Path overrides are more restricted than the `[patch]` section, however, in
+that they cannot change the structure of the dependency graph. When a
+path replacement is used then the previous set of dependencies
+must all match exactly to the new `Cargo.toml` specification. For example this
+means that path overrides cannot be used to test out adding a dependency to a
+crate, instead `[patch]` must be used in that situation. As a result usage of a
+path override is typically isolated to quick bug fixes rather than larger
+changes.
+
+Note: using a local configuration to override paths will only work for crates
+that have been published to [crates.io]. You cannot use this feature to tell
+Cargo how to find local unpublished crates.
+
+### Platform specific dependencies
+
+
+Platform-specific dependencies take the same format, but are listed under a
+`target` section. Normally Rust-like `#[cfg]` syntax will be used to define
+these sections:
+
+```toml
+[target.'cfg(windows)'.dependencies]
+winhttp = "0.4.0"
+
+[target.'cfg(unix)'.dependencies]
+openssl = "1.0.1"
+
+[target.'cfg(target_arch = "x86")'.dependencies]
+native = { path = "native/i686" }
+
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+native = { path = "native/x86_64" }
+```
+
+Like with Rust, the syntax here supports the `not`, `any`, and `all` operators
+to combine various cfg name/value pairs. Note that the `cfg` syntax has only
+been available since Cargo 0.9.0 (Rust 1.8.0).
+
+In addition to `#[cfg]` syntax, Cargo also supports listing out the full target
+the dependencies would apply to:
+
+```toml
+[target.x86_64-pc-windows-gnu.dependencies]
+winhttp = "0.4.0"
+
+[target.i686-unknown-linux-gnu.dependencies]
+openssl = "1.0.1"
+```
+
+If you’re using a custom target specification, quote the full path and file
+name:
+
+```toml
+[target."x86_64/windows.json".dependencies]
+winhttp = "0.4.0"
+
+[target."i686/linux.json".dependencies]
+openssl = "1.0.1"
+native = { path = "native/i686" }
+
+[target."x86_64/linux.json".dependencies]
+openssl = "1.0.1"
+native = { path = "native/x86_64" }
+```
+
+### Development dependencies
+
+You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format
+is equivalent to `[dependencies]`:
+
+```toml
+[dev-dependencies]
+tempdir = "0.3"
+```
+
+Dev-dependencies are not used when compiling
+a package for building, but are used for compiling tests, examples, and
+benchmarks.
+
+These dependencies are *not* propagated to other packages which depend on this
+package.
+
+You can also have target-specific development dependencies by using
+`dev-dependencies` in the target section header instead of `dependencies`. For
+example:
+
+```toml
+[target.'cfg(unix)'.dev-dependencies]
+mio = "0.0.1"
+```
+
+[crates.io]: https://crates.io/
+
+### Build dependencies
+
+You can depend on other Cargo-based crates for use in your build scripts.
+Dependencies are declared through the `build-dependencies` section of the
+manifest:
+
+```toml
+[build-dependencies]
+cc = "1.0.3"
+```
+
+The build script **does not** have access to the dependencies listed
+in the `dependencies` or `dev-dependencies` section. Build
+dependencies will likewise not be available to the package itself
+unless listed under the `dependencies` section as well. A package
+itself and its build script are built separately, so their
+dependencies need not coincide. Cargo is kept simpler and cleaner by
+using independent dependencies for independent purposes.
+
+### Choosing features
+
+If a package you depend on offers conditional features, you can
+specify which to use:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+More information about features can be found in the
+[manifest documentation](reference/manifest.html#the-features-section).
+
+### Renaming dependencies in `Cargo.toml`
+
+When writing a `[dependencies]` section in `Cargo.toml` the key you write for a
+dependency typically matches up to the name of the crate you import from in the
+code. For some projects, though, you may wish to reference the crate with a
+different name in the code regardless of how it's published on crates.io. For
+example you may wish to:
+
+* Avoid the need to `use foo as bar` in Rust source.
+* Depend on multiple versions of a crate.
+* Depend on crates with the same name from different registries.
+
+To support this Cargo supports a `package` key in the `[dependencies]` section
+of which package should be depended on:
+
+```toml
+[package]
+name = "mypackage"
+version = "0.0.1"
+
+[dependencies]
+foo = "0.1"
+bar = { git = "https://github.com/example/project", package = "foo" }
+baz = { version = "0.1", registry = "custom", package = "foo" }
+```
+
+In this example, three crates are now available in your Rust code:
+
+```rust
+extern crate foo; // crates.io
+extern crate bar; // git repository
+extern crate baz; // registry `custom`
+```
+
+All three of these crates have the package name of `foo` in their own
+`Cargo.toml`, so we're explicitly using the `package` key to inform Cargo that
+we want the `foo` package even though we're calling it something else locally.
+The `package` key, if not specified, defaults to the name of the dependency
+being requested.
+
+Note that if you have an optional dependency like:
+
+```toml
+[dependencies]
+foo = { version = "0.1", package = 'bar', optional = true }
+```
+
+you're depending on the crate `bar` from crates.io, but your crate has a `foo`
+feature instead of a `bar` feature. That is, names of features take after the
+name of the dependency, not the package name, when renamed.
+
+Enabling transitive dependencies works similarly, for example we could add the
+following to the above manifest:
+
+```toml
+[features]
+log-debug = ['foo/log-debug'] # using 'bar/log-debug' would be an error!
+```
diff --git a/src/doc/src/reference/unstable.md b/src/doc/src/reference/unstable.md
new file mode 100644 (file)
index 0000000..8fddb82
--- /dev/null
@@ -0,0 +1,285 @@
+## Unstable Features
+
+Experimental Cargo features are only available on the nightly channel.  You
+typically use one of the `-Z` flags to enable them.  Run `cargo -Z help` to
+see a list of flags available.
+
+`-Z unstable-options` is a generic flag for enabling other unstable
+command-line flags.  Options requiring this will be called out below.
+
+Some unstable features will require you to specify the `cargo-features` key in
+`Cargo.toml`.
+
+### Alternate Registries
+* RFC: [#2141](https://github.com/rust-lang/rfcs/blob/master/text/2141-alternative-registries.md)
+* Tracking Issue: [rust-lang/rust#44931](https://github.com/rust-lang/rust/issues/44931)
+
+Alternate registries allow you to use registries other than crates.io.
+
+The name of a registry is defined in `.cargo/config` under the `registries`
+table:
+
+```toml
+[registries]
+my-registry = { index = "https://my-intranet:8080/index" }
+```
+
+Authentication information for alternate registries can be added to
+`.cargo/credentials`:
+
+```toml
+[my-registry]
+token = "api-token"
+```
+
+Inside `Cargo.toml` you can specify which registry a dependency comes from
+using the `registry` key. First you need to include the appropriate
+`cargo-features` at the top of the file:
+
+```toml
+cargo-features = ["alternative-registries"]
+
+[package]
+...
+
+[dependencies]
+other-crate = { version = "1.0", registry = "my-registry" }
+```
+
+A `--registry` flag has been added to commands that interact with registries
+such as `publish`, `login`, etc.  Example:
+
+```
+cargo +nightly publish -Z unstable-options --registry my-registry
+```
+
+The `publish` field in `Cargo.toml` has been extended to accept a list of
+registries that will restrict publishing only to those registries.
+
+```toml
+[package]
+...
+publish = ["my-registry"]
+```
+
+
+### publish-lockfile
+* Original Issue: [#2263](https://github.com/rust-lang/cargo/issues/2263)
+* PR: [#5093](https://github.com/rust-lang/cargo/pull/5093)
+* Tracking Issue: [#5654](https://github.com/rust-lang/cargo/issues/5654)
+
+When creating a `.crate` file for distribution, Cargo has historically
+not included the `Cargo.lock` file.  This can cause problems with
+using `cargo install` with a binary.  You can specify that your package
+should include the `Cargo.lock` file when using `cargo package` or `cargo publish`
+by specifying the `publish-lockfile` key in `Cargo.toml`.  This also requires the
+appropriate `cargo-features`:
+
+```toml
+cargo-features = ["publish-lockfile"]
+
+[package]
+...
+publish-lockfile = true
+```
+
+
+### Offline Mode
+* Original Issue: [#4686](https://github.com/rust-lang/cargo/issues/4686)
+* Tracking Issue: [#5655](https://github.com/rust-lang/cargo/issues/5655)
+
+The `-Z offline` flag prevents Cargo from attempting to access the network for
+any reason.  Typically Cargo will stop with an error if it wants to access the
+network and it is not available.
+
+Beware that this may result in different dependency resolution than online
+mode.  Cargo will restrict itself to crates that are available locally, even
+if there might be a newer version as indicated in the local copy of the index.
+
+### no-index-update
+* Original Issue: [#3479](https://github.com/rust-lang/cargo/issues/3479)
+
+The `-Z no-index-update` flag ensures that Cargo does not attempt to update
+the registry index.  This is intended for tools such as Crater that issue many
+Cargo commands, and you want to avoid the network latency for updating the
+index each time.
+
+### avoid-dev-deps
+* Original Issue: [#4988](https://github.com/rust-lang/cargo/issues/4988)
+* Stabilization Issue: [#5133](https://github.com/rust-lang/cargo/issues/5133)
+
+When running commands such as `cargo install` or `cargo build`, Cargo
+currently requires dev-dependencies to be downloaded, even if they are not
+used.  The `-Z avoid-dev-deps` flag allows Cargo to avoid downloading
+dev-dependencies if they are not needed.  The `Cargo.lock` file will not be
+generated if dev-dependencies are skipped.
+
+### minimal-versions
+* Original Issue: [#4100](https://github.com/rust-lang/cargo/issues/4100)
+* Tracking Issue: [#5657](https://github.com/rust-lang/cargo/issues/5657)
+
+When a `Cargo.lock` file is generated, the `-Z minimal-versions` flag will
+resolve the dependencies to the minimum semver version that will satisfy the
+requirements (instead of the greatest version).
+
+The intended use-case of this flag is to check, during continuous integration,
+that the versions specified in Cargo.toml are a correct reflection of the
+minimum versions that you are actually using. That is, if Cargo.toml says
+`foo = "1.0.0"` that you don't accidentally depend on features added only in
+`foo 1.5.0`.
+
+### out-dir
+* Original Issue: [#4875](https://github.com/rust-lang/cargo/issues/4875)
+
+This feature allows you to specify the directory where artifacts will be
+copied to after they are built.  Typically artifacts are only written to the
+`target/release` or `target/debug` directories.  However, determining the
+exact filename can be tricky since you need to parse JSON output. The
+`--out-dir` flag makes it easier to predictably access the artifacts. Note
+that the artifacts are copied, so the originals are still in the `target`
+directory.  Example:
+
+```
+cargo +nightly build --out-dir=out -Z unstable-options
+```
+
+
+### Profile Overrides
+* Tracking Issue: [rust-lang/rust#48683](https://github.com/rust-lang/rust/issues/48683)
+* RFC: [#2282](https://github.com/rust-lang/rfcs/blob/master/text/2282-profile-dependencies.md)
+
+Profiles can be overridden for specific packages and custom build scripts.
+The general format looks like this:
+
+```toml
+cargo-features = ["profile-overrides"]
+
+[package]
+...
+
+[profile.dev]
+opt-level = 0
+debug = true
+
+# the `image` crate will be compiled with -Copt-level=3
+[profile.dev.overrides.image]
+opt-level = 3
+
+# All dependencies (but not this crate itself or any workspace member)
+# will be compiled with -Copt-level=2 . This includes build dependencies.
+[profile.dev.overrides."*"]
+opt-level = 2
+
+# Build scripts and their dependencies will be compiled with -Copt-level=3
+# By default, build scripts use the same rules as the rest of the profile
+[profile.dev.build-override]
+opt-level = 3
+```
+
+Overrides can only be specified for dev and release profiles.
+
+
+### Config Profiles
+* Tracking Issue: [rust-lang/rust#48683](https://github.com/rust-lang/rust/issues/48683)
+* RFC: [#2282](https://github.com/rust-lang/rfcs/blob/master/text/2282-profile-dependencies.md)
+
+Profiles can be specified in `.cargo/config` files.  The `-Z config-profile`
+command-line flag is required to use this feature.  The format is the same as
+in a `Cargo.toml` manifest.  If found in multiple config files, settings will
+be merged using the regular [config hierarchy](reference/config.html#hierarchical-structure).
+Config settings take precedence over manifest settings.
+
+```toml
+[profile.dev]
+opt-level = 3
+```
+
+```
+cargo +nightly build -Z config-profile
+```
+
+
+### Namespaced features
+* Original issue: [#1286](https://github.com/rust-lang/cargo/issues/1286)
+* Tracking Issue: [rust-lang/cargo#5565](https://github.com/rust-lang/cargo/issues/5565)
+
+Currently, it is not possible to have a feature and a dependency with the same
+name in the manifest. If you set `namespaced-features` to `true`, the namespaces
+for features and dependencies are separated. The effect of this is that, in the
+feature requirements, dependencies have to be prefixed with `crate:`. Like this:
+
+```toml
+[package]
+namespaced-features = true
+
+[features]
+bar = ["crate:baz", "foo"]
+foo = []
+
+[dependencies]
+baz = { version = "0.1", optional = true }
+```
+
+To prevent unnecessary boilerplate from having to explicitly declare features
+for each optional dependency, implicit features get created for any optional
+dependencies where a feature of the same name is not defined. However, if
+a feature of the same name as a dependency is defined, that feature must
+include the dependency as a requirement, as `foo = ["crate:foo"]`.
+
+
+### Build-plan
+* Tracking Issue: [rust-lang/cargo#5579](https://github.com/rust-lang/cargo/issues/5579)
+
+The `--build-plan` argument for the `build` command will output JSON with
+information about which commands would be run without actually executing
+anything. This can be useful when integrating with another build tool.
+Example:
+
+```
+cargo +nightly build --build-plan -Z unstable-options
+```
+
+### default-run
+* Original issue: [#2200](https://github.com/rust-lang/cargo/issues/2200)
+
+The `default-run` option in the `[package]` section of the manifest can be used
+to specify a default binary picked by `cargo run`. For example, when there is
+both `src/bin/a.rs` and `src/bin/b.rs`:
+
+```toml
+[package]
+default-run = "a"
+```
+
+### Metabuild
+* Tracking Issue: [rust-lang/rust#49803](https://github.com/rust-lang/rust/issues/49803)
+* RFC: [#2196](https://github.com/rust-lang/rfcs/blob/master/text/2196-metabuild.md)
+
+Metabuild is a feature to have declarative build scripts.  Instead of writing
+a `build.rs` script, you specify a list of build dependencies in the
+`metabuild` key in `Cargo.toml`.  A build script is automatically generated
+that runs each build dependency in order.  Metabuild packages can then read
+metadata from `Cargo.toml` to specify their behavior.
+
+Include `cargo-features` at the top of `Cargo.toml`, a `metabuild` key in the
+`package`, list the dependencies in `build-dependencies`, and add any metadata
+that the metabuild packages require.  Example:
+
+```toml
+cargo-features = ["metabuild"]
+
+[package]
+name = "mypackage"
+version = "0.0.1"
+metabuild = ["foo", "bar"]
+
+[build-dependencies]
+foo = "1.0"
+bar = "1.0"
+
+[package.metadata.foo]
+extra-info = "qwerty"
+```
+
+Metabuild packages should have a public function called `metabuild` that
+performs the same actions as a regular `build.rs` script would perform.
diff --git a/src/doc/theme/favicon.png b/src/doc/theme/favicon.png
new file mode 100644 (file)
index 0000000..a91ad69
Binary files /dev/null and b/src/doc/theme/favicon.png differ
diff --git a/src/etc/_cargo b/src/etc/_cargo
new file mode 100644 (file)
index 0000000..395c517
--- /dev/null
@@ -0,0 +1,544 @@
+#compdef cargo
+
+autoload -U regexp-replace
+
+zstyle -T ':completion:*:*:cargo:*' tag-order && \
+  zstyle ':completion:*:*:cargo:*' tag-order 'common-commands'
+
+_cargo() {
+local context state state_descr line
+typeset -A opt_args
+
+# leading items in parentheses are an exclusion list for the arguments following that arg
+# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions
+#   - => exclude all other options
+#   1 => exclude positional arg 1
+#   * => exclude all other args
+#   +blah => exclude +blah
+_arguments \
+    '(- 1 *)'{-h,--help}'[show help message]' \
+    '(- 1 *)--list[list installed commands]' \
+    '(- 1 *)'{-V,--version}'[show version information]' \
+    {-v,--verbose}'[use verbose output]' \
+    --color'[colorization option]' \
+    '(+beta +nightly)+stable[use the stable toolchain]' \
+    '(+stable +nightly)+beta[use the beta toolchain]' \
+    '(+stable +beta)+nightly[use the nightly toolchain]' \
+    '1: :->command' \
+    '*:: :->args'
+
+case $state in
+    command)
+      _alternative 'common-commands:common:_cargo_cmds' 'all-commands:all:_cargo_all_cmds'
+      ;;
+
+    args)
+        case $words[1] in
+            bench)
+                _arguments \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    "${command_scope_spec[@]}" \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--no-default-features[do not build the default features]' \
+                    '--no-run[compile but do not run]' \
+                    '(-p,--package)'{-p=,--package=}'[package to run benchmarks for]:packages:_get_package_names' \
+                    '--target=[target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            build)
+                _arguments \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    "${command_scope_spec[@]}" \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--no-default-features[do not build the default features]' \
+                    '(-p,--package)'{-p=,--package=}'[package to build]:packages:_get_package_names' \
+                    '--release=[build in release mode]' \
+                    '--target=[target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            check)
+                _arguments \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    "${command_scope_spec[@]}" \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--no-default-features[do not check the default features]' \
+                    '(-p,--package)'{-p=,--package=}'[package to check]:packages:_get_package_names' \
+                    '--release=[check in release mode]' \
+                    '--target=[target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            clean)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-p,--package)'{-p=,--package=}'[package to clean]:packages:_get_package_names' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--release[whether or not to clean release artifacts]' \
+                    '--target=[target triple(default:all)]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            doc)
+                _arguments \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--no-deps[do not build docs for dependencies]' \
+                    '--no-default-features[do not build the default features]' \
+                    '--open[open docs in browser after the build]' \
+                    '(-p, --package)'{-p,--package}'=[package to document]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--release[build artifacts in release mode, with optimizations]' \
+                    '--target=[build for the target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            fetch)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            generate-lockfile)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            git-checkout)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--reference=[REF]' \
+                    '--url=[URL]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            help)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '*: :_cargo_cmds' \
+                    ;;
+
+            init)
+                _arguments \
+                    '--bin[use binary template]' \
+                    '--vcs:initialize a new repo with a given VCS:(git hg none)' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--name=[set the resulting package name]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            install)
+                _arguments \
+                    '--bin=[only install the specified binary]' \
+                    '--branch=[branch to use when installing from git]' \
+                    '--color=:colorization option:(auto always never)' \
+                    '--debug[build in debug mode instead of release mode]' \
+                    '--example[install the specified example instead of binaries]' \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '--git=[URL from which to install the crate]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    '--no-default-features[do not build the default features]' \
+                    '--path=[local filesystem path to crate to install]: :_files -/' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--rev=[specific commit to use when installing from git]' \
+                    '--root=[directory to install packages into]: :_files -/' \
+                    '--tag=[tag to use when installing from git]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--vers=[version to install from crates.io]' \
+                    ;;
+
+            locate-project)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    ;;
+
+            login)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--host=[Host to set the token for]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            metadata)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    "--no-deps[output information only about the root package and don't fetch dependencies]" \
+                    '--no-default-features[do not include the default feature]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '--format-version=[format version(default: 1)]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            new)
+                _arguments \
+                    '--bin[use binary template]' \
+                    '--vcs:initialize a new repo with a given VCS:(git hg none)' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--name=[set the resulting package name]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            owner)
+                _arguments \
+                    '(-a, --add)'{-a,--add}'[add owner LOGIN]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--index[registry index]' \
+                    '(-l, --list)'{-l,--list}'[list owners of a crate]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-r, --remove)'{-r,--remove}'[remove owner LOGIN]' \
+                    '--token[API token to use when authenticating]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            package)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-l, --list)'{-l,--list}'[print files included in a package without making one]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--no-metadata[ignore warnings about a lack of human-usable metadata]' \
+                    '--no-verify[do not build to verify contents]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            pkgid)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            publish)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--host=[Host to set the token for]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--no-verify[Do not verify tarball until before publish]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--token[token to use when uploading]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            read-manifest)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            run)
+                _arguments \
+                    '--example=[name of the bin target]' \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--bin=[name of the bin target]' \
+                    '--no-default-features[do not build the default features]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--release=[build in release mode]' \
+                    '--target=[target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    '*: :_normal' \
+                    ;;
+
+            rustc)
+                _arguments \
+                    '--color=:colorization option:(auto always never)' \
+                    '--features=[features to compile for the package]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \
+                    '--manifest-path=[path to the manifest to fetch dependencies for]: :_files -/' \
+                    '--no-default-features[do not compile default features for the package]' \
+                    '(-p, --package)'{-p,--package}'=[profile to compile for]' \
+                    '--profile=[profile to build the selected target for]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--release[build artifacts in release mode, with optimizations]' \
+                    '--target=[target triple which compiles will be for]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    "${command_scope_spec[@]}" \
+                    ;;
+
+            rustdoc)
+                _arguments \
+                    '--color=:colorization option:(auto always never)' \
+                    '--features=[space-separated list of features to also build]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \
+                    '--manifest-path=[path to the manifest to document]: :_files -/' \
+                    '--no-default-features[do not build the `default` feature]' \
+                    '--open[open the docs in a browser after the operation]' \
+                    '(-p, --package)'{-p,--package}'=[package to document]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--release[build artifacts in release mode, with optimizations]' \
+                    '--target=[build for the target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    "${command_scope_spec[@]}" \
+                    ;;
+
+            search)
+                _arguments \
+                    '--color=:colorization option:(auto always never)' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--host=[host of a registry to search in]' \
+                    '--limit=[limit the number of results]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    ;;
+
+            test)
+                _arguments \
+                    '--features=[space separated feature list]' \
+                    '--all-features[enable all available features]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '--test=[test name]: :_test_names' \
+                    '--no-default-features[do not build the default features]' \
+                    '--no-fail-fast[run all tests regardless of failure]' \
+                    '--no-run[compile but do not run]' \
+                    '(-p,--package)'{-p=,--package=}'[package to run tests for]:packages:_get_package_names' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--release[build artifacts in release mode, with optimizations]' \
+                    '--target=[target triple]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    '1: :_test_names' \
+                    '(--doc --bin --example --test --bench)--lib[only test library]' \
+                    '(--lib --bin --example --test --bench)--doc[only test documentation]' \
+                    '(--lib --doc --example --test --bench)--bin=[binary name]' \
+                    '(--lib --doc --bin --test --bench)--example=[example name]' \
+                    '(--lib --doc --bin --example --bench)--test=[test name]' \
+                    '(--lib --doc --bin --example --test)--bench=[benchmark name]' \
+                    '--message-format:error format:(human json short)' \
+                    '--frozen[require lock and cache up to date]' \
+                    '--locked[require lock up to date]'
+                    ;;
+
+            uninstall)
+                _arguments \
+                    '--bin=[only uninstall the binary NAME]' \
+                    '--color=:colorization option:(auto always never)' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-q, --quiet)'{-q,--quiet}'[less output printed to stdout]' \
+                    '--root=[directory to uninstall packages from]: :_files -/' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    ;;
+
+            update)
+                _arguments \
+                    '--aggressive=[force dependency update]' \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-p,--package)'{-p=,--package=}'[package to update]:packages:__get_package_names' \
+                    '--precise=[update single dependency to PRECISE]: :' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            verify-project)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--manifest-path=[path to manifest]: :_files -/' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            version)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    ;;
+
+            yank)
+                _arguments \
+                    '(-h, --help)'{-h,--help}'[show help message]' \
+                    '--index[registry index]' \
+                    '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \
+                    '--token[API token to use when authenticating]' \
+                    '--undo[undo a yank, putting a version back into the index]' \
+                    '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \
+                    '--color=:colorization option:(auto always never)' \
+                    '--vers[yank version]' \
+                    ;;
+        esac
+        ;;
+esac
+}
+
+_cargo_cmds(){
+local -a commands;commands=(
+'bench:execute all benchmarks of a local package'
+'build:compile the current package'
+'check:check the current package without compiling'
+'clean:remove generated artifacts'
+'doc:build package documentation'
+'fetch:fetch package dependencies'
+'generate-lockfile:create lockfile'
+'git-checkout:git checkout'
+'help:get help for commands'
+'init:create new package in current directory'
+'install:install a Rust binary'
+'locate-project:print "Cargo.toml" location'
+'login:login to remote server'
+'metadata:the metadata for a package in json'
+'new:create a new package'
+'owner:manage the owners of a crate on the registry'
+'package:assemble local package into a distributable tarball'
+'pkgid:print a fully qualified package specification'
+'publish:upload package to the registry'
+'read-manifest:print manifest in JSON format'
+'run:run the main binary of the local package'
+'rustc:compile a package and all of its dependencies'
+'rustdoc:build documentation for a package'
+'search:search packages on crates.io'
+'test:execute all unit and tests of a local package'
+'uninstall:remove a Rust binary'
+'update:update dependencies'
+'verify-project:check Cargo.toml'
+'version:show version information'
+'yank:remove pushed file from index'
+)
+_describe -t common-commands 'common commands' commands
+}
+
+_cargo_all_cmds(){
+local -a commands;commands=($(cargo --list))
+_describe -t all-commands 'all commands' commands
+}
+
+
+# FIXME: disabled until fixed — the function body is intentionally empty.
+# Gets package names from the manifest file.
+_get_package_names()
+{
+}
+
+# TODO: see if it makes sense for 'locate-project' to offer non-JSON output.
+# Extracts the manifest path from the JSON emitted by 'cargo locate-project'.
+_locate_manifest(){
+local manifest=`cargo locate-project 2>/dev/null`
+regexp-replace manifest '\{"root":"|"\}' ''
+echo $manifest
+}
+
+# Extracts the values of "name" from the array given in $1 and shows them as
+# command line options for completion
+_get_names_from_array()
+{
+    local -a filelist;
+    local manifest=$(_locate_manifest)
+    if [[ -z $manifest ]]; then
+        return 0
+    fi
+
+    local last_line
+    local -a names;
+    local in_block=false
+    local block_name=$1
+    names=()
+    while read line
+    do
+        if [[ $last_line == "[[$block_name]]" ]]; then
+            in_block=true
+        else
+            if [[ $last_line =~ '.*\[\[.*' ]]; then
+                in_block=false
+            fi
+        fi
+
+        if [[ $in_block == true ]]; then
+            if [[ $line =~ '.*name.*=' ]]; then
+                regexp-replace line '^.*name *= *|"' ""
+                names+=$line
+            fi
+        fi
+
+        last_line=$line
+    done < $manifest
+    _describe $block_name names
+
+}
+
+# Gets the test target names from the manifest file.
+_test_names()
+{
+    _get_names_from_array "test"
+}
+
+# Gets the benchmark target names from the manifest file.
+_benchmark_names()
+{
+    _get_names_from_array "bench"
+}
+
+# These flags are mutually exclusive specifiers for the scope of a command; as
+# they are used in multiple places without change, they are expanded into the
+# appropriate command's `_arguments` where appropriate.
+set command_scope_spec
+command_scope_spec=(
+    '(--bin --example --test --lib)--bench=[benchmark name]: :_benchmark_names'
+    '(--bench --bin --test --lib)--example=[example name]'
+    '(--bench --example --test --lib)--bin=[binary name]'
+    '(--bench --bin --example --test)--lib=[library name]'
+    '(--bench --bin --example --lib)--test=[test name]'
+)
+
+_cargo
diff --git a/src/etc/cargo.bashcomp.sh b/src/etc/cargo.bashcomp.sh
new file mode 100644 (file)
index 0000000..5bfdd20
--- /dev/null
@@ -0,0 +1,248 @@
+# Bash programmable completion for cargo. Only installed if cargo exists.
+command -v cargo >/dev/null 2>&1 &&
+_cargo()
+{
+	local cur prev words cword
+	_get_comp_words_by_ref cur prev words cword
+
+	COMPREPLY=()
+
+	# Skip past - and + options to find the command.
+	local nwords=${#words[@]}
+	local cmd_i cmd
+	for (( cmd_i=1; cmd_i<$nwords; cmd_i++ ));
+	do
+		if [[ ! "${words[$cmd_i]}" =~ ^[+-] ]]; then
+			cmd="${words[$cmd_i]}"
+			break
+		fi
+	done
+
+	# Candidate values for options that take a fixed set of arguments.
+	local vcs='git hg none'
+	local color='auto always never'
+	local msg_format='human json'
+
+	# Option groups shared by several subcommands.
+	local opt_help='-h --help'
+	local opt_verbose='-v --verbose'
+	local opt_quiet='-q --quiet'
+	local opt_color='--color'
+	local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color"
+	local opt_pkg='-p --package'
+	local opt_feat='--features --all-features --no-default-features'
+	local opt_mani='--manifest-path'
+	local opt_jobs='-j --jobs'
+	local opt_force='-f --force'
+	local opt_test='--test --bench'
+	local opt_lock='--frozen --locked'
+
+	# Per-subcommand option lists, looked up below via opt__${cmd}.
+	local opt___nocmd="$opt_common -V --version --list"
+	local opt__bench="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --no-run"
+	local opt__build="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release"
+	local opt__check="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release"
+	local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release"
+	local opt__doc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --bin --lib --target --open --no-deps --release"
+	local opt__fetch="$opt_common $opt_mani $opt_lock"
+	local opt__generate_lockfile="${opt__fetch}"
+	local opt__git_checkout="$opt_common $opt_lock --reference --url"
+	local opt__help="$opt_help"
+	local opt__init="$opt_common $opt_lock --bin --lib --name --vcs"
+	local opt__install="$opt_common $opt_feat $opt_jobs $opt_lock $opt_force --bin --branch --debug --example --git --list --path --rev --root --tag --vers"
+	local opt__locate_project="$opt_mani -h --help"
+	local opt__login="$opt_common $opt_lock --host"
+	local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version --no-deps"
+	local opt__new="$opt_common $opt_lock --vcs --bin --lib --name"
+	local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token"
+	local opt__package="$opt_common $opt_mani $opt_lock $opt_jobs --allow-dirty -l --list --no-verify --no-metadata"
+	local opt__pkgid="${opt__fetch} $opt_pkg"
+	local opt__publish="$opt_common $opt_mani $opt_lock $opt_jobs --allow-dirty --dry-run --host --token --no-verify"
+	local opt__read_manifest="$opt_help $opt_verbose $opt_mani $opt_color --no-deps"
+	local opt__run="$opt_common $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --target --bin --example --release"
+	local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --profile --target --lib --bin --example --release"
+	local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --target --lib --bin --example --release --open"
+	local opt__search="$opt_common $opt_lock --host --limit"
+	local opt__test="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test --message-format --all --doc --target --lib --bin --example --no-run --release --no-fail-fast"
+	local opt__uninstall="$opt_common $opt_lock --bin --root"
+	local opt__update="$opt_common $opt_pkg $opt_mani $opt_lock --aggressive --precise"
+	local opt__verify_project="${opt__fetch}"
+	local opt__version="$opt_help $opt_verbose $opt_color"
+	local opt__yank="$opt_common $opt_lock --vers --undo --index --token"
+
+	if [[ $cmd_i -ge $nwords-1 ]]; then
+		# Completion before or at the command.
+		if [[ "$cur" == -* ]]; then
+			COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) )
+		elif [[ "$cur" == +* ]]; then
+			COMPREPLY=( $( compgen -W "$(_toolchains)" -- "$cur" ) )
+		else
+			COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) )
+		fi
+	else
+		# Completing after the command: first check whether the previous
+		# word is an option expecting a specific kind of argument.
+		case "${prev}" in
+			--vcs)
+				COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) )
+				;;
+			--color)
+				COMPREPLY=( $( compgen -W "$color" -- "$cur" ) )
+				;;
+			--message-format)
+				COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) )
+				;;
+			--manifest-path)
+				_filedir toml
+				;;
+			--bin)
+				COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) )
+				;;
+			--test)
+				COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) )
+				;;
+			--bench)
+				COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) )
+				;;
+			--example)
+				COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) )
+				;;
+			--target)
+				COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) )
+				;;
+			help)
+				COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) )
+				;;
+			*)
+				# Otherwise offer the option list of the detected
+				# subcommand (dashes mapped to underscores).
+				local opt_var=opt__${cmd//-/_}
+				COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) )
+				;;
+		esac
+	fi
+
+	# compopt does not work in bash version 3
+
+	return 0
+} &&
+complete -F _cargo cargo
+
+# Cached list of cargo subcommands; `tail -n +2` drops the header line.
+__cargo_commands=$(cargo --list 2>/dev/null | tail -n +2)
+
+# Prints the path of the nearest Cargo.toml, or nothing when not in a package.
+_locate_manifest(){
+	local manifest=`cargo locate-project 2>/dev/null`
+	# regexp-replace manifest '\{"root":"|"\}' ''
+	# Output is {"root":"<path>"}: skip the 9-char prefix, drop the 2-char suffix.
+	echo ${manifest:9:${#manifest}-11}
+}
+
+# Extracts the values of "name" from the array given in $1 and shows them as
+# command line options for completion
+_get_names_from_array()
+{
+	local manifest=$(_locate_manifest)
+	# Bail out quietly when we are not inside a cargo package.
+	if [[ -z $manifest ]]; then
+		return 0
+	fi
+
+	local last_line
+	local -a names
+	local in_block=false
+	local block_name=$1
+	# Scan the manifest line by line: a [[<block_name>]] header opens a block,
+	# any other [[...]] header closes it. Inside an open block, collect the
+	# value of each `name = "..."` key.
+	while read line
+	do
+		if [[ $last_line == "[[$block_name]]" ]]; then
+			in_block=true
+		else
+			if [[ $last_line =~ .*\[\[.* ]]; then
+				in_block=false
+			fi
+		fi
+
+		if [[ $in_block == true ]]; then
+			if [[ $line =~ .*name.*\= ]]; then
+				# Strip everything up to `=` and the surrounding quotes.
+				line=${line##*=}
+				line=${line%%\"}
+				line=${line##*\"}
+				names+=($line)
+			fi
+		fi
+
+		last_line=$line
+	done < $manifest
+	echo "${names[@]}"
+}
+
+# Gets the [[bin]] target names from the manifest file.
+_bin_names()
+{
+	_get_names_from_array "bin"
+}
+
+# Gets the [[test]] target names from the manifest file.
+_test_names()
+{
+	_get_names_from_array "test"
+}
+
+# Gets the [[bench]] target names from the manifest file.
+_benchmark_names()
+{
+	_get_names_from_array "bench"
+}
+
+_get_examples(){
+	# Derive example names from examples/*.rs next to the manifest,
+	# by stripping the directory part and the .rs extension.
+	local files=($(dirname $(_locate_manifest))/examples/*.rs)
+	local names=("${files[@]##*/}")
+	local names=("${names[@]%.*}")
+	# "*" means no examples found
+	if [[ "${names[@]}" != "*" ]]; then
+		echo "${names[@]}"
+	fi
+}
+
+# Collects target triples from [target.*] sections of every .cargo/config
+# found between the package directory and the filesystem root.
+_get_targets(){
+	local CURRENT_PATH
+	# NOTE(review): `uname -o` is not supported everywhere (e.g. macOS) — the
+	# Cygwin special-case short-circuits before _locate_manifest; confirm intent.
+	if [ `uname -o` == "Cygwin" -a -f "$PWD"/Cargo.toml ]; then
+		CURRENT_PATH=$PWD
+	else
+		CURRENT_PATH=$(_locate_manifest)
+	fi
+	if [[ -z "$CURRENT_PATH" ]]; then
+		return 1
+	fi
+	local TARGETS=()
+	local FIND_PATHS=( "/" )
+	local FIND_PATH LINES LINE
+	# Walk up from the manifest directory to "/", remembering each ancestor.
+	while [[ "$CURRENT_PATH" != "/" ]]; do
+		FIND_PATHS+=( "$CURRENT_PATH" )
+		CURRENT_PATH=$(dirname $CURRENT_PATH)
+	done
+	# Extract the triple from every "[target.<triple>]" section header.
+	for FIND_PATH in ${FIND_PATHS[@]}; do
+		if [[ -f "$FIND_PATH"/.cargo/config ]]; then
+		LINES=( `grep "$FIND_PATH"/.cargo/config -e "^\[target\."` )
+		for LINE in ${LINES[@]}; do
+			TARGETS+=(`sed 's/^\[target\.\(.*\)\]$/\1/' <<< $LINE`)
+		done
+		fi
+	done
+	echo "${TARGETS[@]}"
+
+# Lists installed rustup toolchains as +toolchain arguments. For dated
+# toolchains both the short form (+channel or +channel-date) and the full
+# name are offered.
+_toolchains(){
+	local result=()
+	local toolchains=$(rustup toolchain list)
+	local channels="nightly|beta|stable|[0-9]\.[0-9]{1,2}\.[0-9]"
+	local date="[0-9]{4}-[0-9]{2}-[0-9]{2}"
+	while read line
+	do
+		# Strip " (default)"
+		line=${line%% *}
+		if [[ "$line" =~ ^($channels)(-($date))?(-.*) ]]; then
+			if [[ -z ${BASH_REMATCH[3]} ]]; then
+				result+=("+${BASH_REMATCH[1]}")
+			else
+				# channel-date
+				result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}")
+			fi
+			result+=("+$line")
+		else
+			result+=("+$line")
+		fi
+	done <<< "$toolchains"
+	echo "${result[@]}"
+}
+
+# vim:ft=sh
diff --git a/src/etc/man/cargo-bench.1 b/src/etc/man/cargo-bench.1
new file mode 100644 (file)
index 0000000..74c1675
--- /dev/null
@@ -0,0 +1,143 @@
+.TH "CARGO\-BENCH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-bench \- Execute benchmarks of a package
+.SH SYNOPSIS
+.PP
+\f[I]cargo bench\f[] [OPTIONS] [\-\-] [<ARGS>...]
+.SH DESCRIPTION
+.PP
+Execute all benchmarks of a local package.
+.PP
+All of the trailing arguments are passed to the benchmark binaries
+generated for filtering benchmarks and generally providing options
+configuring how they run.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be built.
+If it is not given, then the current package is built.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.PP
+The \f[B]\-\-jobs\f[] argument affects the building of the benchmark
+executable but does not affect how many jobs are used when running the
+benchmarks.
+.PP
+Compilation can be customized with the \[aq]bench\[aq] profile in the
+manifest.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-lib
+Benchmark only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Benchmark only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Benchmark only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Benchmark only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Benchmark only the specified bench target.
+.RS
+.RE
+.TP
+.B \-\-no\-run
+Compile, but don\[aq]t run benchmarks.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to run benchmarks for.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Execute all the benchmarks of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ bench
+\f[]
+.fi
+.PP
+Execute the BENCH benchmark
+.IP
+.nf
+\f[C]
+$\ cargo\ bench\ \-\-bench\ BENCH
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1), cargo\-test(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-build.1 b/src/etc/man/cargo-build.1
new file mode 100644 (file)
index 0000000..13c5ac6
--- /dev/null
@@ -0,0 +1,132 @@
+.TH "CARGO\-BUILD" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-build \- Compile the current package
+.SH SYNOPSIS
+.PP
+\f[I]cargo build\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Compile a local package and all of its dependencies.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be built.
+If it is not given, then the current package is built.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.PP
+Compilation can be configured via the use of profiles which are
+configured in the manifest.
+The default profile for this command is \f[I]dev\f[], but passing the
+\f[B]\-\-release\f[] flag will use the \f[I]release\f[] profile instead.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to build.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Build only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Build only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Build only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Build only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Build only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ build
+\f[]
+.fi
+.PP
+Build a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ build\ \-\-release
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-check.1 b/src/etc/man/cargo-check.1
new file mode 100644 (file)
index 0000000..9575c3c
--- /dev/null
@@ -0,0 +1,132 @@
+.TH "CARGO\-CHECK" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-check \- Check the current package
+.SH SYNOPSIS
+.PP
+\f[I]cargo check\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Check a local package and all of its dependencies.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be checked.
+If it is not given, then the current package is checked.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.PP
+Compilation can be configured via the use of profiles which are
+configured in the manifest.
+The default profile for this command is \f[I]dev\f[], but passing the
+\f[B]\-\-release\f[] flag will use the \f[I]release\f[] profile instead.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to check.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Check only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Check only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Check only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Check only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Check only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Check artifacts in release mode.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Check with all available features.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also check.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not check the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Check for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Check a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ check
+\f[]
+.fi
+.PP
+Check a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ check\ \-\-release
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-clean.1 b/src/etc/man/cargo-clean.1
new file mode 100644 (file)
index 0000000..6777c98
--- /dev/null
@@ -0,0 +1,82 @@
+.TH "CARGO\-CLEAN" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-clean \- Remove generated artifacts
+.SH SYNOPSIS
+.PP
+\f[I]cargo clean\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Remove artifacts that cargo has generated in the past.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be built.
+If it is not given, then the current package is built.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to clean artifacts for.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path PATH
+Path to the manifest to the package to clean.
+.RS
+.RE
+.TP
+.B \-\-target TRIPLE
+Target triple to clean output for (default all).
+.RS
+.RE
+.TP
+.B \-\-release
+Whether or not to clean release artifacts.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Remove local package generated artifacts
+.IP
+.nf
+\f[C]
+$\ cargo\ clean
+\f[]
+.fi
+.PP
+Clean release artifacts
+.IP
+.nf
+\f[C]
+$\ cargo\ clean\ \-\-release
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-doc.1 b/src/etc/man/cargo-doc.1
new file mode 100644 (file)
index 0000000..d4af424
--- /dev/null
@@ -0,0 +1,109 @@
+.TH "CARGO\-DOC" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-doc \- Build a package\[aq]s documentation
+.SH SYNOPSIS
+.PP
+\f[I]cargo doc\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Build a package\[aq]s documentation.
+.PP
+By default the documentation for the local package and all dependencies
+is built.
+The output is all placed in \[aq]target/doc\[aq] in rustdoc\[aq]s usual
+format.
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \f[I]SPEC\f[] is a
+package id specification which indicates which package should be built.
+If it is not given, then the current package is built.
+For more information on \f[I]SPEC\f[] and its format, see the "cargo
+help pkgid" command.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to document.
+.RS
+.RE
+.TP
+.B \-\-open
+Opens the docs in a browser after the operation.
+.RS
+.RE
+.TP
+.B \-\-no\-deps
+Don\[aq]t build documentation for dependencies.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package documentation in \[aq]target/doc\[aq]
+.IP
+.nf
+\f[C]
+$\ cargo\ doc
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-fetch.1 b/src/etc/man/cargo-fetch.1
new file mode 100644 (file)
index 0000000..96c49ab
--- /dev/null
@@ -0,0 +1,52 @@
+.TH "CARGO\-FETCH" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-fetch \- Fetch dependencies of a package from the network
+.SH SYNOPSIS
+.PP
+\f[I]cargo fetch\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+If a lockfile is available, this command will ensure that all of the git
+dependencies and/or registries dependencies are downloaded and locally
+available. The network is never touched after a `cargo fetch` unless
+the lockfile changes.
+
+If the lockfile is not available, then this is the equivalent of
+`cargo generate-lockfile`. A lockfile is generated and dependencies are also
+all updated.
+.PP
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-manifest-path \f[I]PATH\f[]
+Path to the manifest to fetch dependencies for.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-update(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-generate-lockfile.1 b/src/etc/man/cargo-generate-lockfile.1
new file mode 100644 (file)
index 0000000..73edd25
--- /dev/null
@@ -0,0 +1,41 @@
+.TH "CARGO\-GENERATE LOCKFILE" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-generate-lockfile \- Generate the lockfile for a package
+.SH SYNOPSIS
+.PP
+\f[I]cargo generate-lockfile\f[] [OPTIONS]
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-manifest-path \f[I]PATH\f[]
+Path to the manifest to generate a lockfile for.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-init.1 b/src/etc/man/cargo-init.1
new file mode 100644 (file)
index 0000000..a2b392a
--- /dev/null
@@ -0,0 +1,68 @@
+.TH "CARGO\-INIT" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-init \- Create a new cargo package in the current directory
+.SH SYNOPSIS
+.PP
+\f[I]cargo init\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Create a new cargo package in the current directory.
+.PP
+Use the \f[B]\-\-vcs\f[] option to control the version control system to
+use.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-vcs \f[I]VCS\f[]
+Initialize a new repository for the given version control system (git or
+hg) or do not initialize any version control at all (none) overriding a
+global configuration.
+.RS
+.RE
+.TP
+.B \-\-bin
+Use a binary instead of a library template.
+.RS
+.RE
+.TP
+.B \-\-name \f[I]NAME\f[]
+Set the resulting package name.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Initialize a binary cargo package in the current directory
+.IP
+.nf
+\f[C]
+$\ cargo\ init\ \-\-bin
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-new(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-install.1 b/src/etc/man/cargo-install.1
new file mode 100644 (file)
index 0000000..f90ad08
--- /dev/null
@@ -0,0 +1,161 @@
+.TH "CARGO\-INSTALL" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-install \- Install a Rust binary
+.SH SYNOPSIS
+.PP
+\f[I]cargo install\f[] [OPTIONS] <CRATE>
+.PP
+\f[I]cargo install\f[] [OPTIONS] \-\-list
+.SH DESCRIPTION
+.PP
+Install a Rust binary
+.PP
+This command manages Cargo\[aq]s local set of installed binary crates.
+Only packages which have [[bin]] targets can be installed, and all
+binaries are installed into the installation root\[aq]s \f[I]bin\f[]
+folder.
+The installation root is determined, in order of precedence, by
+\f[B]\-\-root\f[], \f[I]$CARGO_INSTALL_ROOT\f[], the
+\f[I]install.root\f[] configuration key, and finally the home directory
+(which is either \f[I]$CARGO_HOME\f[] if set or \f[I]$HOME/.cargo\f[] by
+default).
+.PP
+There are multiple sources from which a crate can be installed.
+The default location is crates.io but the \f[B]\-\-git\f[] and
+\f[B]\-\-path\f[] flags can change this source.
+If the source contains more than one package (such as \f[I]crates.io\f[]
+or a git repository with multiple crates) the \f[B]CRATE\f[] argument is
+required to indicate which crate should be installed.
+.PP
+Crates from crates.io can optionally specify the version they wish to
+install via the \f[B]\-\-vers\f[] flags, and similarly packages from git
+repositories can optionally specify the branch, tag, or revision that
+should be installed.
+If a crate has multiple binaries, the \f[B]\-\-bin\f[] argument can
+selectively install only one of them, and if you\[aq]d rather install
+examples the \f[B]\-\-example\f[] argument can be used as well.
+.PP
+As a special convenience, omitting the <crate> specification entirely
+will install the crate in the current directory.
+That is, \f[I]install\f[] is equivalent to the more explicit "install
+\-\-path .".
+.PP
+If the source is crates.io or \f[B]\-\-git\f[] then by default the crate will be built in a temporary target directory.
+To avoid this, the target directory can be specified by setting the \f[B]CARGO_TARGET_DIR\f[] environment variable to a relative path.
+In particular, this can be useful for caching build artifacts on continuous integration systems.
+.PP
+The \f[B]\-\-list\f[] option will list all installed packages (and their
+versions).
+.SH OPTIONS
+.SS Query options
+.TP
+.B \-\-list
+List all installed packages (and their versions).
+.RS
+.RE
+.SS Specifying what crate to install
+.TP
+.B \-\-vers \f[I]VERS\f[]
+Specify a version to install from crates.io.
+.RS
+.RE
+.TP
+.B \-\-git \f[I]URL\f[]
+Git URL to install the specified crate from.
+.RS
+.RE
+.TP
+.B \-\-branch \f[I]BRANCH\f[]
+Branch to use when installing from git.
+.RS
+.RE
+.TP
+.B \-\-tag \f[I]TAG\f[]
+Tag to use when installing from git.
+.RS
+.RE
+.TP
+.B \-\-rev \f[I]SHA\f[]
+Specific commit to use when installing from git.
+.RS
+.RE
+.TP
+.B \-\-path \f[I]PATH\f[]
+Filesystem path to local crate to install.
+.RS
+.RE
+.SS Build and install options
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to activate.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-f, \-\-force
+Force overwriting existing crates or binaries
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-debug
+Build in debug mode instead of release mode.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Only install the binary NAME.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]EXAMPLE\f[]
+Install the example EXAMPLE instead of binaries.
+.RS
+.RE
+.TP
+.B \-\-root \f[I]DIR\f[]
+Directory to install packages into.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-search(1), cargo\-publish(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-login.1 b/src/etc/man/cargo-login.1
new file mode 100644 (file)
index 0000000..a82c828
--- /dev/null
@@ -0,0 +1,41 @@
+.TH "CARGO\-LOGIN" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-login \- Save an API token from the registry locally
+.SH SYNOPSIS
+.PP
+\f[I]cargo login\f[] [OPTIONS] [<TOKEN>]
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host to set the token for.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-publish(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-metadata.1 b/src/etc/man/cargo-metadata.1
new file mode 100644 (file)
index 0000000..deeb632
--- /dev/null
@@ -0,0 +1,71 @@
+.TH "CARGO\-METADATA" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-metadata \- Machine-readable metadata about the current package
+.SH SYNOPSIS
+.PP
+\f[I]cargo metadata\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Output the resolved dependencies of a package, the concrete used versions
+including overrides, in machine-readable format.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space-separated list of features.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not include the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-no\-deps
+Output information only about the root package and don\[aq]t fetch
+dependencies.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest.
+.RS
+.RE
+.TP
+.B \-\-format\-version \f[I]VERSION\f[]
+Format version [default: 1]. Valid values: 1.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-new.1 b/src/etc/man/cargo-new.1
new file mode 100644 (file)
index 0000000..7325c5b
--- /dev/null
@@ -0,0 +1,68 @@
+.TH "CARGO\-NEW" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-new \- Create a new cargo package
+.SH SYNOPSIS
+.PP
+\f[I]cargo new\f[] [OPTIONS] <PATH>
+.SH DESCRIPTION
+.PP
+Create a new cargo package at <PATH>.
+.PP
+Use the \f[B]\-\-vcs\f[] option to control the version control system to
+use.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-vcs \f[I]VCS\f[]
+Initialize a new repository for the given version control system (git or
+hg) or do not initialize any version control at all (none) overriding a
+global configuration.
+.RS
+.RE
+.TP
+.B \-\-bin
+Use a binary instead of a library template.
+.RS
+.RE
+.TP
+.B \-\-name \f[I]NAME\f[]
+Set the resulting package name.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Create a binary cargo package in the current directory
+.IP
+.nf
+\f[C]
+$\ cargo\ new\ \-\-bin\ ./
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-init(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-owner.1 b/src/etc/man/cargo-owner.1
new file mode 100644 (file)
index 0000000..c690dc0
--- /dev/null
@@ -0,0 +1,88 @@
+.TH "CARGO\-OWNER" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-owner \- Manage the owners of a crate of the registry
+.SH SYNOPSIS
+.PP
+\f[I]cargo owner\f[] [OPTIONS] [<CRATE>]
+.SH DESCRIPTION
+.PP
+This command will modify the owners for a package on the specified
+registry (or default). Note that owners of a package can upload new
+versions, and yank old versions. Explicitly named owners can also modify
+the set of owners, so take caution!
+.PP
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-a, \-\-add \f[I]LOGIN\f[]
+Name of a user or team to add as an owner.
+.RS
+.RE
+.TP
+.B \-r, \-\-remove \f[I]LOGIN\f[]
+Name of a user or team to remove as an owner.
+.RS
+.RE
+.TP
+.B \-l, \-\-list
+List owners of a crate.
+.RS
+.RE
+.TP
+.B \-\-index \f[I]INDEX\f[]
+Registry index to modify owners for.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Add user as an owner of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ owner\ \-\-add\ user
+\f[]
+.fi
+.PP
+Remove user as an owner of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ owner\ \-\-remove\ user
+\f[]
+.fi
+.PP
+Use a certain API token to authenticate with
+.IP
+.nf
+\f[C]
+$\ cargo\ owner\ \-\-token\ U6WHXacP3Qqwd5kze1fohr4JEOmGCuRK2
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-publish(1), cargo\-login(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-package.1 b/src/etc/man/cargo-package.1
new file mode 100644 (file)
index 0000000..f541f51
--- /dev/null
@@ -0,0 +1,59 @@
+.TH "CARGO\-PACKAGE" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-package \- Create a distributable tarball
+.SH SYNOPSIS
+.PP
+\f[I]cargo package\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Assemble the local package into a distributable tarball.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-l, \-\-list
+Print files included in a package without making one.
+.RS
+.RE
+.TP
+.B \-\-no\-verify
+Don\[aq]t verify the contents by building them.
+.RS
+.RE
+.TP
+.B \-\-no\-metadata
+Ignore warnings about a lack of human\-usable metadata.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-pkgid.1 b/src/etc/man/cargo-pkgid.1
new file mode 100644 (file)
index 0000000..d06da2d
--- /dev/null
@@ -0,0 +1,75 @@
+.TH "CARGO\-PKGID" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-pkgid \- Print a fully qualified package specification
+.SH SYNOPSIS
+.PP
+\f[I]cargo pkgid\f[] [OPTIONS] [<SPEC>]
+.SH DESCRIPTION
+.PP
+Given a <SPEC> argument, print out the fully qualified package id
+specifier.  This command will generate an error if <SPEC> is ambiguous as
+to which package it refers to in the dependency graph.  If no <SPEC> is
+given, then the pkgid for the local package is printed.
+.PP
+This command requires that a lockfile is available and dependencies have
+been fetched.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest of the package.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Retrieve package specification for foo package
+.IP
+.nf
+\f[C]
+$\ cargo\ pkgid\ foo
+\f[]
+.fi
+.PP
+Retrieve package specification for version 1.0.0 of foo
+.IP
+.nf
+\f[C]
+$\ cargo\ pkgid\ foo:1.0.0
+\f[]
+.fi
+.PP
+Retrieve package specification for foo from crates.io
+.IP
+.nf
+\f[C]
+$\ cargo\ pkgid\ crates.io/foo
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-generate\-lockfile(1), cargo\-search(1), cargo\-metadata(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-publish.1 b/src/etc/man/cargo-publish.1
new file mode 100644 (file)
index 0000000..2f50631
--- /dev/null
@@ -0,0 +1,59 @@
+.TH "CARGO\-PUBLISH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-publish \- Upload a package to the registry.
+.SH SYNOPSIS
+.PP
+\f[I]cargo publish\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Upload a package to the registry.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host to upload the package to.
+.RS
+.RE
+.TP
+.B \-\-token \f[I]TOKEN\f[]
+Token to use when uploading.
+.RS
+.RE
+.TP
+.B \-\-no\-verify
+Don\[aq]t verify package tarball before publish.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest of the package to publish.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1), cargo\-search(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-run.1 b/src/etc/man/cargo-run.1
new file mode 100644 (file)
index 0000000..9ad5a1a
--- /dev/null
@@ -0,0 +1,103 @@
+.TH "CARGO\-RUN" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-run \- Run the current package
+.SH SYNOPSIS
+.PP
+\f[I]cargo run\f[] [OPTIONS] [\-\-] [<ARGS>...]
+.SH DESCRIPTION
+.PP
+Run the main binary of the local package (src/main.rs).
+.PP
+If neither \f[B]\-\-bin\f[] nor \f[B]\-\-example\f[] are given, then if
+the package only has one bin target it will be run.
+Otherwise \f[B]\-\-bin\f[] specifies the bin target to run, and
+\f[B]\-\-example\f[] specifies the example target to run.
+At most one of \f[B]\-\-bin\f[] or \f[B]\-\-example\f[] can be provided.
+.PP
+All of the trailing arguments are passed to the binary to run.
+If you\[aq]re passing arguments to both Cargo and the binary, the ones
+after \f[B]\-\-\f[] go to the binary, the ones before go to Cargo.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Name of the bin target to run.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Name of the example target to run.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Run the main binary of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ run
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-new(1), cargo\-init(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-rustc.1 b/src/etc/man/cargo-rustc.1
new file mode 100644 (file)
index 0000000..f5d9a35
--- /dev/null
@@ -0,0 +1,126 @@
+.TH "CARGO\-RUSTC" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-rustc \- Compile a package, and pass extra options to the compiler
+.SH SYNOPSIS
+.PP
+\f[I]cargo rustc\f[] [OPTIONS] [\-\-] [<OPTS>...]
+.SH DESCRIPTION
+.PP
+.PP
+The specified target for the current package (or package specified by
+SPEC if provided) will be compiled along with all of its dependencies.
+The specified <OPTS>...
+will all be passed to the final compiler invocation, not any of the
+dependencies.
+Note that the compiler will still unconditionally receive arguments such
+as \-L, \-\-extern, and \-\-crate\-type, and the specified <OPTS>...
+will simply be added to the compiler invocation.
+.PP
+This command requires that only one target is being compiled.
+If more than one target is available for the current package the filters
+of \-\-lib, \-\-bin, etc, must be used to select which target is
+compiled.
+To pass flags to all compiler processes spawned by Cargo, use the
+$RUSTFLAGS environment variable or the \f[C]build.rustflags\f[]
+configuration option.
+.PP
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC\f[]
+Package to build.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Build only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Build only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Build only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Build only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Build only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-profile \f[I]PROFILE\f[]
+Profile to build the selected target for.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not compile default features for the package.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Target triple which compiles will be for.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-run(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-rustdoc.1 b/src/etc/man/cargo-rustdoc.1
new file mode 100644 (file)
index 0000000..3a898a3
--- /dev/null
@@ -0,0 +1,124 @@
+.TH "CARGO\-RUSTDOC" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-rustdoc \- Build a package\[aq]s documentation, using specified
+custom flags.
+
+.SH SYNOPSIS
+.PP
+\f[I]cargo rustdoc\f[] [OPTIONS] [\-\-] [<OPTS>...]
+.SH DESCRIPTION
+.PP
+The specified target for the current package (or package specified by
+SPEC if provided) will be documented with the specified <OPTS>...
+being passed to the final rustdoc invocation.
+Dependencies will not be documented as part of this command.
+Note that rustdoc will still unconditionally receive arguments such as
+\-L, \-\-extern, and \-\-crate\-type, and the specified <OPTS>...
+will simply be added to the rustdoc invocation.
+.PP
+If the \-\-package argument is given, then SPEC is a package id
+specification which indicates which package should be documented.
+If it is not given, then the current package is documented.
+For more information on SPEC and its format, see the
+\f[C]cargo\ help\ pkgid\f[] command.
+
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-open
+Open the docs in a browser after the operation.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC\f[]
+Package to document.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-lib
+Build only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Build only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Build only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Build only the specified test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Build only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the manifest to document.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-doc(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-search.1 b/src/etc/man/cargo-search.1
new file mode 100644 (file)
index 0000000..e8b1da3
--- /dev/null
@@ -0,0 +1,49 @@
+.TH "CARGO\-SEARCH" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-search \- Search packages in crates.io
+.SH SYNOPSIS
+.PP
+\f[I]cargo search\f[] [OPTIONS] <QUERY>...
+.SH DESCRIPTION
+.PP
+Search packages in \f[I]crates.io\f[].
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-host \f[I]HOST\f[]
+Host of a registry to search in.
+.RS
+.RE
+.TP
+.B \-\-limit \f[I]LIMIT\f[]
+Limit the number of results (default: 10, max: 100).
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1), cargo\-publish(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-test.1 b/src/etc/man/cargo-test.1
new file mode 100644 (file)
index 0000000..45b99e1
--- /dev/null
@@ -0,0 +1,172 @@
+.TH "CARGO\-TEST" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-test \- Execute unit and integration tests of a package
+.SH SYNOPSIS
+.PP
+\f[I]cargo test\f[] [OPTIONS] [\-\-] [<ARGS>...]
+.SH DESCRIPTION
+.PP
+Execute all unit and integration tests of a local package.
+.PP
+All of the trailing arguments are passed to the test binaries generated
+for filtering tests and generally providing options configuring how they
+run.
+For example, this will run all tests with the name \[aq]foo\[aq] in
+their name:
+.IP
+.nf
+\f[C]
+cargo\ test\ foo
+\f[]
+.fi
+.PP
+If the \f[B]\-\-package\f[] argument is given, then \[aq]SPEC\[aq] is a
+package id specification which indicates which package should be tested.
+If it is not given, then the current package is tested.
+For more information on \[aq]SPEC\[aq] and its format, see the "cargo
+help pkgid" command.
+.PP
+The \f[B]\-\-jobs\f[] argument affects the building of the test
+executable but does not affect how many jobs are used when running the
+tests.
+.PP
+Compilation can be configured via the \[aq]test\[aq] profile in the
+manifest.
+.PP
+By default the rust test harness hides output from test execution to
+keep results readable.
+Test output can be recovered (e.g.
+for debugging) by passing \f[B]\-\-nocapture\f[] to the test binaries:
+.IP
+.nf
+\f[C]
+cargo\ test\ \-\-\ \-\-nocapture
+\f[]
+.fi
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-lib
+Test only this package\[aq]s library.
+.RS
+.RE
+.TP
+.B \-\-doc
+Test only this library\[aq]s documentation
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Test only the specified binary.
+.RS
+.RE
+.TP
+.B \-\-example \f[I]NAME\f[]
+Test only the specified example.
+.RS
+.RE
+.TP
+.B \-\-test \f[I]NAME\f[]
+Test only the specified integration test target.
+.RS
+.RE
+.TP
+.B \-\-bench \f[I]NAME\f[]
+Test only the specified benchmark target.
+.RS
+.RE
+.TP
+.B \-\-no\-run
+Compile, but don\[aq]t run tests.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to run tests for.
+.RS
+.RE
+.TP
+.B \-j \f[I]N\f[], \-\-jobs \f[I]N\f[]
+Number of parallel jobs, defaults to # of CPUs.
+.RS
+.RE
+.TP
+.B \-\-release
+Build artifacts in release mode, with optimizations.
+.RS
+.RE
+.TP
+.B \-\-features \f[I]FEATURES\f[]
+Space\-separated list of features to also build.
+.RS
+.RE
+.TP
+.B \-\-all\-features
+Build all available features.
+.RS
+.RE
+.TP
+.B \-\-no\-default\-features
+Do not build the \f[C]default\f[] feature.
+.RS
+.RE
+.TP
+.B \-\-target \f[I]TRIPLE\f[]
+Build for the target triple.
+.RS
+.RE
+.TP
+.B \-\-manifest\-path \f[I]PATH\f[]
+Path to the Cargo.toml to compile.
+.RS
+.RE
+.TP
+.B \-\-no\-fail\-fast
+Run all tests regardless of failure.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Execute all the unit and integration tests of the current package
+.IP
+.nf
+\f[C]
+$\ cargo\ test
+\f[]
+.fi
+.PP
+Execute the BENCH benchmark
+.IP
+.nf
+\f[C]
+$\ cargo\ test\ \-\-bench\ BENCH
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-build(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-uninstall.1 b/src/etc/man/cargo-uninstall.1
new file mode 100644 (file)
index 0000000..64e9aa7
--- /dev/null
@@ -0,0 +1,56 @@
+.TH "CARGO\-UNINSTALL" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-uninstall \- Remove a Rust binary
+.SH SYNOPSIS
+.PP
+\f[I]cargo uninstall\f[] [OPTIONS] <SPEC>
+.PP
+\f[I]cargo uninstall\f[] (\-h | \-\-help)
+.SH DESCRIPTION
+.PP
+The argument SPEC is a package id specification (see
+\f[C]cargo\ help\ pkgid\f[]) to specify which crate should be
+uninstalled.
+By default all binaries are uninstalled for a crate but the
+\f[C]\-\-bin\f[] and \f[C]\-\-example\f[] flags can be used to only
+uninstall particular binaries.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-root \f[I]DIR\f[]
+Directory to uninstall packages from.
+.RS
+.RE
+.TP
+.B \-\-bin \f[I]NAME\f[]
+Only uninstall the binary NAME.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-install(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-update.1 b/src/etc/man/cargo-update.1
new file mode 100644 (file)
index 0000000..14b6437
--- /dev/null
@@ -0,0 +1,80 @@
+.TH "CARGO\-UPDATE" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-update \- Update the package dependencies
+.SH SYNOPSIS
+.PP
+\f[I]cargo update\f[] [OPTIONS]
+.SH DESCRIPTION
+.PP
+Update dependencies as recorded in the local lock file.
+.PP
+This command requires that a \f[I]Cargo.lock\f[] already exists as
+generated by \f[I]cargo build\f[] or related commands.
+.PP
+If \f[I]SPEC\f[] is given, then a conservative update of the
+\f[I]lockfile\f[] will be performed.
+This means that only the dependency specified by \f[I]SPEC\f[] will be
+updated.
+Its transitive dependencies will be updated only if \f[I]SPEC\f[] cannot
+be updated without updating dependencies.
+All other dependencies will remain locked at their currently recorded
+versions.
+.PP
+If \f[I]PRECISE\f[] is specified, then \f[B]\-\-aggressive\f[] must not
+also be specified.
+The argument \f[I]PRECISE\f[] is a string representing a precise
+revision that the package being updated should be updated to.
+For example, if the package comes from a git repository, then
+\f[I]PRECISE\f[] would be the exact revision that the repository should
+be updated to.
+.PP
+If \f[I]SPEC\f[] is not given, then all dependencies will be
+re\-resolved and updated.
+.PP
+For more information about package id specifications, see "cargo help
+pkgid".
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-p \f[I]SPEC\f[], \-\-package \f[I]SPEC ...\f[]
+Package to update.
+.RS
+.RE
+.TP
+.B \-\-aggressive
+Force updating all dependencies of <name> as well.
+.RS
+.RE
+.TP
+.B \-\-precise \f[I]PRECISE\f[]
+Update a single dependency to exactly \f[I]PRECISE\f[].
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-version.1 b/src/etc/man/cargo-version.1
new file mode 100644 (file)
index 0000000..c78344d
--- /dev/null
@@ -0,0 +1,31 @@
+.TH "CARGO\-VERSION" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-version \- Show version information
+.SH SYNOPSIS
+.PP
+\f[I]cargo version\f[] [OPTIONS]
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo-yank.1 b/src/etc/man/cargo-yank.1
new file mode 100644 (file)
index 0000000..f54b2bd
--- /dev/null
@@ -0,0 +1,68 @@
+.TH "CARGO\-YANK" "1" "July 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo\-yank \- Remove a pushed crate from the index
+.SH SYNOPSIS
+.PP
+\f[I]cargo yank\f[] [OPTIONS] [<CRATE>]
+.SH DESCRIPTION
+.PP
+The yank command removes a previously pushed crate\[aq]s version from
+the server\[aq]s index.
+This command does not delete any data, and the crate will still be
+available for download via the registry\[aq]s download link.
+.PP
+Note that existing crates locked to a yanked version will still be able
+to download the yanked version to use it.
+Cargo will, however, not allow any new crates to be locked to any yanked
+version.
+.PP
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this message.
+.RS
+.RE
+.TP
+.B \-\-vers \f[I]VERSION\f[]
+The version to yank or un-yank.
+.RS
+.RE
+.TP
+.B \-\-undo
+Undo a yank, putting a version back into the index.
+.RS
+.RE
+.TP
+.B \-\-index \f[I]INDEX\f[]
+Registry index to yank from.
+.RS
+.RE
+.TP
+.B \-\-token \f[I]TOKEN\f[]
+API token to use when authenticating.
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-q, \-\-quiet
+No output printed to stdout.
+.RS
+.RE
+.TP
+.B \-\-color \f[I]WHEN\f[]
+Coloring: auto, always, never.
+.RS
+.RE
+.SH SEE ALSO
+.PP
+cargo(1), cargo\-owner(1), cargo\-version(1)
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/src/etc/man/cargo.1 b/src/etc/man/cargo.1
new file mode 100644 (file)
index 0000000..b70b69a
--- /dev/null
@@ -0,0 +1,206 @@
+.TH "CARGO" "1" "May 2016" "The Rust package manager" "Cargo Manual"
+.hy
+.SH NAME
+.PP
+cargo \- The Rust package manager
+.SH SYNOPSIS
+.PP
+\f[I]cargo\f[] <COMMAND> [<ARGS>...]
+.SH DESCRIPTION
+.PP
+This program is a package manager for the Rust language, available at
+<http://rust-lang.org>.
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Display a help message.
+.RS
+.RE
+.TP
+.B \-V, \-\-version
+Print version information and exit.
+.RS
+.RE
+.TP
+.B \-\-list
+List all available cargo commands.
+.RS
+.RE
+.TP
+.B \-\-explain CODE
+Run \f[C]rustc\ \-\-explain\ CODE\f[]
+.RS
+.RE
+.TP
+.B \-v, \-\-verbose
+Use verbose output.
+.RS
+.RE
+.TP
+.B \-\-color
+Configure coloring of output.
+.RS
+.RE
+.SH COMMANDS
+.PP
+To get extended information about commands, run \f[I]cargo help
+<command>\f[] or \f[I]man cargo\-command\f[]
+.TP
+.B cargo\-build(1)
+Compile the current package.
+.RS
+.RE
+.TP
+.B cargo\-clean(1)
+Remove the target directory with build output.
+.RS
+.RE
+.TP
+.B cargo\-doc(1)
+Build this package\[aq]s and its dependencies\[aq] documentation.
+.RS
+.RE
+.TP
+.B cargo\-init(1)
+Create a new cargo package in the current directory.
+.RS
+.RE
+.TP
+.B cargo\-install(1)
+Install a Rust binary.
+.RS
+.RE
+.TP
+.B cargo\-new(1)
+Create a new cargo package.
+.RS
+.RE
+.TP
+.B cargo\-run(1)
+Build and execute src/main.rs.
+.RS
+.RE
+.TP
+.B cargo\-test(1)
+Run the tests for the package.
+.RS
+.RE
+.TP
+.B cargo\-bench(1)
+Run the benchmarks for the package.
+.RS
+.RE
+.TP
+.B cargo\-update(1)
+Update dependencies in Cargo.lock.
+.RS
+.RE
+.TP
+.B cargo\-rustc(1)
+Compile the current package, and optionally pass additional rustc parameters
+.RS
+.RE
+.TP
+.B cargo\-package(1)
+Generate a source tarball for the current package.
+.RS
+.RE
+.TP
+.B cargo\-publish(1)
+Package and upload this package to the registry.
+.RS
+.RE
+.TP
+.B cargo\-owner(1)
+Manage the owners of a crate on the registry.
+.RS
+.RE
+.TP
+.B cargo\-uninstall(1)
+Remove a Rust binary.
+.RS
+.RE
+.TP
+.B cargo\-search(1)
+Search registry for crates.
+.RS
+.RE
+.TP
+.B cargo\-help(1)
+Display help for a cargo command
+.RS
+.RE
+.TP
+.B cargo\-version(1)
+Print cargo\[aq]s version and exit.
+.RS
+.RE
+.SH FILES
+.TP
+.B ~/.cargo
+Directory in which Cargo stores repository data.
+Cargo can be instructed to use a \f[I]\&.cargo\f[] subdirectory in a
+different location by setting the \f[B]CARGO_HOME\f[] environment
+variable.
+.RS
+.RE
+.SH EXAMPLES
+.PP
+Build a local package and all of its dependencies
+.IP
+.nf
+\f[C]
+$\ cargo\ build
+\f[]
+.fi
+.PP
+Build a package with optimizations
+.IP
+.nf
+\f[C]
+$\ cargo\ build\ \-\-release
+\f[]
+.fi
+.PP
+Run tests for a cross\-compiled target
+.IP
+.nf
+\f[C]
+$\ cargo\ test\ \-\-target\ i686\-unknown\-linux\-gnu
+\f[]
+.fi
+.PP
+Create a new package that builds an executable
+.IP
+.nf
+\f[C]
+$\ cargo\ new\ \-\-bin\ foobar
+\f[]
+.fi
+.PP
+Create a package in the current directory
+.IP
+.nf
+\f[C]
+$\ mkdir\ foo\ &&\ cd\ foo
+$\ cargo\ init\ .
+\f[]
+.fi
+.PP
+Learn about a command\[aq]s options and usage
+.IP
+.nf
+\f[C]
+$\ cargo\ help\ clean
+\f[]
+.fi
+.SH SEE ALSO
+.PP
+rustc(1), rustdoc(1)
+.SH BUGS
+.PP
+See <https://github.com/rust-lang/cargo/issues> for issues.
+.SH COPYRIGHT
+.PP
+This work is dual\-licensed under Apache 2.0 and MIT terms.
+See \f[I]COPYRIGHT\f[] file in the cargo source distribution.
diff --git a/tests/testsuite/alt_registry.rs b/tests/testsuite/alt_registry.rs
new file mode 100644 (file)
index 0000000..b1669ff
--- /dev/null
@@ -0,0 +1,494 @@
+use std::fs::File;
+use std::io::Write;
+use support::registry::{self, alt_api_path, Package};
+use support::{basic_manifest, paths, project};
+
+#[test]
+fn is_feature_gated() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains("  feature `alternative-registries` is required")
+        .run();
+}
+
+#[test]
+fn depend_on_alt_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            reg = registry::alt_registry_path().to_str().unwrap()
+        )).run();
+
+    p.cargo("clean").masquerade_as_nightly_cargo().run();
+
+    // Don't download a second time
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+#[test]
+fn depend_on_alt_registry_depends_on_same_registry_no_index() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").alternative(true).publish();
+    Package::new("bar", "0.0.1")
+        .dep("baz", "0.0.1")
+        .alternative(true)
+        .publish();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            reg = registry::alt_registry_path().to_str().unwrap()
+        )).run();
+}
+
+#[test]
+fn depend_on_alt_registry_depends_on_same_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").alternative(true).publish();
+    Package::new("bar", "0.0.1")
+        .registry_dep("baz", "0.0.1", registry::alt_registry().as_str())
+        .alternative(true)
+        .publish();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            reg = registry::alt_registry_path().to_str().unwrap()
+        )).run();
+}
+
+#[test]
+fn depend_on_alt_registry_depends_on_crates_io() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1")
+        .registry_dep("baz", "0.0.1", registry::registry().as_str())
+        .alternative(true)
+        .publish();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{alt_reg}` index
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            alt_reg = registry::alt_registry_path().to_str().unwrap(),
+            reg = registry::registry_path().to_str().unwrap()
+        )).run();
+}
+
+#[test]
+fn registry_and_path_dep_works() {
+    registry::init();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+#[test]
+fn registry_incompatible_with_git() {
+    registry::init();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            git = ""
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().with_status(101)
+                .with_stderr_contains("  dependency (bar) specification is ambiguous. Only one of `git` or `registry` is allowed.").run();
+}
+
+#[test]
+fn cannot_publish_to_crates_io_with_registry_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    p.cargo("publish --index")
+        .arg(registry::registry().to_string())
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn publish_with_registry_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "0.0.1"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    // Login so that we have the token available
+    p.cargo("login --registry alternative TOKEN -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
+
+#[test]
+fn alt_registry_and_crates_io_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            crates_io_dep = "0.0.1"
+
+            [dependencies.alt_reg_dep]
+            version = "0.1.0"
+            registry = "alternative"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("crates_io_dep", "0.0.1").publish();
+    Package::new("alt_reg_dep", "0.1.0")
+        .alternative(true)
+        .publish();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains(format!(
+            "[UPDATING] `{}` index",
+            registry::alt_registry_path().to_str().unwrap()
+        )).with_stderr_contains(&format!(
+            "[UPDATING] `{}` index",
+            registry::registry_path().to_str().unwrap()))
+        .with_stderr_contains("[DOWNLOADED] crates_io_dep v0.0.1 (registry `[ROOT][..]`)")
+        .with_stderr_contains("[DOWNLOADED] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)")
+        .with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)")
+        .with_stderr_contains("[COMPILING] crates_io_dep v0.0.1")
+        .with_stderr_contains("[COMPILING] foo v0.0.1 ([CWD])")
+        .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s")
+        .run();
+}
+
+#[test]
+fn block_publish_due_to_no_token() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    // Setup the registry by publishing a package
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    // Now perform the actual publish
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains("error: no upload token found, please run `cargo login`")
+        .run();
+}
+
+#[test]
+fn publish_to_alt_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // Setup the registry by publishing a package
+    Package::new("bar", "0.0.1").alternative(true).publish();
+
+    // Login so that we have the token available
+    p.cargo("login --registry alternative TOKEN -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    // Now perform the actual publish
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    // Ensure that the crate is uploaded
+    assert!(alt_api_path().join("api/v1/crates/new").exists());
+}
+
+#[test]
+fn publish_with_crates_io_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = ["me"]
+            license = "MIT"
+            description = "foo"
+
+            [dependencies.bar]
+            version = "0.0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    // Login so that we have the token available
+    p.cargo("login --registry alternative TOKEN -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
+
+#[test]
+fn credentials_in_url_forbidden() {
+    registry::init();
+
+    let config = paths::home().join(".cargo/config");
+
+    File::create(config)
+        .unwrap()
+        .write_all(
+            br#"
+        [registries.alternative]
+        index = "ssh://git:secret@foobar.com"
+        "#,
+        ).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains("error: Registry URLs may not contain credentials")
+        .run();
+}
diff --git a/tests/testsuite/bad_config.rs b/tests/testsuite/bad_config.rs
new file mode 100644 (file)
index 0000000..bcef703
--- /dev/null
@@ -0,0 +1,1201 @@
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+#[test]
+fn bad1() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+              [target]
+              nonexistent-target = "foo"
+        "#,
+        ).build();
+    p.cargo("build -v --target=nonexistent-target")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] expected table for configuration key `target.nonexistent-target`, \
+but found string in [..]config
+",
+        ).run();
+}
+
+#[test]
+fn bad2() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+              [http]
+                proxy = 3.0
+        "#,
+        ).build();
+    p.cargo("publish -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+  failed to load TOML configuration from `[..]config`
+
+Caused by:
+  failed to parse key `http`
+
+Caused by:
+  failed to parse key `proxy`
+
+Caused by:
+  found TOML configuration value of unknown type `float`
+",
+        ).run();
+}
+
+#[test]
+fn bad3() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [http]
+              proxy = true
+        "#,
+        ).build();
+    Package::new("foo", "1.0.0").publish();
+
+    p.cargo("publish -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to update registry [..]
+
+Caused by:
+  error in [..]config: `http.proxy` expected a string, but found a boolean
+",
+        ).run();
+}
+
+#[test]
+fn bad4() {
+    let p = project()
+        .file(
+            ".cargo/config",
+            r#"
+            [cargo-new]
+              name = false
+        "#,
+        ).build();
+    p.cargo("new -v foo")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] Failed to create package `foo` at `[..]`
+
+Caused by:
+  error in [..]config: `cargo-new.name` expected a string, but found a boolean
+",
+        ).run();
+}
+
+#[test]
+fn bad6() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [http]
+              user-agent = true
+        "#,
+        ).build();
+    Package::new("foo", "1.0.0").publish();
+
+    p.cargo("publish -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to update registry [..]
+
+Caused by:
+  error in [..]config: `http.user-agent` expected a string, but found a boolean
+",
+        ).run();
+}
+
+#[test]
+fn bad_cargo_config_jobs() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            jobs = -1
+        "#,
+        ).build();
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] error in [..].cargo/config: \
+could not load config key `build.jobs`: \
+invalid value: integer `-1`, expected u32
+",
+        ).run();
+}
+
+#[test]
+fn default_cargo_config_jobs() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            jobs = 1
+        "#,
+        ).build();
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn good_cargo_config_jobs() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            jobs = 4
+        "#,
+        ).build();
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn invalid_global_config() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+        "#,
+        ).file(".cargo/config", "4")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+  could not parse TOML configuration in `[..]`
+
+Caused by:
+  could not parse input as TOML
+
+Caused by:
+  expected an equals, found eof at line 1
+",
+        ).run();
+}
+
+#[test]
+fn bad_cargo_lock() {
+    let p = project()
+        .file("Cargo.lock", "[[package]]\nfoo = 92")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse lock file at: [..]Cargo.lock
+
+Caused by:
+  missing field `name` for key `package`
+",
+        ).run();
+}
+
+#[test]
+fn duplicate_packages_in_cargo_lock() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+            [[package]]
+            name = "foo"
+            version = "0.0.1"
+            dependencies = [
+             "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            ]
+
+            [[package]]
+            name = "bar"
+            version = "0.1.0"
+            source = "registry+https://github.com/rust-lang/crates.io-index"
+
+            [[package]]
+            name = "bar"
+            version = "0.1.0"
+            source = "registry+https://github.com/rust-lang/crates.io-index"
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse lock file at: [..]
+
+Caused by:
+  package `bar` is specified twice in the lockfile
+",
+        ).run();
+}
+
+#[test]
+fn bad_source_in_cargo_lock() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+            [[package]]
+            name = "foo"
+            version = "0.0.1"
+            dependencies = [
+             "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            ]
+
+            [[package]]
+            name = "bar"
+            version = "0.1.0"
+            source = "You shall not parse"
+        "#,
+        ).build();
+
+    p.cargo("build --verbose")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse lock file at: [..]
+
+Caused by:
+  invalid source `You shall not parse` for key `package.source`
+",
+        ).run();
+}
+
+#[test]
+fn bad_dependency_in_lockfile() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+            [[package]]
+            name = "foo"
+            version = "0.0.1"
+            dependencies = [
+             "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            ]
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn bad_git_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            foo = { git = "file:.." }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] git repository `file:///`
+[ERROR] failed to load source for a dependency on `foo`
+
+Caused by:
+  Unable to update file:///
+
+Caused by:
+  failed to clone into: [..]
+
+Caused by:
+  [..]'file:///' is not a valid local file URI[..]
+",
+        ).run();
+}
+
+#[test]
+fn bad_crate_type() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [lib]
+            crate-type = ["bad_type", "rlib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "error: failed to run `rustc` to learn about crate-type bad_type information",
+        ).run();
+}
+
+#[test]
+fn malformed_override() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [target.x86_64-apple-darwin.freetype]
+            native = {
+              foo: "bar"
+            }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  could not parse input as TOML
+
+Caused by:
+  expected a table key, found a newline at line 8
+",
+        ).run();
+}
+
+#[test]
+fn duplicate_binary_names() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "qqq"
+           version = "0.1.0"
+           authors = ["A <a@a.a>"]
+
+           [[bin]]
+           name = "e"
+           path = "a.rs"
+
+           [[bin]]
+           name = "e"
+           path = "b.rs"
+        "#,
+        ).file("a.rs", r#"fn main() -> () {}"#)
+        .file("b.rs", r#"fn main() -> () {}"#)
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  found duplicate binary name e, but all binary targets must have a unique name
+",
+        ).run();
+}
+
+#[test]
+fn duplicate_example_names() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "qqq"
+           version = "0.1.0"
+           authors = ["A <a@a.a>"]
+
+           [[example]]
+           name = "ex"
+           path = "examples/ex.rs"
+
+           [[example]]
+           name = "ex"
+           path = "examples/ex2.rs"
+        "#,
+        ).file("examples/ex.rs", r#"fn main () -> () {}"#)
+        .file("examples/ex2.rs", r#"fn main () -> () {}"#)
+        .build();
+
+    p.cargo("build --example ex")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  found duplicate example name ex, but all example targets must have a unique name
+",
+        ).run();
+}
+
+#[test]
+fn duplicate_bench_names() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "qqq"
+           version = "0.1.0"
+           authors = ["A <a@a.a>"]
+
+           [[bench]]
+           name = "ex"
+           path = "benches/ex.rs"
+
+           [[bench]]
+           name = "ex"
+           path = "benches/ex2.rs"
+        "#,
+        ).file("benches/ex.rs", r#"fn main () {}"#)
+        .file("benches/ex2.rs", r#"fn main () {}"#)
+        .build();
+
+    p.cargo("bench")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  found duplicate bench name ex, but all bench targets must have a unique name
+",
+        ).run();
+}
+
+#[test]
+fn duplicate_deps() {
+    let p = project()
+        .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("shim-bar/src/lib.rs", "pub fn a() {}")
+        .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("linux-bar/src/lib.rs", "pub fn a() {}")
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "qqq"
+           version = "0.0.1"
+           authors = []
+
+           [dependencies]
+           bar = { path = "shim-bar" }
+
+           [target.x86_64-unknown-linux-gnu.dependencies]
+           bar = { path = "linux-bar" }
+        "#,
+        ).file("src/main.rs", r#"fn main () {}"#)
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Dependency 'bar' has different source paths depending on the build target. Each dependency must \
+have a single canonical source path irrespective of build target.
+",
+        ).run();
+}
+
+#[test]
+fn duplicate_deps_diff_sources() {
+    let p = project()
+        .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("shim-bar/src/lib.rs", "pub fn a() {}")
+        .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("linux-bar/src/lib.rs", "pub fn a() {}")
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "qqq"
+           version = "0.0.1"
+           authors = []
+
+           [target.i686-unknown-linux-gnu.dependencies]
+           bar = { path = "shim-bar" }
+
+           [target.x86_64-unknown-linux-gnu.dependencies]
+           bar = { path = "linux-bar" }
+        "#,
+        ).file("src/main.rs", r#"fn main () {}"#)
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Dependency 'bar' has different source paths depending on the build target. Each dependency must \
+have a single canonical source path irrespective of build target.
+",
+        ).run();
+}
+
+#[test]
+fn unused_keys() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "foo"
+           version = "0.1.0"
+           authors = []
+
+           [target.foo]
+           bar = "3"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: unused manifest key: target.foo.bar
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+           [package]
+           name = "foo"
+           version = "0.1.0"
+           authors = []
+
+           [profile.debug]
+           debug = 1
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: unused manifest key: profile.debug
+warning: use `[profile.dev]` to configure debug builds
+[..]
+[..]",
+        ).run();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            bulid = "foo"
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: unused manifest key: project.bulid
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    let p = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+            build = "foo"
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: unused manifest key: lib.build
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn unused_keys_in_virtual_manifest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+            bulid = "foo"
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", r"")
+        .build();
+    p.cargo("build --all")
+        .with_stderr(
+            "\
+warning: unused manifest key: workspace.bulid
+[COMPILING] bar [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn empty_dependencies() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = {}
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    p.cargo("build")
+        .with_stderr_contains(
+            "\
+warning: dependency (bar) specified without providing a local path, Git repository, or version \
+to use. This will be considered an error in future versions
+",
+        ).run();
+}
+
+#[test]
+fn invalid_toml_historically_allowed_is_warned() {
+    let p = project()
+        .file(".cargo/config", "[bar] baz = 2")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: TOML file found which contains invalid syntax and will soon not parse
+at `[..]config`.
+
+The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is
+invalid), but this file has a table header which does not have a newline after
+it. A newline needs to be added and this warning will soon become a hard error
+in the future.
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn ambiguous_git_reference() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            git = "https://127.0.0.1"
+            branch = "master"
+            tag = "some-tag"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[WARNING] dependency (bar) specification is ambiguous. \
+Only one of `branch`, `tag` or `rev` is allowed. \
+This will be considered an error in future versions
+",
+        ).run();
+}
+
+#[test]
+fn bad_source_config1() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(".cargo/config", "[source.foo]")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr("error: no source URL specified for `source.foo`, need [..]")
+        .run();
+}
+
+#[test]
+fn bad_source_config2() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.crates-io]
+            registry = 'http://example.com'
+            replace-with = 'bar'
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update registry `https://[..]`
+
+Caused by:
+  could not find a configured source with the name `bar` \
+    when attempting to lookup `crates-io` (configuration in [..])
+",
+        ).run();
+}
+
+#[test]
+fn bad_source_config3() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.crates-io]
+            registry = 'http://example.com'
+            replace-with = 'crates-io'
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update registry `https://[..]`
+
+Caused by:
+  detected a cycle of `replace-with` sources, [..]
+",
+        ).run();
+}
+
+#[test]
+fn bad_source_config4() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.crates-io]
+            registry = 'http://example.com'
+            replace-with = 'bar'
+
+            [source.bar]
+            registry = 'http://example.com'
+            replace-with = 'crates-io'
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update registry `https://[..]`
+
+Caused by:
+  detected a cycle of `replace-with` sources, the source `crates-io` is \
+    eventually replaced with itself (configuration in [..])
+",
+        ).run();
+}
+
+#[test]
+fn bad_source_config5() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.crates-io]
+            registry = 'http://example.com'
+            replace-with = 'bar'
+
+            [source.bar]
+            registry = 'not a url'
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: configuration key `source.bar.registry` specified an invalid URL (in [..])
+
+Caused by:
+  invalid url `not a url`: [..]
+",
+        ).run();
+}
+
+#[test]
+fn both_git_and_path_specified() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+        [package]
+        name = "foo"
+        version = "0.0.0"
+        authors = []
+
+        [dependencies.bar]
+        git = "https://127.0.0.1"
+        path = "bar"
+    "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    foo.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[WARNING] dependency (bar) specification is ambiguous. \
+Only one of `git` or `path` is allowed. \
+This will be considered an error in future versions
+",
+        ).run();
+}
+
+#[test]
+fn bad_source_config6() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.crates-io]
+            registry = 'http://example.com'
+            replace-with = ['not', 'a', 'string']
+        "#,
+        ).build();
+
+    p.cargo("build").with_status(101).with_stderr(
+            "error: expected a string, but found a array for `source.crates-io.replace-with` in [..]",
+        )
+        .run();
+}
+
+#[test]
+fn ignored_git_revision() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+        [package]
+        name = "foo"
+        version = "0.0.0"
+        authors = []
+
+        [dependencies.bar]
+        path = "bar"
+        branch = "spam"
+    "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    foo.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+             [WARNING] key `branch` is ignored for dependency (bar). \
+             This will be considered an error in future versions",
+        ).run();
+}
+
+#[test]
+fn bad_source_config7() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.foo]
+            registry = 'http://example.com'
+            local-registry = 'file:///another/file'
+        "#,
+        ).build();
+
+    Package::new("bar", "0.1.0").publish();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr("error: more than one source URL specified for `source.foo`")
+        .run();
+}
+
+#[test]
+fn bad_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies]
+            bar = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  invalid type: integer `3`, expected a version string like [..]
+",
+        ).run();
+}
+
+#[test]
+fn bad_debuginfo() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [profile.dev]
+            debug = 'a'
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  invalid type: string \"a\", expected a boolean or an integer for [..]
+",
+        ).run();
+}
+
+#[test]
+fn bad_opt_level() { // NOTE(review): name says opt-level, but the manifest below exercises `build = 3` — likely a copy/paste misnomer; confirm upstream before renaming
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101) // manifest parse error: `build` must be a boolean or a string, not an integer
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  invalid type: integer `3`, expected a boolean or a string for key [..]
+",
+        ).run();
+}
diff --git a/tests/testsuite/bad_manifest_path.rs b/tests/testsuite/bad_manifest_path.rs
new file mode 100644 (file)
index 0000000..41ba86b
--- /dev/null
@@ -0,0 +1,376 @@
+use support::{basic_bin_manifest, main_file, project};
+
+fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { // helper: `<command> --manifest-path <path>` where <path> is not a Cargo.toml file must fail
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo(command)
+        .arg("--manifest-path")
+        .arg(manifest_path_argument)
+        .cwd(p.root().parent().unwrap()) // run from the parent dir so the relative path argument is meaningful
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] the manifest-path must be a path \
+             to a Cargo.toml file",
+        ).run();
+}
+
+fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { // helper: `<command> --manifest-path <path>` where <path> does not exist must fail
+    let p = project().build();
+    let expected_path = manifest_path_argument
+        .split('/')
+        .collect::<Vec<_>>()
+        .join("[..]"); // rebuild the path with `[..]` wildcards between components so the separator can vary
+
+    p.cargo(command)
+        .arg("--manifest-path")
+        .arg(manifest_path_argument)
+        .cwd(p.root().parent().unwrap())
+        .with_status(101)
+        .with_stderr(format!(
+            "[ERROR] manifest path `{}` does not exist",
+            expected_path
+        )).run();
+}
+
+#[test]
+fn bench_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("bench", "foo");
+}
+
+#[test]
+fn bench_dir_plus_file() {
+    assert_not_a_cargo_toml("bench", "foo/bar");
+}
+
+#[test]
+fn bench_dir_plus_path() {
+    assert_not_a_cargo_toml("bench", "foo/bar/baz");
+}
+
+#[test]
+fn bench_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn build_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("build", "foo");
+}
+
+#[test]
+fn build_dir_plus_file() {
+    assert_not_a_cargo_toml("bench", "foo/bar");
+}
+
+#[test]
+fn build_dir_plus_path() {
+    assert_not_a_cargo_toml("bench", "foo/bar/baz");
+}
+
+#[test]
+fn build_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn clean_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("clean", "foo");
+}
+
+#[test]
+fn clean_dir_plus_file() {
+    assert_not_a_cargo_toml("clean", "foo/bar");
+}
+
+#[test]
+fn clean_dir_plus_path() {
+    assert_not_a_cargo_toml("clean", "foo/bar/baz");
+}
+
+#[test]
+fn clean_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn doc_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("doc", "foo");
+}
+
+#[test]
+fn doc_dir_plus_file() {
+    assert_not_a_cargo_toml("doc", "foo/bar");
+}
+
+#[test]
+fn doc_dir_plus_path() {
+    assert_not_a_cargo_toml("doc", "foo/bar/baz");
+}
+
+#[test]
+fn doc_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn fetch_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("fetch", "foo");
+}
+
+#[test]
+fn fetch_dir_plus_file() {
+    assert_not_a_cargo_toml("fetch", "foo/bar");
+}
+
+#[test]
+fn fetch_dir_plus_path() {
+    assert_not_a_cargo_toml("fetch", "foo/bar/baz");
+}
+
+#[test]
+fn fetch_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn generate_lockfile_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("generate-lockfile", "foo");
+}
+
+#[test]
+fn generate_lockfile_dir_plus_file() {
+    assert_not_a_cargo_toml("generate-lockfile", "foo/bar");
+}
+
+#[test]
+fn generate_lockfile_dir_plus_path() {
+    assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz");
+}
+
+#[test]
+fn generate_lockfile_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn package_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("package", "foo");
+}
+
+#[test]
+fn package_dir_plus_file() {
+    assert_not_a_cargo_toml("package", "foo/bar");
+}
+
+#[test]
+fn package_dir_plus_path() {
+    assert_not_a_cargo_toml("package", "foo/bar/baz");
+}
+
+#[test]
+fn package_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn pkgid_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("pkgid", "foo");
+}
+
+#[test]
+fn pkgid_dir_plus_file() {
+    assert_not_a_cargo_toml("pkgid", "foo/bar");
+}
+
+#[test]
+fn pkgid_dir_plus_path() {
+    assert_not_a_cargo_toml("pkgid", "foo/bar/baz");
+}
+
+#[test]
+fn pkgid_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn publish_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("publish", "foo");
+}
+
+#[test]
+fn publish_dir_plus_file() {
+    assert_not_a_cargo_toml("publish", "foo/bar");
+}
+
+#[test]
+fn publish_dir_plus_path() {
+    assert_not_a_cargo_toml("publish", "foo/bar/baz");
+}
+
+#[test]
+fn publish_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn read_manifest_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("read-manifest", "foo");
+}
+
+#[test]
+fn read_manifest_dir_plus_file() {
+    assert_not_a_cargo_toml("read-manifest", "foo/bar");
+}
+
+#[test]
+fn read_manifest_dir_plus_path() {
+    assert_not_a_cargo_toml("read-manifest", "foo/bar/baz");
+}
+
+#[test]
+fn read_manifest_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn run_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("run", "foo");
+}
+
+#[test]
+fn run_dir_plus_file() {
+    assert_not_a_cargo_toml("run", "foo/bar");
+}
+
+#[test]
+fn run_dir_plus_path() {
+    assert_not_a_cargo_toml("run", "foo/bar/baz");
+}
+
+#[test]
+fn run_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn rustc_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("rustc", "foo");
+}
+
+#[test]
+fn rustc_dir_plus_file() {
+    assert_not_a_cargo_toml("rustc", "foo/bar");
+}
+
+#[test]
+fn rustc_dir_plus_path() {
+    assert_not_a_cargo_toml("rustc", "foo/bar/baz");
+}
+
+#[test]
+fn rustc_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn test_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("test", "foo");
+}
+
+#[test]
+fn test_dir_plus_file() {
+    assert_not_a_cargo_toml("test", "foo/bar");
+}
+
+#[test]
+fn test_dir_plus_path() {
+    assert_not_a_cargo_toml("test", "foo/bar/baz");
+}
+
+#[test]
+fn test_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn update_dir_containing_cargo_toml() {
+    assert_not_a_cargo_toml("update", "foo");
+}
+
+#[test]
+fn update_dir_plus_file() {
+    assert_not_a_cargo_toml("update", "foo/bar");
+}
+
+#[test]
+fn update_dir_plus_path() {
+    assert_not_a_cargo_toml("update", "foo/bar/baz");
+}
+
+#[test]
+fn update_dir_to_nonexistent_cargo_toml() {
+    assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml");
+}
+
+#[test]
+fn verify_project_dir_containing_cargo_toml() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("verify-project --manifest-path foo")
+        .cwd(p.root().parent().unwrap())
+        .with_status(1)
+        .with_stdout(
+            "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+             ",
+        ).run();
+}
+
+#[test]
+fn verify_project_dir_plus_file() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("verify-project --manifest-path foo/bar")
+        .cwd(p.root().parent().unwrap())
+        .with_status(1)
+        .with_stdout(
+            "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+             ",
+        ).run();
+}
+
+#[test]
+fn verify_project_dir_plus_path() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("verify-project --manifest-path foo/bar/baz")
+        .cwd(p.root().parent().unwrap())
+        .with_status(1)
+        .with_stdout(
+            "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\
+             ",
+        ).run();
+}
+
+#[test]
+fn verify_project_dir_to_nonexistent_cargo_toml() {
+    let p = project().build();
+    p.cargo("verify-project --manifest-path foo/bar/baz/Cargo.toml")
+        .cwd(p.root().parent().unwrap())
+        .with_status(1)
+        .with_stdout(
+            "{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\
+             ",
+        ).run();
+}
diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs
new file mode 100644 (file)
index 0000000..5e57278
--- /dev/null
@@ -0,0 +1,1487 @@
+use support::is_nightly;
+use support::paths::CargoPathExt;
+use support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project};
+
+#[test]
+fn cargo_bench_simple() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+
+            fn hello() -> &'static str {
+                "hello"
+            }
+
+            pub fn main() {
+                println!("{}", hello())
+            }
+
+            #[bench]
+            fn bench_hello(_b: &mut test::Bencher) {
+                assert_eq!(hello(), "hello")
+            }"#,
+        ).build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test bench_hello ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn bench_bench_implicit() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }
+            fn main() { println!("Hello main!"); }"#,
+        ).file(
+            "tests/other.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run3(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            "benches/mybench.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+        ).build();
+
+    p.cargo("bench --benches")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+[RUNNING] target/release/deps/mybench-[..][EXE]
+",
+        ).with_stdout_contains("test run2 ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn bench_bin_implicit() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }
+            fn main() { println!("Hello main!"); }"#,
+        ).file(
+            "tests/other.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run3(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            "benches/mybench.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+        ).build();
+
+    p.cargo("bench --bins")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+",
+        ).with_stdout_contains("test run1 ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn bench_tarname() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "benches/bin1.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            "benches/bin2.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+        ).build();
+
+    p.cargo("bench --bench bin2")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/bin2-[..][EXE]
+",
+        ).with_stdout_contains("test run2 ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn bench_multiple_targets() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "benches/bin1.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            "benches/bin2.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run2(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            "benches/bin3.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run3(_ben: &mut test::Bencher) { }"#,
+        ).build();
+
+    p.cargo("bench --bench bin1 --bench bin2")
+        .with_stdout_contains("test run1 ... bench: [..]")
+        .with_stdout_contains("test run2 ... bench: [..]")
+        .with_stdout_does_not_contain("[..]run3[..]")
+        .run();
+}
+
+#[test]
+fn cargo_bench_verbose() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+            fn main() {}
+            #[bench] fn bench_hello(_b: &mut test::Bencher) {}
+        "#,
+        ).build();
+
+    p.cargo("bench -v hello")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE] hello --bench`",
+        ).with_stdout_contains("test bench_hello ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn many_similar_names() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+            pub fn foo() {}
+            #[bench] fn lib_bench(_b: &mut test::Bencher) {}
+        ",
+        ).file(
+            "src/main.rs",
+            "
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate foo;
+            #[cfg(test)]
+            extern crate test;
+            fn main() {}
+            #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() }
+        ",
+        ).file(
+            "benches/foo.rs",
+            r#"
+            #![feature(test)]
+            extern crate foo;
+            extern crate test;
+            #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() }
+        "#,
+        ).build();
+
+    p.cargo("bench")
+        .with_stdout_contains("test bin_bench ... bench:           0 ns/iter (+/- 0)")
+        .with_stdout_contains("test lib_bench ... bench:           0 ns/iter (+/- 0)")
+        .with_stdout_contains("test bench_bench ... bench:           0 ns/iter (+/- 0)")
+        .run();
+}
+
+#[test]
+fn cargo_bench_failing_test() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+            fn hello() -> &'static str {
+                "hello"
+            }
+
+            pub fn main() {
+                println!("{}", hello())
+            }
+
+            #[bench]
+            fn bench_hello(_b: &mut test::Bencher) {
+                assert_eq!(hello(), "nope")
+            }"#,
+        ).build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+    // Force libtest into serial execution so that the test header will be printed.
+    p.cargo("bench -- --test-threads=1")
+        .with_stdout_contains("test bench_hello ...[..]")
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])[..]
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_either_contains(
+            "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]",
+        ).with_either_contains("[..]left: `\"hello\"`[..]")
+        .with_either_contains("[..]right: `\"nope\"`[..]")
+        .with_either_contains("[..]src/main.rs:15[..]")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn bench_with_lib_dep() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "baz"
+            path = "src/main.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+            ///
+            /// ```rust
+            /// extern crate foo;
+            /// fn main() {
+            ///     println!("{}", foo::foo());
+            /// }
+            /// ```
+            ///
+            pub fn foo(){}
+            #[bench] fn lib_bench(_b: &mut test::Bencher) {}
+        "#,
+        ).file(
+            "src/main.rs",
+            "
+            #![feature(test)]
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            #[cfg(test)]
+            extern crate test;
+
+            fn main() {}
+
+            #[bench]
+            fn bin_bench(_b: &mut test::Bencher) {}
+        ",
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+[RUNNING] target/release/deps/baz-[..][EXE]",
+        ).with_stdout_contains("test lib_bench ... bench: [..]")
+        .with_stdout_contains("test bin_bench ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn bench_with_deep_lib_dep() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.foo]
+            path = "../foo"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate foo;
+            #[cfg(test)]
+            extern crate test;
+            #[bench]
+            fn bar_bench(_b: &mut test::Bencher) {
+                foo::foo();
+            }
+        ",
+        ).build();
+    let _p2 = project()
+        .file(
+            "src/lib.rs",
+            "
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+
+            pub fn foo() {}
+
+            #[bench]
+            fn foo_bench(_b: &mut test::Bencher) {}
+        ",
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[COMPILING] bar v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/bar-[..][EXE]",
+        ).with_stdout_contains("test bar_bench ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn external_bench_explicit() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bench]]
+            name = "bench"
+            path = "src/bench.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+            pub fn get_hello() -> &'static str { "Hello" }
+
+            #[bench]
+            fn internal_bench(_b: &mut test::Bencher) {}
+        "#,
+        ).file(
+            "src/bench.rs",
+            r#"
+            #![feature(test)]
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            extern crate test;
+
+            #[bench]
+            fn external_bench(_b: &mut test::Bencher) {}
+        "#,
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+[RUNNING] target/release/deps/bench-[..][EXE]",
+        ).with_stdout_contains("test internal_bench ... bench: [..]")
+        .with_stdout_contains("test external_bench ... bench: [..]")
+        .run();
+}
+
+// A file under benches/ is auto-discovered as a bench target without
+// any `[[bench]]` section in the manifest; it runs together with the
+// lib's internal benches.
+#[test]
+fn external_bench_implicit() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+
+            pub fn get_hello() -> &'static str { "Hello" }
+
+            #[bench]
+            fn internal_bench(_b: &mut test::Bencher) {}
+        "#,
+        ).file(
+            "benches/external.rs",
+            r#"
+            #![feature(test)]
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            extern crate test;
+
+            #[bench]
+            fn external_bench(_b: &mut test::Bencher) {}
+        "#,
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+[RUNNING] target/release/deps/external-[..][EXE]",
+        ).with_stdout_contains("test internal_bench ... bench: [..]")
+        .with_stdout_contains("test external_bench ... bench: [..]")
+        .run();
+}
+
+// In a 2015-edition manifest, an explicit `[[bench]]` section disables
+// auto-discovery of other benches/*.rs files; cargo emits the warning
+// about the inference change coming in the 2018 edition and only the
+// lib target's bench executable runs.
+#[test]
+fn bench_autodiscover_2015() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+                edition = "2015"
+
+                [[bench]]
+                name = "bench_magic"
+                required-features = ["magic"]
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "benches/bench_basic.rs",
+            r#"
+                #![feature(test)]
+                #[allow(unused_extern_crates)]
+                extern crate foo;
+                extern crate test;
+
+                #[bench]
+                fn bench_basic(_b: &mut test::Bencher) {}
+            "#,
+        ).file(
+            "benches/bench_magic.rs",
+            r#"
+                #![feature(test)]
+                #[allow(unused_extern_crates)]
+                extern crate foo;
+                extern crate test;
+
+                #[bench]
+                fn bench_magic(_b: &mut test::Bencher) {}
+            "#,
+        ).build();
+
+    // bench_basic.rs is NOT inferred as a target; the warning names it
+    // as a file that would become a target in the 2018 edition.
+    p.cargo("bench bench_basic")
+        .with_stderr(
+            "warning: \
+An explicit [[bench]] section is specified in Cargo.toml which currently
+disables Cargo from automatically inferring other benchmark targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a benchmark target:
+
+* [..]bench_basic.rs
+
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a benchmark target today. You can future-proof yourself
+and disable this warning by adding `autobenches = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+",
+        ).run();
+}
+
+#[test]
+fn dont_run_examples() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("src/lib.rs", r"")
+        .file(
+            "examples/dont-run-me-i-will-fail.rs",
+            r#"fn main() { panic!("Examples should not be run by 'cargo test'"); }"#,
+        ).build();
+    p.cargo("bench").run();
+}
+
+// A filter argument after `cargo bench` is passed through to the bench
+// harness so that only the matching benchmark function is reported.
+#[test]
+fn pass_through_command_line() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+
+            #[bench] fn foo(_b: &mut test::Bencher) {}
+            #[bench] fn bar(_b: &mut test::Bencher) {}
+        ",
+        ).build();
+
+    // First invocation compiles and filters to `bar` only.
+    p.cargo("bench bar")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test bar ... bench: [..]")
+        .run();
+
+    // Second invocation reuses the build (no [COMPILING] line) and
+    // filters to `foo`.
+    p.cargo("bench foo")
+        .with_stderr(
+            "[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test foo ... bench: [..]")
+        .run();
+}
+
+// Regression test for running cargo-bench twice with
+// tests in an rlib
+#[test]
+fn cargo_bench_twice() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            #![crate_type = "rlib"]
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+
+            #[bench]
+            fn dummy_bench(b: &mut test::Bencher) { }
+            "#,
+        ).build();
+
+    for _ in 0..2 {
+        p.cargo("bench").run();
+    }
+}
+
+// Lib and bin targets sharing the name `foo`: both are compiled as
+// bench executables and both run (two foo-* binaries, two results).
+#[test]
+fn lib_bin_same_name() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+            #[bench] fn lib_bench(_b: &mut test::Bencher) {}
+        ",
+        ).file(
+            "src/main.rs",
+            "
+            #![cfg_attr(test, feature(test))]
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            #[cfg(test)]
+            extern crate test;
+
+            #[bench]
+            fn bin_bench(_b: &mut test::Bencher) {}
+        ",
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains_n("test [..] ... bench: [..]", 2)
+        .run();
+}
+
+// A library crate named `syntax`: its own bench and an external bench
+// that links it via `extern crate syntax` both build and run.
+#[test]
+fn lib_with_standard_name() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            "
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+
+            /// ```
+            /// syntax::foo();
+            /// ```
+            pub fn foo() {}
+
+            #[bench]
+            fn foo_bench(_b: &mut test::Bencher) {}
+        ",
+        ).file(
+            "benches/bench.rs",
+            "
+            #![feature(test)]
+            extern crate syntax;
+            extern crate test;
+
+            #[bench]
+            fn bench(_b: &mut test::Bencher) { syntax::foo() }
+        ",
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/syntax-[..][EXE]
+[RUNNING] target/release/deps/bench-[..][EXE]",
+        ).with_stdout_contains("test foo_bench ... bench: [..]")
+        .with_stdout_contains("test bench ... bench: [..]")
+        .run();
+}
+
+// With `bench = false` and `doctest = false` on the lib, only the bin
+// target's bench executable is run (a single syntax-* binary).
+#[test]
+fn lib_with_standard_name2() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "syntax"
+            bench = false
+            doctest = false
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate syntax;
+            #[cfg(test)]
+            extern crate test;
+
+            fn main() {}
+
+            #[bench]
+            fn bench(_b: &mut test::Bencher) { syntax::foo() }
+        ",
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/syntax-[..][EXE]",
+        ).with_stdout_contains("test bench ... bench: [..]")
+        .run();
+}
+
+// Benching a dylib crate that depends on another dylib. The verbose
+// output checks that release optimization (`-C opt-level=3`) is applied
+// to every compilation, and a second run after rewinding mtimes is
+// entirely [FRESH] (nothing recompiles).
+#[test]
+fn bench_dylib() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate_type = ["dylib"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![cfg_attr(test, feature(test))]
+            extern crate bar as the_bar;
+            #[cfg(test)]
+            extern crate test;
+
+            pub fn bar() { the_bar::baz(); }
+
+            #[bench]
+            fn foo(_b: &mut test::Bencher) {}
+        "#,
+        ).file(
+            "benches/bench.rs",
+            r#"
+            #![feature(test)]
+            extern crate foo as the_foo;
+            extern crate test;
+
+            #[bench]
+            fn foo(_b: &mut test::Bencher) { the_foo::bar(); }
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            crate_type = ["dylib"]
+        "#,
+        ).file("bar/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("bench -v")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] [..] -C opt-level=3 [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench`
+[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`",
+        ).with_stdout_contains_n("test foo ... bench: [..]", 2)
+        .run();
+
+    // Rewind mtimes, then verify the rebuild is a no-op ([FRESH]).
+    p.root().move_into_the_past();
+    p.cargo("bench -v")
+        .with_stderr(
+            "\
+[FRESH] bar v0.0.1 ([CWD]/bar)
+[FRESH] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench`
+[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`",
+        ).with_stdout_contains_n("test foo ... bench: [..]", 2)
+        .run();
+}
+
+// With a build script present, the first `cargo bench` compiles the
+// package while the second is a no-op rebuild ([FINISHED] only) that
+// still runs the bench.
+#[test]
+fn bench_twice_with_build_cmd() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file(
+            "src/lib.rs",
+            "
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+            #[bench]
+            fn foo(_b: &mut test::Bencher) {}
+        ",
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test foo ... bench: [..]")
+        .run();
+
+    p.cargo("bench")
+        .with_stderr(
+            "[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test foo ... bench: [..]")
+        .run();
+}
+
+// When the package also has an example target, `cargo bench -v` still
+// compiles it (three rustc invocations appear) but executes only the
+// lib's and the [[bench]] target's bench binaries — never the example.
+#[test]
+fn bench_with_examples() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "6.6.6"
+            authors = []
+
+            [[example]]
+            name = "teste1"
+
+            [[bench]]
+            name = "testb1"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![cfg_attr(test, feature(test))]
+            #[cfg(test)]
+            extern crate test;
+            #[cfg(test)]
+            use test::Bencher;
+
+            pub fn f1() {
+                println!("f1");
+            }
+
+            pub fn f2() {}
+
+            #[bench]
+            fn bench_bench1(_b: &mut Bencher) {
+                f2();
+            }
+        "#,
+        ).file(
+            "benches/testb1.rs",
+            "
+            #![feature(test)]
+            extern crate foo;
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_bench2(_b: &mut Bencher) {
+                foo::f2();
+            }
+        ",
+        ).file(
+            "examples/teste1.rs",
+            r#"
+            extern crate foo;
+
+            fn main() {
+                println!("example1");
+                foo::f1();
+            }
+        "#,
+        ).build();
+
+    p.cargo("bench -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v6.6.6 ([CWD])
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[CWD]/target/release/deps/foo-[..][EXE] --bench`
+[RUNNING] `[CWD]/target/release/deps/testb1-[..][EXE] --bench`",
+        ).with_stdout_contains("test bench_bench1 ... bench: [..]")
+        .with_stdout_contains("test bench_bench2 ... bench: [..]")
+        .run();
+}
+
+// A [[bench]] target with `test = true` is compiled and run by plain
+// `cargo test` (dev profile), even though the lib itself has both
+// `test` and `doctest` disabled.
+#[test]
+fn test_a_bench() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            authors = []
+            version = "0.1.0"
+
+            [lib]
+            name = "foo"
+            test = false
+            doctest = false
+
+            [[bench]]
+            name = "b"
+            test = true
+        "#,
+        ).file("src/lib.rs", "")
+        .file("benches/b.rs", "#[test] fn foo() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/b-[..][EXE]",
+        ).with_stdout_contains("test foo ... ok")
+        .run();
+}
+
+// `cargo bench --no-run` compiles the bench target but executes
+// nothing: the expected stderr has no [RUNNING] lines.
+#[test]
+fn test_bench_no_run() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "benches/bbaz.rs",
+            r#"
+            #![feature(test)]
+
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_baz(_: &mut Bencher) {}
+        "#,
+        ).build();
+
+    p.cargo("bench --no-run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+// `--no-fail-fast`: even though bench_nope's assertion fails, both
+// benchmarks are still listed in the output and the overall exit status
+// is 101. `--test-threads=1` keeps the harness output deterministic.
+#[test]
+fn test_bench_no_fail_fast() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+            fn hello() -> &'static str {
+                "hello"
+            }
+
+            pub fn main() {
+                println!("{}", hello())
+            }
+
+            #[bench]
+            fn bench_hello(_b: &mut test::Bencher) {
+                assert_eq!(hello(), "hello")
+            }
+
+            #[bench]
+            fn bench_nope(_b: &mut test::Bencher) {
+                assert_eq!("nope", hello())
+            }"#,
+        ).build();
+
+    p.cargo("bench --no-fail-fast -- --test-threads=1")
+        .with_status(101)
+        .with_stderr_contains("[RUNNING] target/release/deps/foo-[..][EXE]")
+        .with_stdout_contains("running 2 tests")
+        .with_stderr_contains("[RUNNING] target/release/deps/foo-[..][EXE]")
+        .with_stdout_contains("test bench_hello [..]")
+        .with_stdout_contains("test bench_nope [..]")
+        .run();
+}
+
+// `cargo bench -p bar -p baz` runs the bench targets of both path
+// dependencies (and not the root package's).
+#[test]
+fn test_bench_multiple_packages() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            authors = []
+            version = "0.1.0"
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let _bar = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            authors = []
+            version = "0.1.0"
+
+            [[bench]]
+            name = "bbar"
+            test = true
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "benches/bbar.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_bar(_b: &mut Bencher) {}
+        "#,
+        ).build();
+
+    let _baz = project()
+        .at("baz")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            authors = []
+            version = "0.1.0"
+
+            [[bench]]
+            name = "bbaz"
+            test = true
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "benches/bbaz.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_baz(_b: &mut Bencher) {}
+        "#,
+        ).build();
+
+    // Only _contains assertions: the two packages may build/run in
+    // either order.
+    p.cargo("bench -p bar -p baz")
+        .with_stderr_contains("[RUNNING] target/release/deps/bbaz-[..][EXE]")
+        .with_stdout_contains("test bench_baz ... bench: [..]")
+        .with_stderr_contains("[RUNNING] target/release/deps/bbar-[..][EXE]")
+        .with_stdout_contains("test bench_bar ... bench: [..]")
+        .run();
+}
+
+// `cargo bench --all` runs the benches of both the workspace root and
+// its member/dependency `bar`.
+#[test]
+fn bench_all_workspace() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "benches/foo.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_foo(_: &mut Bencher) -> () { () }
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file(
+            "bar/benches/bar.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_bar(_: &mut Bencher) -> () { () }
+        "#,
+        ).build();
+
+    p.cargo("bench --all")
+        .with_stderr_contains("[RUNNING] target/release/deps/bar-[..][EXE]")
+        .with_stdout_contains("test bench_bar ... bench: [..]")
+        .with_stderr_contains("[RUNNING] target/release/deps/foo-[..][EXE]")
+        .with_stdout_contains("test bench_foo ... bench: [..]")
+        .run();
+}
+
+// `--all --exclude baz` skips baz entirely — its source would not even
+// compile (`break_the_build()` is undefined) — and runs only bar's
+// bench.
+#[test]
+fn bench_all_exclude() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file(
+            "bar/src/lib.rs",
+            r#"
+            #![feature(test)]
+            #[cfg(test)]
+            extern crate test;
+
+            #[bench]
+            pub fn bar(b: &mut test::Bencher) {
+                b.iter(|| {});
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file(
+            "baz/src/lib.rs",
+            "#[test] pub fn baz() { break_the_build(); }",
+        ).build();
+
+    p.cargo("bench --all --exclude baz")
+        .with_stdout_contains(
+            "\
+running 1 test
+test bar ... bench:           [..] ns/iter (+/- [..])",
+        ).run();
+}
+
+// `cargo bench --all` in a virtual workspace (no root package) runs
+// every member's benches.
+#[test]
+fn bench_all_virtual_manifest() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file(
+            "bar/benches/bar.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_bar(_: &mut Bencher) -> () { () }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .file(
+            "baz/benches/baz.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_baz(_: &mut Bencher) -> () { () }
+        "#,
+        ).build();
+
+    // The order in which bar and baz are built is not guaranteed
+    p.cargo("bench --all")
+        .with_stderr_contains("[RUNNING] target/release/deps/baz-[..][EXE]")
+        .with_stdout_contains("test bench_baz ... bench: [..]")
+        .with_stderr_contains("[RUNNING] target/release/deps/bar-[..][EXE]")
+        .with_stdout_contains("test bench_bar ... bench: [..]")
+        .run();
+}
+
+// Regression test for https://github.com/rust-lang/cargo/issues/4287:
+// a [[bench]] target named `bench` with no `path` falls back to the
+// legacy src/bench.rs location, and cargo warns that the path should
+// be set explicitly.
+#[test]
+fn legacy_bench_name() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [[bench]]
+            name = "bench"
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/bench.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            use test::Bencher;
+
+            #[bench]
+            fn bench_foo(_: &mut Bencher) -> () { () }
+        "#,
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr_contains(
+            "\
+[WARNING] path `[..]src/bench.rs` was erroneously implicitly accepted for benchmark `bench`,
+please set bench.path in Cargo.toml",
+        ).run();
+}
+
+// Bare `cargo bench` in a virtual workspace implies `--all`: both
+// members' benches run without any flag.
+#[test]
+fn bench_virtual_manifest_all_implied() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn foo() {}")
+        .file(
+            "bar/benches/bar.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            use test::Bencher;
+            #[bench]
+            fn bench_bar(_: &mut Bencher) -> () { () }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .file(
+            "baz/benches/baz.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            use test::Bencher;
+            #[bench]
+            fn bench_baz(_: &mut Bencher) -> () { () }
+        "#,
+        ).build();
+
+    // The order in which bar and baz are built is not guaranteed
+
+    p.cargo("bench")
+        .with_stderr_contains("[RUNNING] target/release/deps/baz-[..][EXE]")
+        .with_stdout_contains("test bench_baz ... bench: [..]")
+        .with_stderr_contains("[RUNNING] target/release/deps/bar-[..][EXE]")
+        .with_stdout_contains("test bench_bar ... bench: [..]")
+        .run();
+}
diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs
new file mode 100644 (file)
index 0000000..928bddd
--- /dev/null
@@ -0,0 +1,4375 @@
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use cargo::util::paths::dylib_path_envvar;
+use support::paths::{root, CargoPathExt};
+use support::registry::Package;
+use support::ProjectBuilder;
+use support::{
+    basic_bin_manifest, basic_lib_manifest, basic_manifest, is_nightly, rustc_host, sleep_ms,
+};
+use support::{main_file, project, Execs};
+
+#[test]
+fn cargo_compile_simple() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
+}
+
+#[test]
+fn cargo_fail_with_no_stderr() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &String::from("refusal"))
+        .build();
+    p.cargo("build --message-format=json")
+        .with_status(101)
+        .with_stderr_does_not_contain("--- stderr")
+        .run();
+}
+
+/// Check that the `CARGO_INCREMENTAL` environment variable results in
+/// `rustc` getting `-Zincremental` passed to it.
+///
+/// Both `cargo build -v` and `cargo test -v` are checked: each must
+/// invoke rustc with `-C incremental=<target>/debug/incremental`.
+#[test]
+fn cargo_compile_incremental() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build -v")
+        .env("CARGO_INCREMENTAL", "1")
+        .with_stderr_contains(
+            "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n",
+        ).run();
+
+    p.cargo("test -v")
+        .env("CARGO_INCREMENTAL", "1")
+        .with_stderr_contains(
+            "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n",
+        ).run();
+}
+
+// Per-profile `incremental` settings take effect when CARGO_INCREMENTAL
+// is unset (dev: off, release: on here), and the environment variable,
+// when present, overrides the profile in either direction.
+#[test]
+fn incremental_profile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [profile.dev]
+            incremental = false
+
+            [profile.release]
+            incremental = true
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // dev profile says off, env unset -> no incremental flag.
+    p.cargo("build -v")
+        .env_remove("CARGO_INCREMENTAL")
+        .with_stderr_does_not_contain("[..]C incremental=[..]")
+        .run();
+
+    // env=1 overrides the dev profile's `incremental = false`.
+    p.cargo("build -v")
+        .env("CARGO_INCREMENTAL", "1")
+        .with_stderr_contains("[..]C incremental=[..]")
+        .run();
+
+    // release profile says on, env unset -> incremental flag present.
+    p.cargo("build --release -v")
+        .env_remove("CARGO_INCREMENTAL")
+        .with_stderr_contains("[..]C incremental=[..]")
+        .run();
+
+    // env=0 overrides the release profile's `incremental = true`.
+    p.cargo("build --release -v")
+        .env("CARGO_INCREMENTAL", "0")
+        .with_stderr_does_not_contain("[..]C incremental=[..]")
+        .run();
+}
+
+// `build.incremental = false` in .cargo/config disables incremental
+// compilation by default, but CARGO_INCREMENTAL=1 still overrides it.
+#[test]
+fn incremental_config() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            incremental = false
+        "#,
+        ).build();
+
+    p.cargo("build -v")
+        .env_remove("CARGO_INCREMENTAL")
+        .with_stderr_does_not_contain("[..]C incremental=[..]")
+        .run();
+
+    p.cargo("build -v")
+        .env("CARGO_INCREMENTAL", "1")
+        .with_stderr_contains("[..]C incremental=[..]")
+        .run();
+}
+
+#[test]
+fn cargo_compile_with_workspace_excluded() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    p.cargo("build --all --exclude foo")
+        .with_stderr_does_not_contain("[..]virtual[..]")
+        .with_stderr_contains("[..]no packages to compile")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn cargo_compile_manifest_path() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build --manifest-path foo/Cargo.toml")
+        .cwd(p.root().parent().unwrap())
+        .run();
+    assert!(p.bin("foo").is_file());
+}
+
+// An empty Cargo.toml is treated as a virtual manifest and rejected
+// because it lacks a [workspace] section.
+#[test]
+fn cargo_compile_with_invalid_manifest() {
+    let p = project().file("Cargo.toml", "").build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  virtual manifests must be configured with [workspace]
+",
+        ).run();
+}
+
+// `foo = bar` is not valid TOML (bare identifier as a value); the TOML
+// parse error is surfaced with the offending line number.
+#[test]
+fn cargo_compile_with_invalid_manifest2() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r"
+            [project]
+            foo = bar
+        ",
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  could not parse input as TOML
+
+Caused by:
+  invalid number at line 3
+",
+        ).run();
+}
+
+// Same invalid-TOML failure when the broken manifest is selected via
+// `--manifest-path` rather than being the default Cargo.toml.
+#[test]
+fn cargo_compile_with_invalid_manifest3() {
+    let p = project().file("src/Cargo.toml", "a = bar").build();
+
+    p.cargo("build --manifest-path src/Cargo.toml")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  could not parse input as TOML
+
+Caused by:
+  invalid number at line 1
+",
+        ).run();
+}
+
+// src/main.rs serves both as the [lib] path (dylib) and as the implied
+// bin target: cargo warns about the file being in multiple build
+// targets but still completes the build.
+#[test]
+fn cargo_compile_duplicate_build_targets() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "main"
+            path = "src/main.rs"
+            crate-type = ["dylib"]
+
+            [dependencies]
+        "#,
+        ).file("src/main.rs", "#![allow(warnings)] fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: file found to be present in multiple build targets: [..]main.rs
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// A two-component version ("1.0") is rejected while parsing the
+// manifest: semver requires major.minor.patch.
+#[test]
+fn cargo_compile_with_invalid_version() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("foo", "1.0"))
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Expected dot for key `package.version`
+",
+        ).run();
+}
+
+// An empty string is not a valid package name.
+#[test]
+fn cargo_compile_with_empty_package_name() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("", "0.0.0"))
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  package name cannot be an empty string
+",
+        ).run();
+}
+
+// Package names may not contain `:`; the error pinpoints the invalid
+// character.
+#[test]
+fn cargo_compile_with_invalid_package_name() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0"))
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Invalid character `:` in package name: `foo::bar`
+",
+        ).run();
+}
+
+// A [[bin]] target with an empty name is a manifest error.
+#[test]
+fn cargo_compile_with_invalid_bin_target_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [[bin]]
+            name = ""
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  binary target names cannot be empty
+",
+        ).run();
+}
+
+// `build` is a reserved binary target name (it would collide with the
+// `build.rs` build-script convention), so the manifest is rejected.
+#[test]
+fn cargo_compile_with_forbidden_bin_target_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [[bin]]
+            name = "build"
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  the binary target name `build` is forbidden
+",
+        ).run();
+}
+
+// `crate-type` is only meaningful for library targets; setting it on a
+// [[bin]] target is a manifest error that names the offending target.
+#[test]
+fn cargo_compile_with_bin_and_crate_type() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [[bin]]
+            name = "the_foo_bin"
+            path = "src/foo.rs"
+            crate-type = ["cdylib", "rlib"]
+        "#,
+        ).file("src/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  the target `the_foo_bin` is a binary and can't have any crate-types set \
+(currently \"cdylib, rlib\")",
+        ).run();
+}
+
+// Likewise, `proc-macro = true` is only valid on a library target, never on
+// a [[bin]] target.
+#[test]
+fn cargo_compile_with_bin_and_proc() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [[bin]]
+            name = "the_foo_bin"
+            path = "src/foo.rs"
+            proc-macro = true
+        "#,
+        ).file("src/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`",
+        ).run();
+}
+
+// A [lib] target with an empty name is a manifest error, mirroring the
+// empty-binary-name case above.
+#[test]
+fn cargo_compile_with_invalid_lib_target_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [lib]
+            name = ""
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  library target names cannot be empty
+",
+        ).run();
+}
+
+// A dependency version requirement that is not parseable semver (`"y"`)
+// fails manifest parsing, and the error chain names both the requirement
+// and the dependency it belongs to.
+#[test]
+fn cargo_compile_with_invalid_non_numeric_dep_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            crossbeam = "y"
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+  failed to parse the version requirement `y` for dependency `crossbeam`
+
+Caused by:
+  the given version requirement is invalid
+",
+        ).run();
+}
+
+// With no Cargo.toml anywhere up the directory tree, `cargo build` reports
+// that no manifest could be found.
+#[test]
+fn cargo_compile_without_manifest() {
+    let p = project().no_manifest().build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory")
+        .run();
+}
+
+// Broken Rust source fails the build, but a Cargo.lock is still written:
+// dependency resolution happens (and is persisted) before compilation.
+#[test]
+fn cargo_compile_with_invalid_code() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", "invalid rust code!")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] Could not compile `foo`.
+
+To learn more, run the command again with --verbose.\n",
+        ).run();
+    // Resolution ran before rustc failed, so the lockfile must exist.
+    assert!(p.root().join("Cargo.lock").is_file());
+}
+
+// Broken code in path dependencies (and in the root) still produces a plain
+// failing build; this only asserts the exit status, not the error text.
+#[test]
+fn cargo_compile_with_invalid_code_in_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file("src/main.rs", "invalid rust code!")
+        .build();
+    // Sibling path deps, both also containing invalid code.
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_bin_manifest("bar"))
+        .file("src/lib.rs", "invalid rust code!")
+        .build();
+    let _baz = project()
+        .at("baz")
+        .file("Cargo.toml", &basic_bin_manifest("baz"))
+        .file("src/lib.rs", "invalid rust code!")
+        .build();
+    p.cargo("build").with_status(101).run();
+}
+
+// rustc warnings from the root package (dead code here) are forwarded to
+// Cargo's stderr.
+#[test]
+fn cargo_compile_with_warnings_in_the_root_package() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", "fn main() {} fn dead() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr_contains("[..]function is never used: `dead`[..]")
+        .run();
+}
+
+// Warnings coming from a (path) dependency are also surfaced, and the build
+// still succeeds and produces a runnable binary.
+#[test]
+fn cargo_compile_with_warnings_in_a_dep_package() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+        ).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "bar/src/bar.rs",
+            r#"
+            pub fn gimme() -> &'static str {
+                "test passed"
+            }
+
+            fn dead() {}
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr_contains("[..]function is never used: `dead`[..]")
+        .run();
+
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+// Transitive path deps (foo -> bar -> baz) with *inferred* lib targets
+// (src/lib.rs, no [lib] section): the final binary runs and intermediate
+// rlibs are not copied into the bin directory.
+#[test]
+fn cargo_compile_with_nested_deps_inferred() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = 'bar'
+
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            extern crate baz;
+
+            pub fn gimme() -> String {
+                baz::gimme()
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
+        .file(
+            "baz/src/lib.rs",
+            r#"
+            pub fn gimme() -> String {
+                "test passed".to_string()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+    // Library artifacts stay in deps/, not next to the binary.
+    assert!(!p.bin("libbar.rlib").is_file());
+    assert!(!p.bin("libbaz.rlib").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+// Same nested-deps scenario, but with the root binary at the conventional
+// src/main.rs location instead of src/foo.rs.
+#[test]
+fn cargo_compile_with_nested_deps_correct_bin() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            extern crate baz;
+
+            pub fn gimme() -> String {
+                baz::gimme()
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
+        .file(
+            "baz/src/lib.rs",
+            r#"
+            pub fn gimme() -> String {
+                "test passed".to_string()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("libbar.rlib").is_file());
+    assert!(!p.bin("libbaz.rlib").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+// Nested deps where each library declares an explicit [lib] name and uses
+// the old src/<name>.rs layout; root binary target is inferred.
+#[test]
+fn cargo_compile_with_nested_deps_shorthand() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.baz]
+            path = "../baz"
+
+            [lib]
+
+            name = "bar"
+        "#,
+        ).file(
+            "bar/src/bar.rs",
+            r#"
+            extern crate baz;
+
+            pub fn gimme() -> String {
+                baz::gimme()
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file(
+            "baz/src/baz.rs",
+            r#"
+            pub fn gimme() -> String {
+                "test passed".to_string()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("libbar.rlib").is_file());
+    assert!(!p.bin("libbaz.rlib").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+// Fully spelled-out variant: path deps also carry explicit `version`
+// requirements and every target ([lib], [[bin]]) is declared by hand.
+#[test]
+fn cargo_compile_with_nested_deps_longhand() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+            version = "0.5.0"
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+        ).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.baz]
+            path = "../baz"
+            version = "0.5.0"
+
+            [lib]
+
+            name = "bar"
+        "#,
+        ).file(
+            "bar/src/bar.rs",
+            r#"
+            extern crate baz;
+
+            pub fn gimme() -> String {
+                baz::gimme()
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file(
+            "baz/src/baz.rs",
+            r#"
+            pub fn gimme() -> String {
+                "test passed".to_string()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("libbar.rlib").is_file());
+    assert!(!p.bin("libbaz.rlib").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+}
+
+// Check that Cargo gives a sensible error if a dependency can't be found
+// because of a name mismatch.
+// (The manifest asks for `notquitebar` at path "bar", but the package there
+// is actually named `bar`.)
+#[test]
+fn cargo_compile_with_dep_name_mismatch() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "foo"
+            version = "0.0.1"
+            authors = ["wycats@example.com"]
+
+            [[bin]]
+
+            name = "foo"
+
+            [dependencies.notquitebar]
+
+            path = "bar"
+        "#,
+        ).file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"]))
+        .file("bar/Cargo.toml", &basic_bin_manifest("bar"))
+        .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[]))
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            r#"error: no matching package named `notquitebar` found
+location searched: [CWD]/bar
+required by package `foo v0.0.1 ([CWD])`
+"#,
+        ).run();
+}
+
+// `--bin`/`--example` take target *names*, not filenames. Passing `a.rs`
+// fails, and when the name minus extension matches a real target, Cargo
+// suggests it ("Did you mean `a`?").
+#[test]
+fn cargo_compile_with_filename() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "src/bin/a.rs",
+            r#"
+            extern crate foo;
+            fn main() { println!("hello a.rs"); }
+        "#,
+        ).file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+        .build();
+
+    // No near-miss target, so no suggestion is offered.
+    p.cargo("build --bin bin.rs")
+        .with_status(101)
+        .with_stderr("[ERROR] no bin target named `bin.rs`")
+        .run();
+
+    p.cargo("build --bin a.rs")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] no bin target named `a.rs`
+
+Did you mean `a`?",
+        ).run();
+
+    p.cargo("build --example example.rs")
+        .with_status(101)
+        .with_stderr("[ERROR] no example target named `example.rs`")
+        .run();
+
+    p.cargo("build --example a.rs")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] no example target named `a.rs`
+
+Did you mean `a`?",
+        ).run();
+}
+
+// A pure path dependency needs no registry access, so `-Zoffline` (a
+// nightly-only flag, hence masquerade_as_nightly_cargo) builds successfully.
+#[test]
+fn cargo_compile_path_with_offline() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
+
+// A registry dependency that is already in the local download cache (warmed
+// by a first, online build of a throwaway project) can be built offline by a
+// second project depending on the same version.
+#[test]
+fn cargo_compile_with_downloaded_dependency_with_offline() {
+    Package::new("present_dep", "1.2.3")
+        .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3"))
+        .file("src/lib.rs", "")
+        .publish();
+
+    {
+        // make package downloaded
+        let p = project()
+            .file(
+                "Cargo.toml",
+                r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            present_dep = "1.2.3"
+        "#,
+            ).file("src/lib.rs", "")
+            .build();
+        p.cargo("build").run();
+    }
+
+    let p2 = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+
+            [dependencies]
+            present_dep = "1.2.3"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p2.cargo("build -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] present_dep v1.2.3
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+        ).run();
+}
+
+// Offline mode must not attempt a registry update: an uncached dependency
+// fails resolution with a reminder that -Z offline is in effect.
+#[test]
+fn cargo_compile_offline_not_try_update() {
+    let p = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+
+            [dependencies]
+            not_cached_dep = "1.2.5"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    // NOTE(review): "may with to retry" below looks like a typo for "may
+    // wish to retry", but it must match cargo's actual output at this
+    // version — fix it in the cargo source first, not here.
+    p.cargo("build -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+error: no matching package named `not_cached_dep` found
+location searched: registry `[..]`
+required by package `bar v0.1.0 ([..])`
+As a reminder, you're using offline mode (-Z offline) \
+which can sometimes cause surprising resolution failures, \
+if this error is too confusing you may with to retry \
+without the offline flag.",
+        ).run();
+}
+
+// Offline resolution picks the best *cached* version (1.2.3), even though a
+// newer matching version (1.2.5) exists in the registry but was never
+// downloaded.
+#[test]
+fn compile_offline_without_maxvers_cached() {
+    Package::new("present_dep", "1.2.1").publish();
+    Package::new("present_dep", "1.2.2").publish();
+
+    Package::new("present_dep", "1.2.3")
+        .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3"))
+        .file(
+            "src/lib.rs",
+            r#"pub fn get_version()->&'static str {"1.2.3"}"#,
+        ).publish();
+
+    // NOTE(review): this 1.2.5 stub would not compile as written (missing
+    // return type), but the test relies on 1.2.5 never being selected or
+    // built offline, so it is never compiled.
+    Package::new("present_dep", "1.2.5")
+        .file("Cargo.toml", &basic_manifest("present_dep", "1.2.5"))
+        .file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#)
+        .publish();
+
+    {
+        // make package cached
+        let p = project()
+            .file(
+                "Cargo.toml",
+                r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            present_dep = "=1.2.3"
+        "#,
+            ).file("src/lib.rs", "")
+            .build();
+        p.cargo("build").run();
+    }
+
+    let p2 = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            present_dep = "1.2"
+        "#,
+        ).file(
+            "src/main.rs",
+            "\
+extern crate present_dep;
+fn main(){
+    println!(\"{}\", present_dep::get_version());
+}",
+        ).build();
+
+    p2.cargo("run -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] present_dep v1.2.3
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+     Running `[..]`",
+        ).with_stdout("1.2.3")
+        .run();
+}
+
+// Conflicting transitive requirements on `bad` (baz pins =1.0.0, qux needs
+// >=1.0.1) cannot be satisfied; the error walks the dependency chain and
+// lists the candidate versions.
+#[test]
+fn incompatible_dependencies() {
+    Package::new("bad", "0.1.0").publish();
+    Package::new("bad", "1.0.0").publish();
+    Package::new("bad", "1.0.1").publish();
+    Package::new("bad", "1.0.2").publish();
+    Package::new("bar", "0.1.0").dep("bad", "0.1.0").publish();
+    Package::new("baz", "0.1.1").dep("bad", "=1.0.0").publish();
+    Package::new("baz", "0.1.0").dep("bad", "=1.0.0").publish();
+    Package::new("qux", "0.1.2").dep("bad", ">=1.0.1").publish();
+    Package::new("qux", "0.1.1").dep("bad", ">=1.0.1").publish();
+    Package::new("qux", "0.1.0").dep("bad", ">=1.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = "0.1.0"
+            baz = "0.1.0"
+            qux = "0.1.0"
+        "#,
+        ).file("src/main.rs", "fn main(){}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to select a version for `bad`.
+    ... required by package `qux v0.1.0`
+    ... which is depended on by `foo v0.0.1 ([..])`
+versions that meet the requirements `>= 1.0.1` are: 1.0.2, 1.0.1
+
+all possible versions conflict with previously selected packages.
+
+  previously selected package `bad v1.0.0`
+    ... which is depended on by `baz v0.1.0`
+    ... which is depended on by `foo v0.0.1 ([..])`
+
+failed to select a version for `bad` which could resolve this conflict",
+        ).run();
+}
+
+// Like the previous test, but the root's own multi-clause requirement
+// (">=1.0.1, <=2.0.0") conflicts with *two* previously selected versions of
+// `bad`; the error must report both conflicts.
+#[test]
+fn incompatible_dependencies_with_multi_semver() {
+    Package::new("bad", "1.0.0").publish();
+    Package::new("bad", "1.0.1").publish();
+    Package::new("bad", "2.0.0").publish();
+    Package::new("bad", "2.0.1").publish();
+    Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish();
+    Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = "0.1.0"
+            baz = "0.1.0"
+            bad = ">=1.0.1, <=2.0.0"
+        "#,
+        ).file("src/main.rs", "fn main(){}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to select a version for `bad`.
+    ... required by package `foo v0.0.1 ([..])`
+versions that meet the requirements `>= 1.0.1, <= 2.0.0` are: 2.0.0, 1.0.1
+
+all possible versions conflict with previously selected packages.
+
+  previously selected package `bad v2.0.1`
+    ... which is depended on by `baz v0.1.0`
+    ... which is depended on by `foo v0.0.1 ([..])`
+
+  previously selected package `bad v1.0.0`
+    ... which is depended on by `bar v0.1.0`
+    ... which is depended on by `foo v0.0.1 ([..])`
+
+failed to select a version for `bad` which could resolve this conflict",
+        ).run();
+}
+
+// Simulate a partially-populated cache: baz's .crate archive is truncated to
+// zero bytes before the first (failing) online build, so only bar gets
+// downloaded. The later offline build must then fail to find baz, even
+// though the archive has been restored, because it was never cached.
+#[test]
+fn compile_offline_while_transitive_dep_not_cached() {
+    let baz = Package::new("baz", "1.0.0");
+    let baz_path = baz.archive_dst();
+    baz.publish();
+
+    let mut content = Vec::new();
+
+    // Save the archive bytes, then truncate the file in place.
+    let mut file = File::open(baz_path.clone()).ok().unwrap();
+    let _ok = file.read_to_end(&mut content).ok().unwrap();
+    drop(file);
+    drop(File::create(baz_path.clone()).ok().unwrap());
+
+    Package::new("bar", "0.1.0").dep("baz", "1.0.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/main.rs", "fn main(){}")
+        .build();
+
+    // simulate download bar, but fail to download baz
+    p.cargo("build").with_status(101).run();
+
+    // Restore the archive so the registry itself is intact again.
+    drop(File::create(baz_path).ok().unwrap().write_all(&content));
+
+    // NOTE(review): "may with to retry" matches cargo's actual (typo'd)
+    // output at this version; keep in sync with the cargo source.
+    p.cargo("build -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+error: no matching package named `baz` found
+location searched: registry `[..]`
+required by package `bar v0.1.0`
+    ... which is depended on by `foo v0.0.1 ([CWD])`
+As a reminder, you're using offline mode (-Z offline) \
+which can sometimes cause surprising resolution failures, \
+if this error is too confusing you may with to retry \
+without the offline flag.",
+        ).run();
+}
+
+// Bumping the version of a path dependency between builds is fine: the
+// lockfile is updated automatically and the rebuild succeeds.
+#[test]
+fn compile_path_dep_then_change_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    // Rewrite bar's manifest with a new version in place.
+    File::create(&p.root().join("bar/Cargo.toml"))
+        .unwrap()
+        .write_all(basic_manifest("bar", "0.0.2").as_bytes())
+        .unwrap();
+
+    p.cargo("build").run();
+}
+
+// A Cargo.lock rewritten with CRLF line endings (as some tools/platforms
+// produce) must still parse on the next build.
+#[test]
+fn ignores_carriage_return_in_lockfile() {
+    let p = project()
+        .file("src/main.rs", r"mod a; fn main() {}")
+        .file("src/a.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    // Read the generated lockfile, convert LF -> CRLF, and write it back.
+    let lockfile = p.root().join("Cargo.lock");
+    let mut lock = String::new();
+    File::open(&lockfile)
+        .unwrap()
+        .read_to_string(&mut lock)
+        .unwrap();
+    let lock = lock.replace("\n", "\r\n");
+    File::create(&lockfile)
+        .unwrap()
+        .write_all(lock.as_bytes())
+        .unwrap();
+    p.cargo("build").run();
+}
+
+// Ensure that path dep + dylib + env_var get metadata
+// (even though path_dep + dylib should not)
+//
+// The two -v runs differ only in the `--extern bar=` filename: without the
+// env var the dylib has no hash suffix ({prefix}bar{suffix}); with
+// __CARGO_DEFAULT_LIB_METADATA set it gains one ({prefix}bar-[..]{suffix}).
+#[test]
+fn cargo_default_env_metadata_env_var() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "// hi")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            crate_type = ["dylib"]
+        "#,
+        ).file("bar/src/lib.rs", "// hello")
+        .build();
+
+    // No metadata on libbar since it's a dylib path dependency
+    p.cargo("build -v")
+        .with_stderr(&format!(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \
+        --emit=dep-info,link \
+        -C prefer-dynamic -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C metadata=[..] \
+        -C extra-filename=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps \
+        --extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+            prefix = env::consts::DLL_PREFIX,
+            suffix = env::consts::DLL_SUFFIX,
+        )).run();
+
+    p.cargo("clean").run();
+
+    // If you set the env-var, then we expect metadata on libbar
+    p.cargo("build -v")
+        .env("__CARGO_DEFAULT_LIB_METADATA", "stable")
+        .with_stderr(&format!(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \
+        --emit=dep-info,link \
+        -C prefer-dynamic -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C metadata=[..] \
+        -C extra-filename=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps \
+        --extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            prefix = env::consts::DLL_PREFIX,
+            suffix = env::consts::DLL_SUFFIX,
+        )).run();
+}
+
+// The CARGO_PKG_* env vars (version components, name, homepage, repository,
+// description, manifest dir) are available at compile time via env!() in
+// both the bin and the lib, and agree with the manifest.
+#[test]
+fn crate_env_vars() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+        [project]
+        name = "foo"
+        version = "0.5.1-alpha.1"
+        description = "This is foo"
+        homepage = "http://example.com"
+        repository = "http://example.com/repo.git"
+        authors = ["wycats@example.com"]
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate foo;
+
+
+            static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR");
+            static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR");
+            static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH");
+            static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE");
+            static VERSION: &'static str = env!("CARGO_PKG_VERSION");
+            static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR");
+            static PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
+            static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE");
+            static REPOSITORY: &'static str = env!("CARGO_PKG_REPOSITORY");
+            static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION");
+
+            fn main() {
+                let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR,
+                                VERSION_MINOR, VERSION_PATCH, VERSION_PRE,
+                                CARGO_MANIFEST_DIR);
+                 assert_eq!(s, foo::version());
+                 println!("{}", s);
+                 assert_eq!("foo", PKG_NAME);
+                 assert_eq!("http://example.com", HOMEPAGE);
+                 assert_eq!("http://example.com/repo.git", REPOSITORY);
+                 assert_eq!("This is foo", DESCRIPTION);
+                let s = format!("{}.{}.{}-{}", VERSION_MAJOR,
+                                VERSION_MINOR, VERSION_PATCH, VERSION_PRE);
+                assert_eq!(s, VERSION);
+            }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn version() -> String {
+                format!("{}-{}-{} @ {} in {}",
+                        env!("CARGO_PKG_VERSION_MAJOR"),
+                        env!("CARGO_PKG_VERSION_MINOR"),
+                        env!("CARGO_PKG_VERSION_PATCH"),
+                        env!("CARGO_PKG_VERSION_PRE"),
+                        env!("CARGO_MANIFEST_DIR"))
+            }
+        "#,
+        ).build();
+
+    println!("build");
+    p.cargo("build -v").run();
+
+    // Run the built binary: its own asserts plus the expected stdout check.
+    println!("bin");
+    p.process(&p.bin("foo")).with_stdout("0-5-1 @ alpha.1 in [CWD]").run();
+
+    println!("test");
+    p.cargo("test -v").run();
+}
+
+// CARGO_PKG_AUTHORS joins the manifest's authors list with `:` and is
+// visible to env!() in both the bin and the lib.
+#[test]
+fn crate_authors_env_vars() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.1-alpha.1"
+            authors = ["wycats@example.com", "neikos@example.com"]
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate foo;
+
+            static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS");
+
+            fn main() {
+                let s = "wycats@example.com:neikos@example.com";
+                assert_eq!(AUTHORS, foo::authors());
+                println!("{}", AUTHORS);
+                assert_eq!(s, AUTHORS);
+            }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn authors() -> String {
+                format!("{}", env!("CARGO_PKG_AUTHORS"))
+            }
+        "#,
+        ).build();
+
+    println!("build");
+    p.cargo("build -v").run();
+
+    println!("bin");
+    p.process(&p.bin("foo"))
+        .with_stdout("wycats@example.com:neikos@example.com")
+        .run();
+
+    println!("test");
+    p.cargo("test -v").run();
+}
+
+// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error
+//
+// Helper: strips empty components from the host's dynamic-library search
+// path env var (LD_LIBRARY_PATH / DYLD_* / PATH depending on platform) on
+// the given Execs, so the tests below see a clean baseline.
+fn setenv_for_removing_empty_component(mut execs: Execs) -> Execs {
+    let v = dylib_path_envvar();
+    if let Ok(search_path) = env::var(v) {
+        let new_search_path =
+            env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()))
+                .expect("join_paths");
+        execs.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly
+    }
+    execs
+}
+
+// Regression test for #4277
+// The compiled binary asserts that the dylib search path cargo passes it
+// contains no empty components.
+#[test]
+fn crate_library_path_env_var() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            &format!(
+                r##"
+            fn main() {{
+                let search_path = env!("{}");
+                let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                assert!(!paths.contains(&"".into()));
+            }}
+        "##,
+                dylib_path_envvar()
+            ),
+        ).build();
+
+    setenv_for_removing_empty_component(p.cargo("run")).run();
+}
+
+// Regression test for #4277
+// A bogus `libc.so.6` in the package root must not be picked up via an
+// empty search-path component during the build.
+#[test]
+fn build_with_fake_libc_not_loading() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .file("libc.so.6", r#""#)
+        .build();
+
+    setenv_for_removing_empty_component(p.cargo("build")).run();
+}
+
+// this is testing that src/<pkg-name>.rs still works (for now)
+// The deprecated location must still build both crate-types but emit a
+// rename-to-src/lib.rs warning.
+#[test]
+fn many_crate_types_old_style_lib_location() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+
+            name = "foo"
+            crate_type = ["rlib", "dylib"]
+        "#,
+        ).file("src/foo.rs", "pub fn foo() {}")
+        .build();
+    p.cargo("build")
+        .with_stderr_contains(
+            "\
+[WARNING] path `[..]src/foo.rs` was erroneously implicitly accepted for library `foo`,
+please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
+        ).run();
+
+    // Both requested crate-types must have been produced.
+    assert!(p.root().join("target/debug/libfoo.rlib").is_file());
+    let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+    assert!(p.root().join("target/debug").join(&fname).is_file());
+}
+
+// A [lib] with crate_type = ["rlib", "dylib"] produces both artifacts
+// from the standard src/lib.rs location (no warning expected).
+#[test]
+fn many_crate_types_correct() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+
+            name = "foo"
+            crate_type = ["rlib", "dylib"]
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .build();
+    p.cargo("build").run();
+
+    assert!(p.root().join("target/debug/libfoo.rlib").is_file());
+    let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+    assert!(p.root().join("target/debug").join(&fname).is_file());
+}
+
+// A package that path-depends on itself must be rejected as a cyclic
+// dependency (exit status 101 with a cycle diagnostic).
+#[test]
+fn self_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.test]
+
+            path = "."
+
+            [lib]
+            name = "test"
+            path = "src/test.rs"
+        "#,
+        ).file("src/test.rs", "fn main() {}")
+        .build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle:
+package `test v0.0.0 ([CWD])`",
+        ).run();
+}
+
+// A dangling symlink in the package root must not break the build.
+#[test]
+fn ignore_broken_symlinks() {
+    // windows and symlinks don't currently agree that well
+    if cfg!(windows) {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .symlink("Notafile", "bar")
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
+}
+
+// A manifest with no targets at all must fail with a "no targets
+// specified" manifest-parse error.
+#[test]
+fn missing_lib_and_bin() {
+    let p = project().build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]Cargo.toml`
+
+Caused by:
+  no targets specified in the manifest
+  either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n",
+        ).run();
+}
+
+// `[profile.release] lto = true` must pass `-C lto` to rustc in a
+// release build.
+#[test]
+fn lto_build() {
+    // FIXME: currently this hits a linker bug on 32-bit MSVC
+    if cfg!(all(target_env = "msvc", target_pointer_width = "32")) {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [profile.release]
+            lto = true
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("build -v --release")
+        .with_stderr(
+            "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/main.rs --color never --crate-type bin \
+        --emit=dep-info,link \
+        -C opt-level=3 \
+        -C lto \
+        -C metadata=[..] \
+        --out-dir [CWD]/target/release/deps \
+        -L dependency=[CWD]/target/release/deps`
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+// `build -v` must print the exact rustc invocation for a dev build
+// (debuginfo=2, no opt-level).
+#[test]
+fn verbose_build() {
+    let p = project().file("src/lib.rs", "").build();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// `build -v --release` must print the rustc invocation for a release
+// build (opt-level=3, no debuginfo flag).
+#[test]
+fn verbose_release_build() {
+    let p = project().file("src/lib.rs", "").build();
+    p.cargo("build -v --release")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C opt-level=3 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/release/deps`
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+// A release build of a crate with a dylib+rlib path dependency must
+// pass `--extern` twice (once per artifact of the dependency) and
+// `-C prefer-dynamic` when compiling the dylib.
+#[test]
+fn verbose_release_build_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.foo]
+            path = "foo"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate_type = ["dylib", "rlib"]
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+    p.cargo("build -v --release")
+        .with_stderr(&format!(
+            "\
+[COMPILING] foo v0.0.0 ([CWD]/foo)
+[RUNNING] `rustc --crate-name foo foo/src/lib.rs --color never \
+        --crate-type dylib --crate-type rlib \
+        --emit=dep-info,link \
+        -C prefer-dynamic \
+        -C opt-level=3 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/release/deps`
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C opt-level=3 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/release/deps \
+        --extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \
+        --extern foo=[CWD]/target/release/deps/libfoo.rlib`
+[FINISHED] release [optimized] target(s) in [..]
+",
+            // Platform-specific dylib file naming (e.g. "lib"/".so" on Linux).
+            prefix = env::consts::DLL_PREFIX,
+            suffix = env::consts::DLL_SUFFIX
+        )).run();
+}
+
+// [[example]] entries with explicit non-default paths must be built
+// and runnable, linking against the package's lib.
+#[test]
+fn explicit_examples() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [lib]
+            name = "foo"
+            path = "src/lib.rs"
+
+            [[example]]
+            name = "hello"
+            path = "examples/ex-hello.rs"
+
+            [[example]]
+            name = "goodbye"
+            path = "examples/ex-goodbye.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn get_hello() -> &'static str { "Hello" }
+            pub fn get_goodbye() -> &'static str { "Goodbye" }
+            pub fn get_world() -> &'static str { "World" }
+        "#,
+        ).file(
+            "examples/ex-hello.rs",
+            r#"
+            extern crate foo;
+            fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); }
+        "#,
+        ).file(
+            "examples/ex-goodbye.rs",
+            r#"
+            extern crate foo;
+            fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); }
+        "#,
+        ).build();
+
+    p.cargo("test -v").run();
+    p.process(&p.bin("examples/hello"))
+        .with_stdout("Hello, World!\n")
+        .run();
+    p.process(&p.bin("examples/goodbye"))
+        .with_stdout("Goodbye, World!\n")
+        .run();
+}
+
+// An [[example]] whose source file cannot be found must fail manifest
+// parsing with a "specify example.path" hint.
+#[test]
+fn non_existing_example() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [lib]
+            name = "foo"
+            path = "src/lib.rs"
+
+            [[example]]
+            name = "hello"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("test -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  can't find `hello` example, specify example.path",
+        ).run();
+}
+
+// A manifest declaring bin `foo` with no matching source file (only
+// src/bin/ehlo.rs exists) must fail with a "specify bin.path" hint.
+#[test]
+fn non_existing_binary() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file("src/bin/ehlo.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  can't find `foo` bin, specify bin.path",
+        ).run();
+}
+
+// Legacy implicit binary-path resolution (src/main.rs, src/bin/main.rs,
+// src/<name>.rs for a [[bin]] with a different name) still builds but
+// must emit a "set bin.path" deprecation warning in each case.
+#[test]
+fn legacy_binary_paths_warnings() {
+    // Case 1: [[bin]] "bar" resolved from src/main.rs.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr_contains(
+            "\
+[WARNING] path `[..]src/main.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+        ).run();
+
+    // Case 2: [[bin]] "bar" resolved from src/bin/main.rs.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/bin/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr_contains(
+            "\
+[WARNING] path `[..]src/bin/main.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+        ).run();
+
+    // Case 3: [[bin]] "bar" resolved from src/bar.rs.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "1.0.0"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/bar.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr_contains(
+            "\
+[WARNING] path `[..]src/bar.rs` was erroneously implicitly accepted for binary `bar`,
+please set bin.path in Cargo.toml",
+        ).run();
+}
+
+// Files under examples/ must be discovered as example targets without
+// any [[example]] manifest entries, and must link against the lib.
+#[test]
+fn implicit_examples() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn get_hello() -> &'static str { "Hello" }
+            pub fn get_goodbye() -> &'static str { "Goodbye" }
+            pub fn get_world() -> &'static str { "World" }
+        "#,
+        ).file(
+            "examples/hello.rs",
+            r#"
+            extern crate foo;
+            fn main() {
+                println!("{}, {}!", foo::get_hello(), foo::get_world());
+            }
+        "#,
+        ).file(
+            "examples/goodbye.rs",
+            r#"
+            extern crate foo;
+            fn main() {
+                println!("{}, {}!", foo::get_goodbye(), foo::get_world());
+            }
+        "#,
+        ).build();
+
+    p.cargo("test").run();
+    p.process(&p.bin("examples/hello"))
+        .with_stdout("Hello, World!\n")
+        .run();
+    p.process(&p.bin("examples/goodbye"))
+        .with_stdout("Goodbye, World!\n")
+        .run();
+}
+
+// A default (dev) build keeps debug_assertions enabled, so the binary
+// takes the cfg!(debug_assertions) branch and prints "slow".
+#[test]
+fn standard_build_no_ndebug() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            fn main() {
+                if cfg!(debug_assertions) {
+                    println!("slow")
+                } else {
+                    println!("fast")
+                }
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+    p.process(&p.bin("foo")).with_stdout("slow\n").run();
+}
+
+// A release build disables debug_assertions, so the binary takes the
+// else branch and prints "fast".
+#[test]
+fn release_build_ndebug() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            fn main() {
+                if cfg!(debug_assertions) {
+                    println!("slow")
+                } else {
+                    println!("fast")
+                }
+            }
+        "#,
+        ).build();
+
+    p.cargo("build --release").run();
+    p.process(&p.release_bin("foo")).with_stdout("fast\n").run();
+}
+
+// src/main.rs alone (no manifest [[bin]] entry in the default
+// project() manifest) must be inferred as the package's binary.
+#[test]
+fn inferred_main_bin() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    p.cargo("build").run();
+    p.process(&p.bin("foo")).run();
+}
+
+// Removing a dependency from the manifest while the code still does
+// `extern crate bar` must make the rebuild fail.
+#[test]
+fn deletion_causes_failure() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    // Replace the manifest with one that has no [dependencies.bar].
+    p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1"));
+    p.cargo("build").with_status(101).run();
+}
+
+// An invalid Cargo.toml inside target/ must be ignored by the build.
+#[test]
+fn bad_cargo_toml_in_target_dir() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("target/Cargo.toml", "bad-toml")
+        .build();
+
+    p.cargo("build").run();
+    p.process(&p.bin("foo")).run();
+}
+
+// A package named after a well-known crate ("syntax") must still build
+// its own lib and bin normally.
+#[test]
+fn lib_with_standard_name() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
+        .file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "extern crate syntax; fn main() { syntax::foo() }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// A staticlib-only crate must build; RUST_LOG is set to exercise the
+// regression in #1381 (see inline comment).
+#[test]
+fn simple_staticlib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+              [package]
+              name = "foo"
+              authors = []
+              version = "0.0.1"
+
+              [lib]
+              name = "foo"
+              crate-type = ["staticlib"]
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    // env var is a test for #1381
+    p.cargo("build").env("RUST_LOG", "nekoneko=trace").run();
+}
+
+// A crate producing both staticlib and rlib, plus a bin that links the
+// lib, must build cleanly.
+#[test]
+fn staticlib_rlib_and_bin() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+              [package]
+              name = "foo"
+              authors = []
+              version = "0.0.1"
+
+              [lib]
+              name = "foo"
+              crate-type = ["staticlib", "rlib"]
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file("src/main.rs", "extern crate foo; fn main() { foo::foo(); }")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+// A top-level `bin = []` opts out of binary auto-discovery: the broken
+// src/main.rs must never be compiled.
+#[test]
+fn opt_out_of_bin() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+              bin = []
+
+              [package]
+              name = "foo"
+              authors = []
+              version = "0.0.1"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/main.rs", "bad syntax")
+        .build();
+    p.cargo("build").run();
+}
+
+// A [lib] with an explicit non-default path (src/bar.rs) must build.
+#[test]
+fn single_lib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+              [package]
+              name = "foo"
+              authors = []
+              version = "0.0.1"
+
+              [lib]
+              name = "foo"
+              path = "src/bar.rs"
+        "#,
+        ).file("src/bar.rs", "")
+        .build();
+    p.cargo("build").run();
+}
+
+// Touching a file matched by the manifest's `exclude` glob must not
+// invalidate freshness: no rebuild happens after modifying it.
+#[test]
+fn freshness_ignores_excluded() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+            exclude = ["src/b*.rs"]
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+        .build();
+    // Backdate mtimes so later file creation registers as "newer".
+    foo.root().move_into_the_past();
+
+    foo.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Smoke test to make sure it doesn't compile again
+    println!("first pass");
+    foo.cargo("build").with_stdout("").run();
+
+    // Modify an ignored file and make sure we don't rebuild
+    println!("second pass");
+    File::create(&foo.root().join("src/bar.rs")).unwrap();
+    foo.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn rebuild_preserves_out_dir() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = 'build.rs'
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::fs::File;
+            use std::path::Path;
+
+            fn main() {
+                let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo");
+                if env::var_os("FIRST").is_some() {
+                    File::create(&path).unwrap();
+                } else {
+                    File::create(&path).unwrap();
+                }
+            }
+        "#,
+        ).file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+        .build();
+    foo.root().move_into_the_past();
+
+    foo.cargo("build")
+        .env("FIRST", "1")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    File::create(&foo.root().join("src/bar.rs")).unwrap();
+    foo.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// A path dependency that has only a bin target (no lib) must not break
+// the dependent crate's build.
+#[test]
+fn dep_no_libs() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.0"))
+        .file("bar/src/main.rs", "")
+        .build();
+    foo.cargo("build").run();
+}
+
+// A lib path containing a space must build, and freshness tracking must
+// still see it as up to date on the second build (empty stdout).
+#[test]
+fn recompile_space_in_name() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [lib]
+            name = "foo"
+            path = "src/my lib.rs"
+        "#,
+        ).file("src/my lib.rs", "")
+        .build();
+    foo.cargo("build").run();
+    foo.root().move_into_the_past();
+    foo.cargo("build").with_stdout("").run();
+}
+
+// An unreadable/untraversable directory (mode 0644) in the package root
+// must not break the build. Unix-only since it relies on POSIX modes.
+#[cfg(unix)]
+#[test]
+fn ignore_bad_directories() {
+    use std::os::unix::prelude::*;
+    let foo = project()
+        .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
+        .file("src/lib.rs", "")
+        .build();
+    let dir = foo.root().join("tmp");
+    fs::create_dir(&dir).unwrap();
+    let stat = fs::metadata(&dir).unwrap();
+    let mut perms = stat.permissions();
+    // Remove the execute bit so the directory cannot be traversed.
+    perms.set_mode(0o644);
+    fs::set_permissions(&dir, perms.clone()).unwrap();
+    foo.cargo("build").run();
+    // Restore permissions so the test dir can be cleaned up.
+    perms.set_mode(0o755);
+    fs::set_permissions(&dir, perms).unwrap();
+}
+
+// An unparseable .cargo/config must abort the build with a chained
+// TOML-parse error report.
+#[test]
+fn bad_cargo_config() {
+    let foo = project()
+        .file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
+        .file("src/lib.rs", "")
+        .file(".cargo/config", "this is not valid toml")
+        .build();
+    foo.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] could not load Cargo configuration
+
+Caused by:
+  could not parse TOML configuration in `[..]`
+
+Caused by:
+  could not parse input as TOML
+
+Caused by:
+  expected an equals, found an identifier at line 1
+",
+        ).run();
+}
+
+// [target.<host>.dependencies / build-dependencies / dev-dependencies]
+// for the current host triple must all be resolved and used by the bin,
+// build script, and test respectively.
+#[test]
+fn cargo_platform_specific_dependency() {
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+
+            [target.{host}.dependencies]
+            dep = {{ path = "dep" }}
+            [target.{host}.build-dependencies]
+            build = {{ path = "build" }}
+            [target.{host}.dev-dependencies]
+            dev = {{ path = "dev" }}
+        "#,
+                host = host
+            ),
+        ).file("src/main.rs", "extern crate dep; fn main() { dep::dep() }")
+        .file(
+            "tests/foo.rs",
+            "extern crate dev; #[test] fn foo() { dev::dev() }",
+        ).file(
+            "build.rs",
+            "extern crate build; fn main() { build::build(); }",
+        ).file("dep/Cargo.toml", &basic_manifest("dep", "0.5.0"))
+        .file("dep/src/lib.rs", "pub fn dep() {}")
+        .file("build/Cargo.toml", &basic_manifest("build", "0.5.0"))
+        .file("build/src/lib.rs", "pub fn build() {}")
+        .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0"))
+        .file("dev/src/lib.rs", "pub fn dev() {}")
+        .build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+    p.cargo("test").run();
+}
+
+// A dependency gated on a non-matching target ("wrong-target") is not
+// built, so the bin's call into `bar` fails to resolve (status 101).
+// Note the dep's lib also references a nonexistent crate `baz`, so it
+// could not compile even if selected.
+#[test]
+fn bad_platform_specific_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [target.wrong-target.dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file(
+            "bar/src/lib.rs",
+            r#"extern crate baz; pub fn gimme() -> String { format!("") }"#,
+        ).build();
+
+    p.cargo("build").with_status(101).run();
+}
+
+// A dependency gated on a non-existing triple is never compiled (its
+// source is deliberately invalid Rust), but it still appears in the
+// generated Cargo.lock.
+#[test]
+fn cargo_platform_specific_dependency_wrong_platform() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [target.non-existing-triplet.dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file(
+            "bar/src/lib.rs",
+            "invalid rust file, should not be compiled",
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+    p.process(&p.bin("foo")).run();
+
+    // The unused platform dep must still be recorded in the lockfile.
+    let loc = p.root().join("Cargo.lock");
+    let mut lockfile = String::new();
+    File::open(&loc)
+        .unwrap()
+        .read_to_string(&mut lockfile)
+        .unwrap();
+    assert!(lockfile.contains("bar"));
+}
+
+// An [[example]] with crate-type = ["lib"] must produce a lib artifact
+// when built via `--example`.
+#[test]
+fn example_as_lib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["lib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "lib").is_file());
+}
+
+// An [[example]] with crate-type = ["rlib"] must produce an rlib
+// artifact when built via `--example`.
+#[test]
+fn example_as_rlib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["rlib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "rlib").is_file());
+}
+
+// An [[example]] with crate-type = ["dylib"] must produce a dylib
+// artifact when built via `--example`.
+#[test]
+fn example_as_dylib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["dylib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "dylib").is_file());
+}
+
+// An [[example]] with crate-type = ["proc-macro"] must produce a
+// proc-macro artifact; requires nightly (feature gate at this era).
+#[test]
+fn example_as_proc_macro() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["proc-macro"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/ex.rs", "#![feature(proc_macro)]")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "proc-macro").is_file());
+}
+
+// A bin and an example sharing the name "foo" must not clobber each
+// other's artifacts under `cargo test --no-run`, on first build and on
+// rebuild.
+#[test]
+fn example_bin_same_name() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("examples/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("test --no-run -v").run();
+
+    assert!(!p.bin("foo").is_file());
+    // We expect a file of the form bin/foo-{metadata_hash}
+    assert!(p.bin("examples/foo").is_file());
+
+    p.cargo("test --no-run -v").run();
+
+    assert!(!p.bin("foo").is_file());
+    // We expect a file of the form bin/foo-{metadata_hash}
+    assert!(p.bin("examples/foo").is_file());
+}
+
+// Deleting the compiled binary after `cargo run` must cause the next
+// `cargo run` to relink/reproduce it rather than fail.
+#[test]
+fn compile_then_delete() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    p.cargo("run -v").run();
+    assert!(p.bin("foo").is_file());
+    if cfg!(windows) {
+        // On windows unlinking immediately after running often fails, so sleep
+        sleep_ms(100);
+    }
+    fs::remove_file(&p.bin("foo")).unwrap();
+    p.cargo("run -v").run();
+}
+
+// A transitive dependency (foo -> aaaaa -> bbbbb) is not directly
+// linkable from foo: `extern crate bbbbb` in foo must fail with a
+// "can't find crate" error.
+#[test]
+fn transitive_dependencies_not_available() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.aaaaa]
+            path = "a"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bbbbb; extern crate aaaaa; fn main() {}",
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "aaaaa"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bbbbb]
+            path = "../b"
+        "#,
+        ).file("a/src/lib.rs", "extern crate bbbbb;")
+        .file("b/Cargo.toml", &basic_manifest("bbbbb", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains("[..] can't find crate for `bbbbb`[..]")
+        .run();
+}
+
+// A two-package dependency cycle (foo -> a -> foo) must be rejected
+// with a cycle diagnostic naming both packages.
+#[test]
+fn cyclic_deps_rejected() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.foo]
+            path = ".."
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+"[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle:
+package `a v0.0.1 ([CWD]/a)`
+    ... which is depended on by `foo v0.0.1 ([CWD])`",
+        ).run();
+}
+
+// Library artifact filenames in target/debug must be predictable (no
+// metadata hash suffix): libfoo.rlib and the platform dylib name.
+#[test]
+fn predictable_filenames() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate-type = ["dylib", "rlib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v").run();
+    assert!(p.root().join("target/debug/libfoo.rlib").is_file());
+    let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+    assert!(p.root().join("target/debug").join(dylib_name).is_file());
+}
+
+// Package "foo-bar" is referenced in code as `foo_bar` (dashes mapped
+// to underscores) while the produced binary keeps the dashed name.
+#[test]
+fn dashes_to_underscores() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1"))
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "extern crate foo_bar; fn main() {}")
+        .build();
+
+    p.cargo("build -v").run();
+    assert!(p.bin("foo-bar").is_file());
+}
+
+// An explicit [lib] name containing a dash ("foo-bar") is invalid and
+// the build must fail.
+#[test]
+fn dashes_in_crate_name_bad() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo-bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/main.rs", "extern crate foo_bar; fn main() {}")
+        .build();
+
+    p.cargo("build -v").with_status(101).run();
+}
+
+// The RUSTC env var overrides the compiler; a nonexistent one must make
+// cargo fail up front with a "could not execute process" error.
+#[test]
+fn rustc_env_var() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build -v")
+        .env("RUSTC", "rustc-that-does-not-exist")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..])
+
+Caused by:
+[..]
+",
+        ).run();
+    assert!(!p.bin("a").is_file());
+}
+
+// Target filtering: `--lib` builds no binaries/examples, and explicit
+// `--bin=X --example=Y` builds only the named targets.
+#[test]
+fn filtering() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("src/bin/b.rs", "fn main() {}")
+        .file("examples/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --lib").run();
+    assert!(!p.bin("a").is_file());
+
+    p.cargo("build --bin=a --example=a").run();
+    assert!(p.bin("a").is_file());
+    assert!(!p.bin("b").is_file());
+    assert!(p.bin("examples/a").is_file());
+    assert!(!p.bin("examples/b").is_file());
+}
+
+// `--bins` builds every binary target (including inferred src/bin/*.rs)
+// but no examples.
+#[test]
+fn filtering_implicit_bins() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("src/bin/b.rs", "fn main() {}")
+        .file("examples/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --bins").run();
+    assert!(p.bin("a").is_file());
+    assert!(p.bin("b").is_file());
+    assert!(!p.bin("examples/a").is_file());
+    assert!(!p.bin("examples/b").is_file());
+}
+
+// `--examples` is the mirror of `--bins`: every example is built, no binaries.
+#[test]
+fn filtering_implicit_examples() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("src/bin/b.rs", "fn main() {}")
+        .file("examples/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --examples").run();
+    assert!(!p.bin("a").is_file());
+    assert!(!p.bin("b").is_file());
+    assert!(p.bin("examples/a").is_file());
+    assert!(p.bin("examples/b").is_file());
+}
+
+// Hidden files under src/bin (here `.a.rs`) must not be inferred as binary
+// targets; the build succeeds despite the dotfile having no fn main.
+#[test]
+fn ignore_dotfile() {
+    let p = project()
+        .file("src/bin/.a.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// Stray Cargo.toml files inside dot-directories (e.g. .git, quilt's .pc) must
+// not be picked up as nested packages.
+#[test]
+fn ignore_dotdirs() {
+    let p = project()
+        .file("src/bin/a.rs", "fn main() {}")
+        .file(".git/Cargo.toml", "")
+        .file(".pc/dummy-fix.patch/Cargo.toml", "")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// A package whose root directory itself starts with a dot is still buildable;
+// the dot-dir filtering must not apply to the project root.
+#[test]
+fn dotdir_root() {
+    let p = ProjectBuilder::new(root().join(".foo"))
+        .file("src/bin/a.rs", "fn main() {}")
+        .build();
+    p.cargo("build").run();
+}
+
+// CARGO_TARGET_DIR redirects build output, and takes precedence over the
+// `build.target-dir` setting in .cargo/config.
+#[test]
+fn custom_target_dir_env() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
+
+    // Env var set: output goes to foo/target, not the default target dir.
+    p.cargo("build").env("CARGO_TARGET_DIR", "foo/target").run();
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(!p.root().join("target/debug").join(&exe_name).is_file());
+
+    // Env var unset: default target dir is used; the earlier output remains.
+    p.cargo("build").run();
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("target/debug").join(&exe_name).is_file());
+
+    // With config pointing at foo/target, the env var (bar/target) still wins.
+    fs::create_dir(p.root().join(".cargo")).unwrap();
+    File::create(p.root().join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            br#"
+        [build]
+        target-dir = "foo/target"
+    "#,
+        ).unwrap();
+    p.cargo("build").env("CARGO_TARGET_DIR", "bar/target").run();
+    assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("target/debug").join(&exe_name).is_file());
+}
+
+// The --target-dir CLI flag redirects output and sits at the top of the
+// precedence chain: flag > CARGO_TARGET_DIR env > .cargo/config > default.
+#[test]
+fn custom_target_dir_line_parameter() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
+
+    // Flag alone: output lands in foo/target only.
+    p.cargo("build --target-dir foo/target").run();
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(!p.root().join("target/debug").join(&exe_name).is_file());
+
+    // No flag: default dir used; previous output remains.
+    p.cargo("build").run();
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("target/debug").join(&exe_name).is_file());
+
+    // Flag (bar/target) overrides the config setting (foo/target).
+    fs::create_dir(p.root().join(".cargo")).unwrap();
+    File::create(p.root().join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            br#"
+        [build]
+        target-dir = "foo/target"
+    "#,
+        ).unwrap();
+    p.cargo("build --target-dir bar/target").run();
+    assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("target/debug").join(&exe_name).is_file());
+
+    // Flag (foobar/target) also overrides the env var (bar/target).
+    p.cargo("build --target-dir foobar/target")
+        .env("CARGO_TARGET_DIR", "bar/target")
+        .run();
+    assert!(
+        p.root()
+            .join("foobar/target/debug")
+            .join(&exe_name)
+            .is_file()
+    );
+    assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
+    assert!(p.root().join("target/debug").join(&exe_name).is_file());
+}
+
+// `-p` may be repeated; `build -p d1 -p d2 -p foo` builds all three packages'
+// binaries into the shared target dir, and each binary runs correctly.
+#[test]
+fn build_multiple_packages() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+            [dependencies.d2]
+                path = "d2"
+
+            [[bin]]
+                name = "foo"
+        "#,
+        ).file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
+        .file("d1/src/lib.rs", "")
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+                name = "d2"
+                doctest = false
+        "#,
+        ).file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+        .build();
+
+    p.cargo("build -p d1 -p d2 -p foo").run();
+
+    assert!(p.bin("foo").is_file());
+    p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
+
+    // Dependency binaries end up in the root package's target/debug dir.
+    let d1_path = &p
+        .build_dir()
+        .join("debug")
+        .join(format!("d1{}", env::consts::EXE_SUFFIX));
+    let d2_path = &p
+        .build_dir()
+        .join("debug")
+        .join(format!("d2{}", env::consts::EXE_SUFFIX));
+
+    assert!(d1_path.is_file());
+    p.process(d1_path).with_stdout("d1").run();
+
+    assert!(d2_path.is_file());
+    p.process(d2_path).with_stdout("d2").run();
+}
+
+// An unknown `-p` package spec errors with "matched no packages" — both when
+// it is the only spec and when mixed with a valid one.
+#[test]
+fn invalid_spec() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+
+            [[bin]]
+                name = "foo"
+        "#,
+        ).file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
+        .file("d1/src/lib.rs", "")
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .build();
+
+    p.cargo("build -p notAValidDep")
+        .with_status(101)
+        .with_stderr("[ERROR] package id specification `notAValidDep` matched no packages")
+        .run();
+
+    p.cargo("build -p d1 -p notAValidDep")
+        .with_status(101)
+        .with_stderr("[ERROR] package id specification `notAValidDep` matched no packages")
+        .run();
+}
+
+// A UTF-8 byte-order mark (U+FEFF) at the start of Cargo.toml is tolerated
+// by the manifest parser.
+#[test]
+fn manifest_with_bom_is_ok() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            "\u{FEFF}
+            [package]
+            name = \"foo\"
+            version = \"0.0.1\"
+            authors = []
+        ",
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v").run();
+}
+
+// `[profile.dev] panic = 'abort'` must be forwarded to rustc as
+// `-C panic=abort` (checked via -v output).
+#[test]
+fn panic_abort_compiles_with_panic_abort() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.dev]
+            panic = 'abort'
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v")
+        .with_stderr_contains("[..] -C panic=abort [..]")
+        .run();
+}
+
+// `--color always` / `--color never` on the cargo command line is passed
+// through to the underlying rustc invocation.
+#[test]
+fn explicit_color_config_is_propagated_to_rustc() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("test", "0.0.0"))
+        .file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v --color always")
+        .with_stderr_contains("[..]rustc [..] src/lib.rs --color always[..]")
+        .run();
+
+    // Clean so the second build actually re-runs rustc.
+    p.cargo("clean").run();
+
+    p.cargo("build -v --color never")
+        .with_stderr(
+            "\
+[COMPILING] test v0.0.0 ([..])
+[RUNNING] `rustc [..] --color never [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// `--message-format=json` emits the full JSON message stream: build-script
+// artifact, dependency compiler-message + artifact, build-script-executed,
+// then the root package's message + artifact. A second (fresh) build repeats
+// the artifacts with "fresh": true but drops the compiler-message entries,
+// since rustc is not re-run for cached crates.
+#[test]
+fn compiler_json_error_format() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "build.rs",
+            "fn main() { println!(\"cargo:rustc-cfg=xyz\") }",
+        ).file("src/main.rs", "fn main() { let unused = 92; }")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file("bar/src/lib.rs", r#"fn dead() {}"#)
+        .build();
+
+    // Using jobs=1 to ensure that the order of messages is consistent.
+    p.cargo("build -v --message-format=json --jobs=1")
+        .with_json(
+            r#"
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["custom-build"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"build-script-build",
+            "src_path":"[..]build.rs"
+        },
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "filenames": "{...}",
+        "fresh": false
+    }
+
+    {
+        "reason":"compiler-message",
+        "package_id":"bar 0.5.0 ([..])",
+        "target":{
+            "kind":["lib"],
+            "crate_types":["lib"],
+            "edition": "2015",
+            "name":"bar",
+            "src_path":"[..]lib.rs"
+        },
+        "message":"{...}"
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "package_id":"bar 0.5.0 ([..])",
+        "target":{
+            "kind":["lib"],
+            "crate_types":["lib"],
+            "edition": "2015",
+            "name":"bar",
+            "src_path":"[..]lib.rs"
+        },
+        "filenames":["[..].rlib"],
+        "fresh": false
+    }
+
+    {
+        "reason":"build-script-executed",
+        "package_id":"foo 0.5.0 ([..])",
+        "linked_libs":[],
+        "linked_paths":[],
+        "env":[],
+        "cfgs":["xyz"]
+    }
+
+    {
+        "reason":"compiler-message",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]main.rs"
+        },
+        "message":"{...}"
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]main.rs"
+        },
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "filenames": "{...}",
+        "fresh": false
+    }
+"#,
+        ).run();
+
+    // With fresh build, we should repeat the artifacts,
+    // but omit compiler warnings.
+    p.cargo("build -v --message-format=json --jobs=1")
+        .with_json(
+            r#"
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["custom-build"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"build-script-build",
+            "src_path":"[..]build.rs"
+        },
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "filenames": "{...}",
+        "fresh": true
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "package_id":"bar 0.5.0 ([..])",
+        "target":{
+            "kind":["lib"],
+            "crate_types":["lib"],
+            "edition": "2015",
+            "name":"bar",
+            "src_path":"[..]lib.rs"
+        },
+        "filenames":["[..].rlib"],
+        "fresh": true
+    }
+
+    {
+        "reason":"build-script-executed",
+        "package_id":"foo 0.5.0 ([..])",
+        "linked_libs":[],
+        "linked_paths":[],
+        "env":[],
+        "cfgs":["xyz"]
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]main.rs"
+        },
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "filenames": "{...}",
+        "fresh": true
+    }
+"#,
+        ).run();
+}
+
+// An invalid --message-format value is rejected by argument parsing (clap),
+// which exits with status 1 (not cargo's 101) and lists the valid values.
+#[test]
+fn wrong_message_format_option() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --message-format XML")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: 'XML' isn't a valid value for '--message-format <FMT>'
+<tab>[possible values: human, json, short]
+",
+        ).run();
+}
+
+// The --message-format value is case-insensitive ("JSON"), works with
+// `cargo rustc`, and in --release the artifact reports the release profile
+// (opt_level "3", no debuginfo, debug_assertions off).
+#[test]
+fn message_format_json_forward_stderr() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() { let unused = 0; }")
+        .build();
+
+    p.cargo("rustc --release --bin foo --message-format JSON")
+        .with_json(
+            r#"
+    {
+        "reason":"compiler-message",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]"
+        },
+        "message":"{...}"
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "package_id":"foo 0.5.0 ([..])",
+        "target":{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]"
+        },
+        "profile":{
+            "debug_assertions":false,
+            "debuginfo":null,
+            "opt_level":"3",
+            "overflow_checks": false,
+            "test":false
+        },
+        "features":[],
+        "filenames": "{...}",
+        "fresh": false
+    }
+"#,
+        ).run();
+}
+
+// [package.metadata] tables (including nested ones) are reserved for external
+// tools and must not trigger unused-key warnings; stderr is exactly the
+// normal compile + finished lines.
+#[test]
+fn no_warn_about_package_metadata() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [package.metadata]
+            foo = "bar"
+            a = true
+            b = 3
+
+            [package.metadata.another]
+            bar = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "[..] foo v0.0.1 ([..])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// An empty string passed to --target is rejected with a "target was empty"
+// error; the empty arg is appended via .arg("") since it can't be inlined
+// into the space-split command string.
+#[test]
+fn cargo_build_empty_target() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --target")
+        .arg("")
+        .with_status(101)
+        .with_stderr_contains("[..] target was empty")
+        .run();
+}
+
+// `build --all` in a workspace with a root package builds the root and the
+// path-dependency member.
+#[test]
+fn build_all_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("build --all")
+        .with_stderr(
+            "[..] Compiling bar v0.1.0 ([..])\n\
+             [..] Compiling foo v0.1.0 ([..])\n\
+             [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// `--all --exclude baz` must skip the excluded member; baz is deliberately
+// written so it would fail to compile if it were ever built.
+#[test]
+fn build_all_exclude() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
+        .build();
+
+    p.cargo("build --all --exclude baz")
+        .with_stderr_contains("[..]Compiling foo v0.1.0 [..]")
+        .with_stderr_contains("[..]Compiling bar v0.1.0 [..]")
+        .with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]")
+        .run();
+}
+
+// `build --all --examples` builds the examples of every workspace member
+// (root and path-dependency member alike), and none of the binaries.
+#[test]
+fn build_all_workspace_implicit_examples() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("src/bin/b.rs", "fn main() {}")
+        .file("examples/c.rs", "fn main() {}")
+        .file("examples/d.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .file("bar/src/bin/e.rs", "fn main() {}")
+        .file("bar/src/bin/f.rs", "fn main() {}")
+        .file("bar/examples/g.rs", "fn main() {}")
+        .file("bar/examples/h.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --all --examples")
+        .with_stderr(
+            "[..] Compiling bar v0.1.0 ([..])\n\
+             [..] Compiling foo v0.1.0 ([..])\n\
+             [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+    assert!(!p.bin("a").is_file());
+    assert!(!p.bin("b").is_file());
+    assert!(p.bin("examples/c").is_file());
+    assert!(p.bin("examples/d").is_file());
+    assert!(!p.bin("e").is_file());
+    assert!(!p.bin("f").is_file());
+    assert!(p.bin("examples/g").is_file());
+    assert!(p.bin("examples/h").is_file());
+}
+
+// `build --all` in a virtual workspace (no root package) builds every member.
+// Member build order is unspecified, so the exact-stderr match uses [..]
+// wildcards for the package names and separate contains-checks pin both names.
+#[test]
+fn build_all_virtual_manifest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    // The order in which bar and baz are built is not guaranteed
+    p.cargo("build --all")
+        .with_stderr_contains("[..] Compiling baz v0.1.0 ([..])")
+        .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+        .with_stderr(
+            "[..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// In a virtual workspace a bare `cargo build` implies --all: both members
+// are compiled without any flag.
+#[test]
+fn build_virtual_manifest_all_implied() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    // The order in which bar and baz are built is not guaranteed
+    p.cargo("build")
+        .with_stderr_contains("[..] Compiling baz v0.1.0 ([..])")
+        .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+        .with_stderr(
+            "[..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// `-p bar` in a virtual workspace builds only that member; the sibling baz
+// must not appear anywhere in the output.
+#[test]
+fn build_virtual_manifest_one_project() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build -p bar")
+        .with_stderr_does_not_contain("[..]baz[..]")
+        .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+        .with_stderr(
+            "[..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// `build --all --examples` in a virtual workspace: all members' examples are
+// built (including dir-style ones), none of their binaries.
+#[test]
+fn build_all_virtual_manifest_implicit_examples() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .file("bar/src/bin/a.rs", "fn main() {}")
+        .file("bar/src/bin/b.rs", "fn main() {}")
+        .file("bar/examples/c.rs", "fn main() {}")
+        .file("bar/examples/d.rs", "fn main() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "")
+        .file("baz/src/bin/e.rs", "fn main() {}")
+        .file("baz/src/bin/f.rs", "fn main() {}")
+        .file("baz/examples/g.rs", "fn main() {}")
+        .file("baz/examples/h.rs", "fn main() {}")
+        .build();
+
+    // The order in which bar and baz are built is not guaranteed
+    p.cargo("build --all --examples")
+        .with_stderr_contains("[..] Compiling baz v0.1.0 ([..])")
+        .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
+        .with_stderr(
+            "[..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Compiling [..] v0.1.0 ([..])\n\
+             [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+    assert!(!p.bin("a").is_file());
+    assert!(!p.bin("b").is_file());
+    assert!(p.bin("examples/c").is_file());
+    assert!(p.bin("examples/d").is_file());
+    assert!(!p.bin("e").is_file());
+    assert!(!p.bin("f").is_file());
+    assert!(p.bin("examples/g").is_file());
+    assert!(p.bin("examples/h").is_file());
+}
+
+// A workspace member `a` that depends on a registry crate also named `a`
+// must resolve the dependency to the registry version (both "a v0.1.0"
+// compiles appear: registry first, then the path member).
+#[test]
+fn build_all_member_dependency_same_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a"]
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.1.0"
+
+            [dependencies]
+            a = "0.1.0"
+        "#,
+        ).file("a/src/lib.rs", "pub fn a() {}")
+        .build();
+
+    // Publish the same-named crate to the mock registry.
+    Package::new("a", "0.1.0").publish();
+
+    p.cargo("build --all")
+        .with_stderr(
+            "[UPDATING] `[..]` index\n\
+             [DOWNLOADING] crates ...\n\
+             [DOWNLOADED] a v0.1.0 ([..])\n\
+             [COMPILING] a v0.1.0\n\
+             [COMPILING] a v0.1.0 ([..])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// `run --bin other` must execute the named binary, not the `main` one —
+// the `main` binary panics if it is ever run, so success proves the routing.
+#[test]
+fn run_proper_binary() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+            [[bin]]
+            name = "main"
+            [[bin]]
+            name = "other"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "src/bin/main.rs",
+            r#"fn main() { panic!("This should never be run."); }"#,
+        ).file("src/bin/other.rs", "fn main() {}")
+        .build();
+
+    p.cargo("run --bin other").run();
+}
+
+// A binary at src/bin/main.rs is usable via `run --bin foo` (the package
+// name) even when a lib target is also present.
+#[test]
+fn run_proper_binary_main_rs() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file("src/bin/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("run --bin foo").run();
+}
+
+// Two [[bin]] entries without paths fall back to src/<name>.rs: bin `foo`
+// picks up src/foo.rs and bin `bar` picks up src/bar.rs.
+#[test]
+fn run_proper_alias_binary_from_src() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+            [[bin]]
+            name = "foo"
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/foo.rs", r#"fn main() { println!("foo"); }"#)
+        .file("src/bar.rs", r#"fn main() { println!("bar"); }"#)
+        .build();
+
+    p.cargo("build --all").run();
+    p.process(&p.bin("foo")).with_stdout("foo\n").run();
+    p.process(&p.bin("bar")).with_stdout("bar\n").run();
+}
+
+// When only src/main.rs exists, both declared [[bin]] targets are built from
+// it: `foo` and `bar` are separate executables with identical behavior.
+#[test]
+fn run_proper_alias_binary_main_rs() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+            [[bin]]
+            name = "foo"
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/main.rs", r#"fn main() { println!("main"); }"#)
+        .build();
+
+    p.cargo("build --all").run();
+    p.process(&p.bin("foo")).with_stdout("main\n").run();
+    p.process(&p.bin("bar")).with_stdout("main\n").run();
+}
+
+// src/main.rs takes precedence over src/foo.rs for the `foo` binary: the
+// panicking src/foo.rs must not be the one compiled and run.
+#[test]
+fn run_proper_binary_main_rs_as_foo() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#" fn main() { panic!("This should never be run."); }"#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("run --bin foo").run();
+}
+
+// RUSTC_WRAPPER is prepended to every rustc invocation. Uses /usr/bin/env as
+// a pass-through wrapper, so the test is skipped on Windows where it doesn't
+// exist.
+#[test]
+fn rustc_wrapper() {
+    // We don't have /usr/bin/env on Windows.
+    if cfg!(windows) {
+        return;
+    }
+
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build -v")
+        .env("RUSTC_WRAPPER", "/usr/bin/env")
+        .with_stderr_contains("[RUNNING] `/usr/bin/env rustc --crate-name foo [..]")
+        .run();
+}
+
+// cdylib intermediate outputs (per-platform: .dll/.dll.lib/.dll.exp, .dylib,
+// or .so) stay in target/debug/deps rather than being lifted out.
+#[test]
+fn cdylib_not_lifted() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            authors = []
+            version = "0.1.0"
+
+            [lib]
+            crate-type = ["cdylib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    let files = if cfg!(windows) {
+        vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"]
+    } else if cfg!(target_os = "macos") {
+        vec!["libfoo.dylib"]
+    } else {
+        vec!["libfoo.so"]
+    };
+
+    for file in files {
+        println!("checking: {}", file);
+        assert!(p.root().join("target/debug/deps").join(&file).is_file());
+    }
+}
+
+// Final cdylib outputs for package `foo-bar` use the underscored crate name
+// (foo_bar.dll / libfoo_bar.dylib / libfoo_bar.so) and land in target/debug.
+#[test]
+fn cdylib_final_outputs() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo-bar"
+            authors = []
+            version = "0.1.0"
+
+            [lib]
+            crate-type = ["cdylib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    let files = if cfg!(windows) {
+        vec!["foo_bar.dll.lib", "foo_bar.dll"]
+    } else if cfg!(target_os = "macos") {
+        vec!["libfoo_bar.dylib"]
+    } else {
+        vec!["libfoo_bar.so"]
+    };
+
+    for file in files {
+        println!("checking: {}", file);
+        assert!(p.root().join("target/debug").join(&file).is_file());
+    }
+}
+
+// Regression test: the --cfg flags coming from [features] and from the build
+// script's cargo:rustc-cfg lines must be passed to rustc in a deterministic
+// order, pinned by an exact stderr match.
+#[test]
+fn deterministic_cfg_flags() {
+    // This bug is non-deterministic
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            build = "build.rs"
+
+            [features]
+            default = ["f_a", "f_b", "f_c", "f_d"]
+            f_a = []
+            f_b = []
+            f_c = []
+            f_d = []
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:rustc-cfg=cfg_a");
+                    println!("cargo:rustc-cfg=cfg_b");
+                    println!("cargo:rustc-cfg=cfg_c");
+                    println!("cargo:rustc-cfg=cfg_d");
+                    println!("cargo:rustc-cfg=cfg_e");
+                }
+            "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.1.0 [..]
+[RUNNING] [..]
+[RUNNING] [..]
+[RUNNING] `rustc --crate-name foo [..] \
+--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
+--cfg[..]f_c[..]--cfg[..]f_d[..] \
+--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+        ).run();
+}
+
+// [[bin]] entries without `path` keys are resolved by the standard layout:
+// bin `foo` -> src/main.rs, bin `bar` -> src/bin/bar.rs.
+#[test]
+fn explicit_bins_without_paths() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [[bin]]
+            name = "foo"
+
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .file("src/bin/bar.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// With a lib target present, src/foo.rs is NOT inferred as the source for
+// bin `foo`; the manifest parse fails asking for an explicit bin.path.
+#[test]
+fn no_bin_in_src_with_lib() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file("src/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  can't find `foo` bin, specify bin.path",
+        ).run();
+}
+
+#[test]
+fn inferred_bins() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/bin/bar.rs", "fn main() {}")
+        .file("src/bin/baz/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+    assert!(p.bin("baz").is_file());
+}
+
+#[test]
+// src/bin/bar.rs and src/bin/bar/main.rs both infer a binary named `bar`,
+// which must be rejected with a duplicate-name error.
+fn inferred_bins_duplicate_name() {
+    // this should fail, because we have two binaries with the same name
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/bin/bar.rs", "fn main() {}")
+        .file("src/bin/bar/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").with_status(101).with_stderr_contains(
+            "[..]found duplicate binary name bar, but all binary targets must have a unique name[..]",
+        )
+        .run();
+}
+
+#[test]
+// A `[[bin]]` section with only a name is matched against the inferred
+// layout, here src/bin/bar/main.rs.
+fn inferred_bin_path() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+        [package]
+        name = "foo"
+        version = "0.1.0"
+        authors = []
+
+        [[bin]]
+        name = "bar"
+        # Note, no `path` key!
+        "#,
+        ).file("src/bin/bar/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.bin("bar").is_file());
+}
+
+#[test]
+fn inferred_examples() {
+    let p = project()
+        .file("src/lib.rs", "fn main() {}")
+        .file("examples/bar.rs", "fn main() {}")
+        .file("examples/baz/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("test").run();
+    assert!(p.bin("examples/bar").is_file());
+    assert!(p.bin("examples/baz").is_file());
+}
+
+#[test]
+fn inferred_tests() {
+    let p = project()
+        .file("src/lib.rs", "fn main() {}")
+        .file("tests/bar.rs", "fn main() {}")
+        .file("tests/baz/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("test --test=bar --test=baz").run();
+}
+
+#[test]
+fn inferred_benchmarks() {
+    let p = project()
+        .file("src/lib.rs", "fn main() {}")
+        .file("benches/bar.rs", "fn main() {}")
+        .file("benches/baz/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("bench --bench=bar --bench=baz").run();
+}
+
+#[test]
+// An `edition` key set on a single target ([lib]) is forwarded to rustc as
+// `--edition=2018`.
+fn target_edition() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+
+                [lib]
+                edition = "2018"
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .without_status() // passes on nightly, fails on stable, b/c --edition is nightly-only
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]--edition=2018 [..]
+",
+        ).run();
+}
+
+#[test]
+// A per-target edition overrides the package-level one: the lib builds as
+// 2015, so the 2018 reserved words `async`, `try`, and `await` stay legal
+// as function names.
+fn target_edition_override() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+                edition = "2018"
+
+                [lib]
+                edition = "2015"
+            "#,
+        ).file(
+            "src/lib.rs",
+            "
+                pub fn async() {}
+                pub fn try() {}
+                pub fn await() {}
+            "
+        )
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn same_metadata_different_directory() {
+    // A top-level crate built in two different workspaces should have the
+    // same metadata hash.
+    let p = project()
+        .at("foo1")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+    // Capture the `metadata=...` argument rustc was invoked with in `foo1`.
+    let output = t!(String::from_utf8(
+        t!(p.cargo("build -v").exec_with_output()).stderr,
+    ));
+    let metadata = output
+        .split_whitespace()
+        .find(|arg| arg.starts_with("metadata="))
+        .unwrap();
+
+    // Building the identical crate under `foo2` must produce the same hash.
+    let p = project()
+        .at("foo2")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr_contains(format!("[..]{}[..]", metadata))
+        .run();
+}
+
+#[test]
+fn building_a_dependent_crate_witout_bin_should_fail() {
+    Package::new("testless", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "testless"
+            version = "0.1.0"
+
+            [[bin]]
+            name = "a_bin"
+        "#,
+        ).file("src/lib.rs", "")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            testless = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains("[..]can't find `a_bin` bin, specify bin.path")
+        .run();
+}
+
+#[test]
+// On Apple platforms the .dSYM bundles of bins are uplifted next to the
+// binaries (as symlinks), while example and test dSYMs are not.
+fn uplift_dsym_of_bin_on_mac() {
+    // Runtime guard instead of #[cfg] so the test is always compiled.
+    if !cfg!(any(target_os = "macos", target_os = "ios")) {
+        return;
+    }
+    let p = project()
+        .file("src/main.rs", "fn main() { panic!(); }")
+        .file("src/bin/b.rs", "fn main() { panic!(); }")
+        .file("examples/c.rs", "fn main() { panic!(); }")
+        .file("tests/d.rs", "fn main() { panic!(); }")
+        .build();
+
+    p.cargo("build --bins --examples --tests").run();
+    assert!(p.bin("foo.dSYM").is_dir());
+    assert!(p.bin("b.dSYM").is_dir());
+    // The uplifted bundle is a symlink, not a copied directory.
+    assert!(
+        p.bin("b.dSYM")
+            .symlink_metadata()
+            .expect("read metadata from b.dSYM")
+            .file_type()
+            .is_symlink()
+    );
+    // Examples and tests do not get uplifted.
+    assert!(!p.bin("c.dSYM").is_dir());
+    assert!(!p.bin("d.dSYM").is_dir());
+}
+
+#[test]
+// On Windows/MSVC the .pdb files of bins are uplifted into the target debug
+// directory, while example and test pdbs are not.
+fn uplift_pdb_of_bin_on_windows() {
+    // Runtime guard instead of #[cfg] so the test is always compiled.
+    if !cfg!(all(target_os = "windows", target_env = "msvc")) {
+        return;
+    }
+    let p = project()
+        .file("src/main.rs", "fn main() { panic!(); }")
+        .file("src/bin/b.rs", "fn main() { panic!(); }")
+        .file("examples/c.rs", "fn main() { panic!(); }")
+        .file("tests/d.rs", "fn main() { panic!(); }")
+        .build();
+
+    p.cargo("build --bins --examples --tests").run();
+    assert!(p.target_debug_dir().join("foo.pdb").is_file());
+    assert!(p.target_debug_dir().join("b.pdb").is_file());
+    // Examples and tests do not get uplifted.
+    assert!(!p.target_debug_dir().join("c.pdb").is_file());
+    assert!(!p.target_debug_dir().join("d.pdb").is_file());
+}
+
+// Make sure that `cargo build` chooses the correct profile for building
+// targets based on filters (assuming --profile is not specified).
+#[test]
+fn build_filter_infer_profile() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .file("tests/t1.rs", "")
+        .file("benches/b1.rs", "")
+        .file("examples/ex1.rs", "fn main() {}")
+        .build();
+
+    // Plain build: lib and bin are compiled (dev profile).
+    p.cargo("build -v")
+        .with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+             --emit=dep-info,link[..]",
+        ).with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+             --emit=dep-info,link[..]",
+        ).run();
+
+    // --test=t1: the test target plus lib and bin are compiled.
+    p.root().join("target").rm_rf();
+    p.cargo("build -v --test=t1")
+        .with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+             --emit=dep-info,link[..]",
+        ).with_stderr_contains(
+            "[RUNNING] `rustc --crate-name t1 tests/t1.rs --color never --emit=dep-info,link[..]",
+        ).with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+             --emit=dep-info,link[..]",
+        ).run();
+
+    // --bench=b1: the bench target is built with -C opt-level=3.
+    p.root().join("target").rm_rf();
+    p.cargo("build -v --bench=b1")
+        .with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+             --emit=dep-info,link[..]",
+        ).with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name b1 benches/b1.rs --color never --emit=dep-info,link \
+             -C opt-level=3[..]",
+        ).with_stderr_contains(
+            "\
+             [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+             --emit=dep-info,link[..]",
+        ).run();
+}
+
+#[test]
+// A default `cargo build` compiles only the bin — no bench variant
+// (opt-level=3 --test) and no unit-test variant (--test) are invoked.
+fn targets_selected_default() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("build -v")
+        // bin
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+            --emit=dep-info,link[..]")
+        // bench
+        .with_stderr_does_not_contain("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C opt-level=3 --test [..]")
+        // unit test
+        .with_stderr_does_not_contain("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C debuginfo=2 --test [..]").run();
+}
+
+#[test]
+// `--all-targets` additionally compiles the bench variant (opt-level=3
+// --test) and the unit-test variant (debuginfo=2 --test) of the bin.
+fn targets_selected_all() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("build -v --all-targets")
+        // bin
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+            --emit=dep-info,link[..]")
+        // bench
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C opt-level=3 --test [..]")
+        // unit test
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C debuginfo=2 --test [..]").run();
+}
+
+#[test]
+// `--all-targets` on a package without a library target.
+// NOTE(review): setup and assertions are identical to `targets_selected_all`
+// above — presumably kept to pin behavior for lib-less packages; confirm.
+fn all_targets_no_lib() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("build -v --all-targets")
+        // bin
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+            --emit=dep-info,link[..]")
+        // bench
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C opt-level=3 --test [..]")
+        // unit test
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C debuginfo=2 --test [..]").run();
+}
+
+#[test]
+// A dependency whose only crate-type is `staticlib` offers nothing rustc can
+// link against; cargo warns rather than erroring.
+fn no_linkable_target() {
+    // Issue 3169. This is currently not an error as per discussion in PR #4797
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            [dependencies]
+            the_lib = { path = "the_lib" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "the_lib/Cargo.toml",
+            r#"
+            [package]
+            name = "the_lib"
+            version = "0.1.0"
+            [lib]
+            name = "the_lib"
+            crate-type = ["staticlib"]
+        "#,
+        ).file("the_lib/src/lib.rs", "pub fn foo() {}")
+        .build();
+    p.cargo("build")
+        .with_stderr_contains(
+            "\
+             [WARNING] The package `the_lib` provides no linkable [..] \
+             while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]",
+        ).run();
+}
+
+#[test]
+// `-Zavoid-dev-deps` (nightly-only) skips resolving dev-dependencies: the
+// dev-dep `baz` does not exist in the registry, so the plain build fails
+// while the flagged build succeeds.
+// NOTE(review): the published `foo 1.0.0` is not referenced by the manifest —
+// presumably it only seeds the test registry; confirm.
+fn avoid_dev_deps() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dev-dependencies]
+            baz = "1.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").with_status(101).run();
+    p.cargo("build -Zavoid-dev-deps")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
+
+#[test]
+fn invalid_jobs() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build --jobs over9000")
+        .with_status(1)
+        .with_stderr("error: Invalid value: could not parse `over9000` as a number")
+        .run();
+}
+
+#[test]
+// Target filters in a workspace: a bad `--example` name suggests the closest
+// match, `--lib` builds only the one member with a library, and
+// `--example ex1` builds the example in both members.
+fn target_filters_workspace() {
+    let ws = project()
+        .at("ws")
+        .file(
+            "Cargo.toml",
+            r#"
+        [workspace]
+        members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_lib_manifest("a"))
+        .file("a/src/lib.rs", "")
+        .file("a/examples/ex1.rs", "fn main() {}")
+        .file("b/Cargo.toml", &basic_bin_manifest("b"))
+        .file("b/src/main.rs", "fn main() {}")
+        .file("b/examples/ex1.rs", "fn main() {}")
+        .build();
+
+    // No member has an example named `ex`; suggest the close match.
+    ws.cargo("build -v --example ex")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] no example target named `ex`
+
+Did you mean `ex1`?",
+        ).run();
+
+    // Only member `a` has a library.
+    ws.cargo("build -v --lib")
+        .with_status(0)
+        .with_stderr_contains("[RUNNING] `rustc [..]a/src/lib.rs[..]")
+        .run();
+
+    // Both members have an `ex1` example; both get built.
+    ws.cargo("build -v --example ex1")
+        .with_status(0)
+        .with_stderr_contains("[RUNNING] `rustc [..]a/examples/ex1.rs[..]")
+        .with_stderr_contains("[RUNNING] `rustc [..]b/examples/ex1.rs[..]")
+        .run();
+}
+
+#[test]
+fn target_filters_workspace_not_found() {
+    let ws = project()
+        .at("ws")
+        .file(
+            "Cargo.toml",
+            r#"
+        [workspace]
+        members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_bin_manifest("a"))
+        .file("a/src/main.rs", "fn main() {}")
+        .file("b/Cargo.toml", &basic_bin_manifest("b"))
+        .file("b/src/main.rs", "fn main() {}")
+        .build();
+
+    ws.cargo("build -v --lib")
+        .with_status(101)
+        .with_stderr("[ERROR] no library targets found in packages: a, b")
+        .run();
+}
diff --git a/tests/testsuite/build_auth.rs b/tests/testsuite/build_auth.rs
new file mode 100644 (file)
index 0000000..394df28
--- /dev/null
@@ -0,0 +1,243 @@
+use std;
+use std::collections::HashSet;
+use std::io::prelude::*;
+use std::net::TcpListener;
+use std::thread;
+
+use bufstream::BufStream;
+use git2;
+use support::paths;
+use support::{basic_manifest, project};
+
+// Test that HTTP auth is offered from `credential.helper`
+#[test]
+fn http_auth_offered() {
+    let server = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = server.local_addr().unwrap();
+
+    // Collect the request line plus Authorization/Accept headers, stopping
+    // at the blank line (len <= 2, i.e. just "\r\n") that ends the headers.
+    fn headers(rdr: &mut BufRead) -> HashSet<String> {
+        let valid = ["GET", "Authorization", "Accept"];
+        rdr.lines()
+            .map(|s| s.unwrap())
+            .take_while(|s| s.len() > 2)
+            .map(|s| s.trim().to_string())
+            .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix)))
+            .collect()
+    }
+
+    let t = thread::spawn(move || {
+        // First request: reply 401 so the client consults credential.helper.
+        // No Authorization header is expected yet.
+        let mut conn = BufStream::new(server.accept().unwrap().0);
+        let req = headers(&mut conn);
+        // NOTE(review): the unescaped line break after the WWW-Authenticate
+        // line is part of the response bytes — presumably intentional since
+        // the terminating \r\n still follows; confirm against upstream.
+        conn.write_all(
+            b"\
+            HTTP/1.1 401 Unauthorized\r\n\
+            WWW-Authenticate: Basic realm=\"wheee\"\r\n
+            \r\n\
+        ",
+        ).unwrap();
+        assert_eq!(
+            req,
+            vec![
+                "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+                "Accept: */*",
+            ].into_iter()
+            .map(|s| s.to_string())
+            .collect()
+        );
+        drop(conn);
+
+        // Retry: the request must now carry the Basic credentials produced
+        // by the helper ("Zm9vOmJhcg==" is base64 of "foo:bar").
+        let mut conn = BufStream::new(server.accept().unwrap().0);
+        let req = headers(&mut conn);
+        conn.write_all(
+            b"\
+            HTTP/1.1 401 Unauthorized\r\n\
+            WWW-Authenticate: Basic realm=\"wheee\"\r\n
+            \r\n\
+        ",
+        ).unwrap();
+        assert_eq!(
+            req,
+            vec![
+                "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+                "Authorization: Basic Zm9vOmJhcg==",
+                "Accept: */*",
+            ].into_iter()
+            .map(|s| s.to_string())
+            .collect()
+        );
+    });
+
+    // A fake credential helper binary that always answers foo/bar.
+    let script = project()
+        .at("script")
+        .file("Cargo.toml", &basic_manifest("script", "0.1.0"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                println!("username=foo");
+                println!("password=bar");
+            }
+        "#,
+        ).build();
+
+    script.cargo("build -v").run();
+    let script = script.bin("script");
+
+    // Register the helper in the test HOME's .gitconfig.
+    let config = paths::home().join(".gitconfig");
+    let mut config = git2::Config::open(&config).unwrap();
+    config
+        .set_str("credential.helper", &script.display().to_string())
+        .unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            git = "http://127.0.0.1:{}/foo/bar"
+        "#,
+                addr.port()
+            ),
+        ).file("src/main.rs", "")
+        .file(
+            ".cargo/config",
+            "\
+        [net]
+        retry = 0
+        ",
+        ).build();
+
+    // This is a "contains" check because the last error differs by platform,
+    // may span multiple lines, and isn't relevant to this test.
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "\
+[UPDATING] git repository `http://{addr}/foo/bar`
+[ERROR] failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update http://{addr}/foo/bar
+
+Caused by:
+  failed to clone into: [..]
+
+Caused by:
+  failed to authenticate when downloading repository
+attempted to find username/password via `credential.helper`, but [..]
+
+Caused by:
+",
+            addr = addr
+        )).run();
+
+    t.join().ok().unwrap();
+}
+
+// Boy, sure would be nice to have a TLS implementation in rust!
+#[test]
+fn https_something_happens() {
+    let server = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = server.local_addr().unwrap();
+    // The "server" writes garbage bytes instead of a TLS handshake, so the
+    // client side must fail with a TLS/SSL-level error.
+    let t = thread::spawn(move || {
+        let mut conn = server.accept().unwrap().0;
+        drop(conn.write(b"1234"));
+        drop(conn.shutdown(std::net::Shutdown::Write));
+        drop(conn.read(&mut [0; 16]));
+    });
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            git = "https://127.0.0.1:{}/foo/bar"
+        "#,
+                addr.port()
+            ),
+        ).file("src/main.rs", "")
+        .file(
+            ".cargo/config",
+            "\
+        [net]
+        retry = 0
+        ",
+        ).build();
+
+    // The exact error text depends on the platform's TLS backend.
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "[UPDATING] git repository `https://{addr}/foo/bar`",
+            addr = addr
+        )).with_stderr_contains(&format!(
+            "\
+Caused by:
+  {errmsg}
+",
+            errmsg = if cfg!(windows) {
+                "[..]failed to send request: [..]"
+            } else if cfg!(target_os = "macos") {
+                // OSX is difficult to tests as some builds may use
+                // Security.framework and others may use OpenSSL. In that case let's
+                // just not verify the error message here.
+                "[..]"
+            } else {
+                "[..]SSL error: [..]"
+            }
+        )).run();
+
+    t.join().ok().unwrap();
+}
+
+// Boy, sure would be nice to have an SSH implementation in rust!
+#[test]
+fn ssh_something_happens() {
+    let server = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = server.local_addr().unwrap();
+    // Accept the connection and immediately drop it, so the SSH banner
+    // exchange never happens.
+    let t = thread::spawn(move || {
+        drop(server.accept().unwrap());
+    });
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            git = "ssh://127.0.0.1:{}/foo/bar"
+        "#,
+                addr.port()
+            ),
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "[UPDATING] git repository `ssh://{addr}/foo/bar`",
+            addr = addr
+        )).with_stderr_contains(
+            "\
+Caused by:
+  [..]failed to start SSH session: Failed getting banner[..]
+",
+        ).run();
+    t.join().ok().unwrap();
+}
diff --git a/tests/testsuite/build_lib.rs b/tests/testsuite/build_lib.rs
new file mode 100644 (file)
index 0000000..9b8de83
--- /dev/null
@@ -0,0 +1,60 @@
+use support::{basic_bin_manifest, basic_manifest, project};
+
+#[test]
+// `--lib` builds only the library target; the expected stderr pins the full
+// rustc invocation for the lib (and no bin invocation appears).
+fn build_lib_only() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("build --lib -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+        ).run();
+}
+
+#[test]
+fn build_with_no_lib() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --lib")
+        .with_status(101)
+        .with_stderr("[ERROR] no library targets found in package `foo`")
+        .run();
+}
+
+#[test]
+// A relative `CARGO_HOME` must not break building a package with a path
+// dependency.
+fn build_with_relative_cargo_home_path() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "foo"
+            version = "0.0.1"
+            authors = ["wycats@example.com"]
+
+            [dependencies]
+
+            "test-dependency" = { path = "src/test_dependency" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("src/test_dependency/src/lib.rs", r#" "#)
+        .file(
+            "src/test_dependency/Cargo.toml",
+            &basic_manifest("test-dependency", "0.0.1"),
+        ).build();
+
+    p.cargo("build").env("CARGO_HOME", "./cargo_home/").run();
+}
diff --git a/tests/testsuite/build_plan.rs b/tests/testsuite/build_plan.rs
new file mode 100644 (file)
index 0000000..fa17a79
--- /dev/null
@@ -0,0 +1,208 @@
+use support::registry::Package;
+use support::{basic_bin_manifest, basic_manifest, main_file, project};
+
+#[test]
+// `--build-plan` (nightly, -Zunstable-options) emits the planned rustc
+// invocations as JSON instead of building anything.
+fn cargo_build_plan_simple() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build --build-plan -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+    {
+        "inputs": [
+            "[..]/foo/Cargo.toml"
+        ],
+        "invocations": [
+            {
+                "args": "{...}",
+                "cwd": "[..]/cit/[..]/foo",
+                "deps": [],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": "{...}",
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["bin"]
+            }
+        ]
+    }
+    "#,
+        ).run();
+    // The plan is a dry run: nothing was actually compiled.
+    assert!(!p.bin("foo").is_file());
+}
+
+#[test]
+// With one path dependency, the plan lists the dependency's invocation first
+// and the dependent crate's invocation references it by index (`"deps": [0]`).
+fn cargo_build_plan_single_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.5.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            pub fn foo() { bar::bar(); }
+
+            #[test]
+            fn test() { foo(); }
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+    p.cargo("build --build-plan -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+    {
+        "inputs": [
+            "[..]/foo/Cargo.toml",
+            "[..]/foo/bar/Cargo.toml"
+        ],
+        "invocations": [
+            {
+                "args": "{...}",
+                "cwd": "[..]/cit/[..]/foo",
+                "deps": [],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [
+                    "[..]/foo/target/debug/deps/libbar-[..].rlib"
+                ],
+                "package_name": "bar",
+                "package_version": "0.0.1",
+                "program": "rustc",
+                "target_kind": ["lib"]
+            },
+            {
+                "args": "{...}",
+                "cwd": "[..]/cit/[..]/foo",
+                "deps": [0],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [
+                    "[..]/foo/target/debug/deps/libfoo-[..].rlib"
+                ],
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["lib"]
+            }
+        ]
+    }
+    "#,
+        ).run();
+}
+
+#[test]
+// With a build script the plan has three chained invocations: compile
+// build.rs, run the resulting build-script-build, then compile the bin
+// (each later step referencing the previous by index in `deps`).
+fn cargo_build_plan_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", r#"fn main() {}"#)
+        .file("build.rs", r#"fn main() {}"#)
+        .build();
+
+    p.cargo("build --build-plan -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+    {
+        "inputs": [
+            "[..]/foo/Cargo.toml"
+        ],
+        "invocations": [
+            {
+                "args": "{...}",
+                "cwd": "[..]/cit/[..]/foo",
+                "deps": [],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [
+                    "[..]/foo/target/debug/build/[..]/build_script_build-[..]"
+                ],
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["custom-build"]
+            },
+            {
+                "args": "{...}",
+                "cwd": "[..]/cit/[..]/foo",
+                "deps": [0],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": [],
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "[..]/build-script-build",
+                "target_kind": ["custom-build"]
+            },
+            {
+                "args": "{...}",
+                "cwd": "[..]/cit/[..]/foo",
+                "deps": [1],
+                "env": "{...}",
+                "kind": "Host",
+                "links": "{...}",
+                "outputs": "{...}",
+                "package_name": "foo",
+                "package_version": "0.5.0",
+                "program": "rustc",
+                "target_kind": ["bin"]
+            }
+        ]
+    }
+    "#,
+        ).run();
+}
+
+#[test]
+fn build_plan_with_dev_dep() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.5.0"
+                authors = []
+
+                [dev-dependencies]
+                bar = "*"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build --build-plan -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs
new file mode 100644 (file)
index 0000000..0f41f35
--- /dev/null
@@ -0,0 +1,3426 @@
+use std::env;
+use std::fs::{self, File};
+use std::io;
+use std::io::prelude::*;
+use std::thread;
+use std::time::Duration;
+
+use cargo::util::paths::remove_dir_all;
+use support::paths::CargoPathExt;
+use support::registry::Package;
+use support::{basic_manifest, cross_compile, project};
+use support::{rustc_host, sleep_ms};
+
+// A build script that exits with code 101 must fail the build, and cargo's
+// stderr must report the failed custom build command with the exit code.
+#[test]
+fn custom_build_script_failed() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("build.rs", "fn main() { std::process::exit(101); }")
+        .build();
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs --color never --crate-type bin [..]`
+[RUNNING] `[..]/build-script-build`
+[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])`
+process didn't exit successfully: `[..]/build-script-build` (exit code: 101)",
+        ).run();
+}
+
+// The generated build script asserts the environment cargo provides to build
+// scripts: TARGET, NUM_JOBS, CARGO_MANIFEST_DIR, OPT_LEVEL, PROFILE, DEBUG,
+// OUT_DIR, HOST, CARGO_FEATURE_*, CARGO, RUSTC, RUSTDOC — and that
+// RUSTC_LINKER is absent when no linker override is configured.
+#[test]
+fn custom_build_env_vars() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [features]
+            bar_feat = ["bar/foo"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+
+            [features]
+            foo = []
+        "#,
+        ).file("bar/src/lib.rs", "pub fn hello() {}");
+
+    // Build-script source is generated via format! so the expected OUT_DIR
+    // prefix ({0}) can embed this project's real target/debug/build path.
+    let file_content = format!(
+        r#"
+            use std::env;
+            use std::io::prelude::*;
+            use std::path::Path;
+            use std::fs;
+
+            fn main() {{
+                let _target = env::var("TARGET").unwrap();
+                let _ncpus = env::var("NUM_JOBS").unwrap();
+                let _dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+
+                let opt = env::var("OPT_LEVEL").unwrap();
+                assert_eq!(opt, "0");
+
+                let opt = env::var("PROFILE").unwrap();
+                assert_eq!(opt, "debug");
+
+                let debug = env::var("DEBUG").unwrap();
+                assert_eq!(debug, "true");
+
+                let out = env::var("OUT_DIR").unwrap();
+                assert!(out.starts_with(r"{0}"));
+                assert!(fs::metadata(&out).map(|m| m.is_dir()).unwrap_or(false));
+
+                let _host = env::var("HOST").unwrap();
+
+                let _feat = env::var("CARGO_FEATURE_FOO").unwrap();
+
+                let _cargo = env::var("CARGO").unwrap();
+
+                let rustc = env::var("RUSTC").unwrap();
+                assert_eq!(rustc, "rustc");
+
+                let rustdoc = env::var("RUSTDOC").unwrap();
+                assert_eq!(rustdoc, "rustdoc");
+
+                assert!(env::var("RUSTC_LINKER").is_err());
+            }}
+        "#,
+        p.root()
+            .join("target")
+            .join("debug")
+            .join("build")
+            .display()
+    );
+
+    let p = p.file("bar/build.rs", &file_content).build();
+
+    // `--features bar_feat` enables bar's `foo` feature, so CARGO_FEATURE_FOO
+    // must be visible to bar's build script.
+    p.cargo("build --features bar_feat").run();
+}
+
+// When a `[target.<triple>] linker` is configured in .cargo/config, the
+// build script must see it via the RUSTC_LINKER environment variable.
+// Skipped when cross-compilation is unavailable on this host.
+#[test]
+fn custom_build_env_var_rustc_linker() {
+    if cross_compile::disabled() {
+        return;
+    }
+    let target = cross_compile::alternate();
+    let p = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+                [target.{}]
+                linker = "/path/to/linker"
+                "#,
+                target
+            ),
+        ).file(
+            "build.rs",
+            r#"
+            use std::env;
+
+            fn main() {
+                assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker"));
+            }
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    // no crate type set => linker never called => build succeeds if and
+    // only if build.rs succeeds, despite linker binary not existing.
+    p.cargo("build --target").arg(&target).run();
+}
+
+// `cargo:rustc-flags` only accepts -l and -L; any other flag emitted by a
+// build script must abort the build with an explanatory error.
+#[test]
+fn custom_build_script_wrong_rustc_flags() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-flags=-aaa -bbb"); }"#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+             [ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ([CWD])`: \
+             `-aaa -bbb`",
+        ).run();
+}
+
+/*
+#[test]
+fn custom_build_script_rustc_flags() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.foo]
+            path = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .file(
+            "foo/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2");
+            }
+        "#,
+        ).build();
+
+    // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works)
+    p.cargo("build --verbose")
+        .with_status(101)
+        .with_stderr(
+            "\
+[COMPILING] bar v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name test [CWD]/src/lib.rs --crate-type lib -C debuginfo=2 \
+        -C metadata=[..] \
+        -C extra-filename=-[..] \
+        --out-dir [CWD]/target \
+        --emit=dep-info,link \
+        -L [CWD]/target \
+        -L [CWD]/target/deps`
+",
+        ).run();
+}
+*/
+
+// A manifest declaring `links = "a"` without a `build` script is invalid;
+// cargo must fail with a message naming the package and the linked library.
+#[test]
+fn links_no_build_cmd() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
+not have a custom build script
+",
+        ).run();
+}
+
+// Two packages in the same dependency graph (foo and its direct dep a-sys)
+// both declaring `links = "a"` must be rejected by the resolver.
+#[test]
+fn links_duplicates() {
+    // this tests that the links_duplicates are caught at resolver time
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+
+            [dependencies.a-sys]
+            path = "a-sys"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "")
+        .file(
+            "a-sys/Cargo.toml",
+            r#"
+            [project]
+            name = "a-sys"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+        "#,
+        ).file("a-sys/src/lib.rs", "")
+        .file("a-sys/build.rs", "")
+        .build();
+
+    p.cargo("build").with_status(101)
+                       .with_stderr("\
+error: failed to select a version for `a-sys`.
+    ... required by package `foo v0.5.0 ([..])`
+versions that meet the requirements `*` are: 0.5.0
+
+the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
+package `foo v0.5.0 ([..])`
+
+failed to select a version for `a-sys` which could resolve this conflict
+").run();
+}
+
+// Same `links` conflict as links_duplicates, but the conflicting a-sys sits
+// two levels deep (foo -> a -> a-sys); the error must show the full
+// "required by / depended on by" chain.
+#[test]
+fn links_duplicates_deep_dependency() {
+    // this tests that the links_duplicates are caught at resolver time
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.a-sys]
+            path = "a-sys"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file("a/build.rs", "")
+        .file(
+            "a/a-sys/Cargo.toml",
+            r#"
+            [project]
+            name = "a-sys"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+        "#,
+        ).file("a/a-sys/src/lib.rs", "")
+        .file("a/a-sys/build.rs", "")
+        .build();
+
+    p.cargo("build").with_status(101)
+                       .with_stderr("\
+error: failed to select a version for `a-sys`.
+    ... required by package `a v0.5.0 ([..])`
+    ... which is depended on by `foo v0.5.0 ([..])`
+versions that meet the requirements `*` are: 0.5.0
+
+the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
+package `foo v0.5.0 ([..])`
+
+failed to select a version for `a-sys` which could resolve this conflict
+").run();
+}
+
+// A `[target.<triple>.foo]` override in .cargo/config replaces running a's
+// build script entirely: a/build.rs is deliberately invalid Rust, yet the
+// build succeeds, foo's build script sees DEP_FOO_FOO/DEP_FOO_BAR from the
+// override, and the override's rustc-flags reach foo's rustc invocation.
+#[test]
+fn overrides_and_links() {
+    let target = rustc_host();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"),
+                           "bar");
+                assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"),
+                           "baz");
+            }
+        "#,
+        ).file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.foo]
+            rustc-flags = "-L foo -L bar"
+            foo = "bar"
+            bar = "baz"
+        "#,
+                target
+            ),
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file("a/build.rs", "not valid rust code")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[..]
+[..]
+[..]
+[..]
+[..]
+[RUNNING] `rustc --crate-name foo [..] -L foo -L bar`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// A `[target.<triple>.foo]` override that no package's `links` key matches
+// must be silently ignored: the build still succeeds.
+#[test]
+fn unused_overrides() {
+    let target = rustc_host();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.foo]
+            rustc-flags = "-L foo -L bar"
+            foo = "bar"
+            bar = "baz"
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("build -v").run();
+}
+
+// Key/value metadata printed by a dependency's build script (`cargo:foo=bar`)
+// must reach the dependent's build script as DEP_<LINKS>_<KEY> env vars; the
+// dependency's own script also checks CARGO_MANIFEST_LINKS is its links key.
+#[test]
+fn links_passes_env_vars() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
+                assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
+            }
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                let lib = env::var("CARGO_MANIFEST_LINKS").unwrap();
+                assert_eq!(lib, "foo");
+
+                println!("cargo:foo=bar");
+                println!("cargo:bar=baz");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -v").run();
+}
+
+// After a successful build, adding an unrelated file to the package root
+// re-runs the build script and rustc, but skips recompiling dependencies
+// (expected stderr shows only the script + one rustc invocation).
+#[test]
+fn only_rerun_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v").run();
+    // Push mtimes into the past so the new file below looks newer.
+    p.root().move_into_the_past();
+
+    File::create(&p.root().join("some-new-file")).unwrap();
+    p.root().move_into_the_past();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// On a rebuild triggered by a new file, the DEP_* metadata emitted by the
+// dependency's build script on the first build must still be passed to foo's
+// build script — foo's script asserts DEP_FOO_FOO/DEP_FOO_BAR on both runs.
+#[test]
+fn rebuild_continues_to_pass_env_vars() {
+    let a = project()
+        .at("a")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::time::Duration;
+            fn main() {
+                println!("cargo:foo=bar");
+                println!("cargo:bar=baz");
+                std::thread::sleep(Duration::from_millis(500));
+            }
+        "#,
+        ).build();
+    a.root().move_into_the_past();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.a]
+            path = '{}'
+        "#,
+                a.root().display()
+            ),
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
+                assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -v").run();
+    p.root().move_into_the_past();
+
+    // A new file forces foo to rebuild; a is already fresh.
+    File::create(&p.root().join("some-new-file")).unwrap();
+    p.root().move_into_the_past();
+
+    p.cargo("build -v").run();
+}
+
+// Exercises a build-script package across build, test, doc, and run:
+// `cargo test` re-runs the script after src/lib.rs is touched, `cargo doc`
+// invokes rustdoc, and `cargo run` works once a main.rs is added.
+#[test]
+fn testing_and_such() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .build();
+
+    println!("build");
+    p.cargo("build -v").run();
+    p.root().move_into_the_past();
+
+    // Truncate src/lib.rs to mark it dirty for the next command.
+    File::create(&p.root().join("src/lib.rs")).unwrap();
+    p.root().move_into_the_past();
+
+    println!("test");
+    p.cargo("test -vj1")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]`
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]`",
+        ).with_stdout_contains_n("running 0 tests", 2)
+        .run();
+
+    println!("doc");
+    p.cargo("doc -v")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.5.0 ([CWD])
+[RUNNING] `rustdoc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    File::create(&p.root().join("src/main.rs"))
+        .unwrap()
+        .write_all(b"fn main() {}")
+        .unwrap();
+    println!("run");
+    p.cargo("run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+        ).run();
+}
+
+// -L flags from `cargo:rustc-flags` in a's build script, plus -L flags from
+// a config override of b (whose build.rs is deliberately broken), must
+// propagate up to both a's and foo's rustc invocations.
+#[test]
+fn propagation_of_l_flags() {
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "bar"
+            build = "build.rs"
+
+            [dependencies.b]
+            path = "../b"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            r#"fn main() { println!("cargo:rustc-flags=-L bar"); }"#,
+        ).file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file("b/build.rs", "bad file")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.foo]
+            rustc-flags = "-L foo"
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("build -v -j1")
+        .with_stderr_contains(
+            "\
+[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
+",
+        ).run();
+}
+
+// Same propagation as propagation_of_l_flags, but via the newer directives:
+// `cargo:rustc-link-search` in the script and `rustc-link-search = [...]`
+// in the config override.
+#[test]
+fn propagation_of_l_flags_new() {
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "bar"
+            build = "build.rs"
+
+            [dependencies.b]
+            path = "../b"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=bar");
+            }
+        "#,
+        ).file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file("b/build.rs", "bad file")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.foo]
+            rustc-link-search = ["foo"]
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("build -v -j1")
+        .with_stderr_contains(
+            "\
+[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
+",
+        ).run();
+}
+
+// A [build-dependencies] crate must be compiled first and linked into the
+// build script (`--extern a=...`), but not into the library itself.
+#[test]
+fn build_deps_simple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+            [build-dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate a;
+            fn main() {}
+        ",
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] a v0.5.0 ([CWD]/a)
+[RUNNING] `rustc --crate-name a [..]`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..] --extern a=[..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Build-dependencies are host-only: when src/lib.rs tries to
+// `extern crate aaaaa` while cross-building (--target), compilation of the
+// library must fail with "can't find crate", even though build.rs can use it.
+#[test]
+fn build_deps_not_for_normal() {
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+            [build-dependencies.aaaaa]
+            path = "a"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate aaaaa;",
+        ).file(
+            "build.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate aaaaa;
+            fn main() {}
+        ",
+        ).file("a/Cargo.toml", &basic_manifest("aaaaa", "0.5.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v --target")
+        .arg(&target)
+        .with_status(101)
+        .with_stderr_contains("[..]can't find crate for `aaaaa`[..]")
+        .with_stderr_contains(
+            "\
+[ERROR] Could not compile `foo`.
+
+Caused by:
+  process didn't exit successfully: [..]
+",
+        ).run();
+}
+
+// A build-dependency that itself has a build script (foo -> a -> b): checks
+// the full ordering of compiles and script runs, with exact rustc command
+// lines for a and foo.
+#[test]
+fn build_cmd_with_a_build_cmd() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate a;
+            fn main() {}
+        ",
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.b]
+            path = "../b"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+        ).file("b/Cargo.toml", &basic_manifest("b", "0.5.0"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] b v0.5.0 ([CWD]/b)
+[RUNNING] `rustc --crate-name b [..]`
+[COMPILING] a v0.5.0 ([CWD]/a)
+[RUNNING] `rustc [..] a/build.rs [..] --extern b=[..]`
+[RUNNING] `[..]/a-[..]/build-script-build`
+[RUNNING] `rustc --crate-name a [..]lib.rs --color never --crate-type lib \
+    --emit=dep-info,link -C debuginfo=2 \
+    -C metadata=[..] \
+    --out-dir [..]target/debug/deps \
+    -L [..]target/debug/deps`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs --color never --crate-type bin \
+    --emit=dep-info,link \
+    -C debuginfo=2 -C metadata=[..] --out-dir [..] \
+    -L [..]target/debug/deps \
+    --extern a=[..]liba[..].rlib`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]lib.rs --color never --crate-type lib \
+    --emit=dep-info,link -C debuginfo=2 \
+    -C metadata=[..] \
+    --out-dir [..] \
+    -L [..]target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// A file written into OUT_DIR by the first build must survive rebuilds: the
+// rewritten build script opens the file created by the original one, and
+// subsequent fresh builds still succeed.
+#[test]
+fn out_dir_is_preserved() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::fs::File;
+            use std::path::Path;
+            fn main() {
+                let out = env::var("OUT_DIR").unwrap();
+                File::create(Path::new(&out).join("foo")).unwrap();
+            }
+        "#,
+        ).build();
+
+    // Make the file
+    p.cargo("build -v").run();
+    p.root().move_into_the_past();
+
+    // Change to asserting that it's there
+    File::create(&p.root().join("build.rs"))
+        .unwrap()
+        .write_all(
+            br#"
+        use std::env;
+        use std::old_io::File;
+        fn main() {
+            let out = env::var("OUT_DIR").unwrap();
+            File::open(&Path::new(&out).join("foo")).unwrap();
+        }
+    "#,
+        ).unwrap();
+    p.root().move_into_the_past();
+    p.cargo("build -v").run();
+
+    // Run a fresh build where file should be preserved
+    p.cargo("build -v").run();
+
+    // One last time to make sure it's still there.
+    File::create(&p.root().join("foo")).unwrap();
+    p.cargo("build -v").run();
+}
+
+// Two separate `cargo:rustc-flags` lines (-L and -l) must both be appended
+// to foo's rustc invocation; the build then fails only because the named
+// static library does not actually exist.
+#[test]
+fn output_separate_lines() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-flags=-L foo");
+                println!("cargo:rustc-flags=-l static=foo");
+            }
+        "#,
+        ).build();
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
+[ERROR] could not find native static library [..]
+",
+        ).run();
+}
+
+// Same as output_separate_lines, but via the newer `cargo:rustc-link-search`
+// and `cargo:rustc-link-lib` directives.
+#[test]
+fn output_separate_lines_new() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=foo");
+                println!("cargo:rustc-link-lib=static=foo");
+            }
+        "#,
+        ).build();
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
+[ERROR] could not find native static library [..]
+",
+        ).run();
+}
+
+// Build-script code generation: build.rs writes hello.rs into OUT_DIR, and
+// main.rs pulls it in with include!(concat!(env!("OUT_DIR"), ...)). Both
+// `cargo run` (checking stdout) and `cargo test` must work.
+#[cfg(not(windows))] // FIXME(#867)
+#[test]
+fn code_generation() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+            fn main() {
+                println!("{}", message());
+            }
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::fs::File;
+            use std::io::prelude::*;
+            use std::path::PathBuf;
+
+            fn main() {
+                let dst = PathBuf::from(env::var("OUT_DIR").unwrap());
+                let mut f = File::create(&dst.join("hello.rs")).unwrap();
+                f.write_all(b"
+                    pub fn message() -> &'static str {
+                        \"Hello, World!\"
+                    }
+                ").unwrap();
+            }
+        "#,
+        ).build();
+
+    p.cargo("run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo`",
+        ).with_stdout("Hello, World!")
+        .run();
+
+    p.cargo("test").run();
+}
+
+// Smoke test: a package with a trivial build script builds cleanly in
+// --release mode.
+#[test]
+fn release_with_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {}
+        "#,
+        ).build();
+
+    p.cargo("build -v --release").run();
+}
+
+// A package consisting of only a build script (no lib or bin target) is an
+// invalid manifest; cargo must reject it with "no targets specified".
+#[test]
+fn build_script_only() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+              [project]
+              name = "foo"
+              version = "0.0.0"
+              authors = []
+              build = "build.rs"
+        "#,
+        ).file("build.rs", r#"fn main() {}"#)
+        .build();
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  no targets specified in the manifest
+  either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present",
+        ).run();
+}
+
+// Crate `a` (which has a build script) is reachable both as a normal
+// dependency of foo and through build-dependency b; the build must still
+// succeed.
+#[test]
+fn shared_dep_with_a_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.a]
+            path = "a"
+
+            [build-dependencies.b]
+            path = "b"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("a/build.rs", "fn main() {}")
+        .file("a/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies.a]
+            path = "../a"
+        "#,
+        ).file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build -v").run();
+}
+
+// Build-dependency `b` is a plugin (compiled for the host) and depends on
+// `a`, which uses `links = "foo"` and a build script; the build must
+// succeed with `a` reached transitively through the host graph.
+#[test]
+fn transitive_dep_host() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.b]
+            path = "b"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("a/build.rs", "fn main() {}")
+        .file("a/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+
+            [lib]
+            name = "b"
+            plugin = true
+
+            [dependencies.a]
+            path = "../a"
+        "#,
+        ).file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build").run();
+}
+
+// The build script generates `foo.rs` into OUT_DIR; the library pulls it in
+// via include!(concat!(env!("OUT_DIR"), ...)) and `cargo test` (including
+// the doctest on `bar`) must pass.
+#[test]
+fn test_a_lib_with_a_build_command() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+
+            /// ```
+            /// foo::bar();
+            /// ```
+            pub fn bar() {
+                assert_eq!(foo(), 1);
+            }
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::io::prelude::*;
+            use std::fs::File;
+            use std::path::PathBuf;
+
+            fn main() {
+                let out = PathBuf::from(env::var("OUT_DIR").unwrap());
+                File::create(out.join("foo.rs")).unwrap().write_all(b"
+                    fn foo() -> i32 { 1 }
+                ").unwrap();
+            }
+        "#,
+        ).build();
+    p.cargo("test").run();
+}
+
+// A dev-dependency with its own build script must be compiled (and its
+// build script run) when `cargo test` is invoked on the parent package.
+#[test]
+fn test_dev_dep_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dev-dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("a/build.rs", "fn main() {}")
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("test").run();
+}
+
+// A build script can link against a dynamic native library produced by a
+// sibling workspace member: `builder` is compiled as a dylib exporting
+// `foo`, then `foo`'s build-dependency `bar` declares a link search path
+// (via the SRC env var pointing at builder's target dir) and calls into it.
+#[test]
+fn build_script_with_dynamic_native_dependency() {
+    let _workspace = project()
+        .at("ws")
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["builder", "foo"]
+        "#,
+        ).build();
+
+    let build = project()
+        .at("ws/builder")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "builder"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "builder"
+            crate-type = ["dylib"]
+            plugin = true
+        "#,
+        ).file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}")
+        .build();
+
+    let foo = project()
+        .at("ws/foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("build.rs", "extern crate bar; fn main() { bar::bar() }")
+        .file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "bar/build.rs",
+            r#"
+            use std::env;
+            use std::path::PathBuf;
+
+            fn main() {
+                // SRC is injected by the test below; it points at the
+                // `builder` project root so the dylib can be located.
+                let src = PathBuf::from(env::var("SRC").unwrap());
+                println!("cargo:rustc-link-search=native={}/target/debug/deps",
+                         src.display());
+            }
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            pub fn bar() {
+                #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))]
+                #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))]
+                extern { fn foo(); }
+                unsafe { foo() }
+            }
+        "#,
+        ).build();
+
+    build
+        .cargo("build -v")
+        .env("RUST_LOG", "cargo::ops::cargo_rustc")
+        .run();
+
+    foo.cargo("build -v")
+        .env("SRC", build.root())
+        .env("RUST_LOG", "cargo::ops::cargo_rustc")
+        .run();
+}
+
+// Under `cargo bench` the build script must observe the bench/release
+// profile: OPT_LEVEL=3, PROFILE=release, DEBUG=false. The build script's
+// own assert_eq! calls fail the build if any variable is wrong.
+#[test]
+fn profile_and_opt_level_set_correctly() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+              use std::env;
+
+              fn main() {
+                  assert_eq!(env::var("OPT_LEVEL").unwrap(), "3");
+                  assert_eq!(env::var("PROFILE").unwrap(), "release");
+                  assert_eq!(env::var("DEBUG").unwrap(), "false");
+              }
+        "#,
+        ).build();
+    p.cargo("bench").run();
+}
+
+// With `[profile.dev] debug = 0`, a plain `cargo build` build script must
+// see OPT_LEVEL=0, PROFILE=debug, and DEBUG=false (debug = 0 disables
+// debuginfo). NOTE(review): the manifest has no `build =` key, so this
+// presumably relies on automatic build.rs detection — confirm.
+#[test]
+fn profile_debug_0() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [profile.dev]
+            debug = 0
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+              use std::env;
+
+              fn main() {
+                  assert_eq!(env::var("OPT_LEVEL").unwrap(), "0");
+                  assert_eq!(env::var("PROFILE").unwrap(), "debug");
+                  assert_eq!(env::var("DEBUG").unwrap(), "false");
+              }
+        "#,
+        ).build();
+    p.cargo("build").run();
+}
+
+// `lto = true` in the dev profile must not break compilation of a package
+// that also has a build script.
+#[test]
+fn build_script_with_lto() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [profile.dev]
+            lto = true
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .build();
+    p.cargo("build").run();
+}
+
+// The same path dependency (`bar`) may appear both under [dependencies]
+// (used by src/main.rs) and [build-dependencies] (used by build.rs);
+// both consumers must compile and link successfully.
+#[test]
+fn test_duplicate_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.bar]
+            path = "bar"
+
+            [build-dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate bar;
+            fn main() { bar::do_nothing() }
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            extern crate bar;
+            fn main() { bar::do_nothing() }
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn do_nothing() {}")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// A `cargo:rustc-cfg=foo` directive from the build script must activate
+// the `#[cfg(foo)]` gate on `fn main`, otherwise the binary has no main
+// and the build fails.
+#[test]
+fn cfg_feedback() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", "#[cfg(foo)] fn main() {}")
+        .file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#,
+        ).build();
+    p.cargo("build -v").run();
+}
+
+// A `[target.<triple>.<links-name>]` override in .cargo/config can supply
+// `rustc-cfg` values in place of running the (empty) build script; the
+// overridden cfg must still enable `#[cfg(foo)] fn main`.
+#[test]
+fn cfg_override() {
+    let target = rustc_host();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", "#[cfg(foo)] fn main() {}")
+        .file("build.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.a]
+            rustc-cfg = ["foo"]
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("build -v").run();
+}
+
+// A build-script-provided cfg must be passed to every compilation under
+// `cargo test`: the lib, the lib's test build, the integration test, and
+// the doctest run. Verified by the expected `--cfg foo` occurrences in
+// stderr plus the three passing tests counted on stdout.
+#[test]
+fn cfg_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            ///
+            /// ```
+            /// extern crate foo;
+            ///
+            /// fn main() {
+            ///     foo::foo()
+            /// }
+            /// ```
+            ///
+            #[cfg(foo)]
+            pub fn foo() {}
+
+            #[cfg(foo)]
+            #[test]
+            fn test_foo() {
+                foo()
+            }
+        "#,
+        ).file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
+        .build();
+    p.cargo("test -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] [..] build.rs [..]
+[RUNNING] `[..]/build-script-build`
+[RUNNING] [..] --cfg foo[..]
+[RUNNING] [..] --cfg foo[..]
+[RUNNING] [..] --cfg foo[..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[RUNNING] `[..]/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] [..] --cfg foo[..]",
+        ).with_stdout_contains("test test_foo ... ok")
+        .with_stdout_contains("test test_bar ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 3)
+        .run();
+}
+
+// Build-script cfgs must also apply during `cargo doc`: both foo's and its
+// dependency bar's cfg-gated functions must appear in the generated docs.
+#[test]
+fn cfg_doc() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#,
+        ).file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "bar/build.rs",
+            r#"fn main() { println!("cargo:rustc-cfg=bar"); }"#,
+        ).file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}")
+        .build();
+    p.cargo("doc").run();
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+    assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+}
+
+// Same coverage as cfg_test, but the cfg comes from a .cargo/config
+// `links` override rather than a running build script; note the stderr
+// expectation has no build-script-build invocation.
+#[test]
+fn cfg_override_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+            links = "a"
+        "#,
+        ).file("build.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.a]
+            rustc-cfg = ["foo"]
+        "#,
+                rustc_host()
+            ),
+        ).file(
+            "src/lib.rs",
+            r#"
+            ///
+            /// ```
+            /// extern crate foo;
+            ///
+            /// fn main() {
+            ///     foo::foo()
+            /// }
+            /// ```
+            ///
+            #[cfg(foo)]
+            pub fn foo() {}
+
+            #[cfg(foo)]
+            #[test]
+            fn test_foo() {
+                foo()
+            }
+        "#,
+        ).file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
+        .build();
+    p.cargo("test -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..]`
+[RUNNING] `[..]`
+[RUNNING] `[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[RUNNING] `[..]/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] [..] --cfg foo[..]",
+        ).with_stdout_contains("test test_foo ... ok")
+        .with_stdout_contains("test test_bar ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 3)
+        .run();
+}
+
+// `cargo doc` with config overrides for two different `links` keys
+// (foo -> cfg foo, bar -> cfg bar) must document both cfg-gated items.
+#[test]
+fn cfg_override_doc() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+            links = "a"
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{target}.a]
+            rustc-cfg = ["foo"]
+            [target.{target}.b]
+            rustc-cfg = ["bar"]
+        "#,
+                target = rustc_host()
+            ),
+        ).file("build.rs", "")
+        .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+            links = "b"
+        "#,
+        ).file("bar/build.rs", "")
+        .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}")
+        .build();
+    p.cargo("doc").run();
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+    assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+}
+
+// `cargo:rustc-env=FOO=foo` from the build script must make env!("FOO")
+// resolve at compile time; `cargo run` then prints the injected value.
+#[test]
+fn env_build() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            const FOO: &'static str = env!("FOO");
+            fn main() {
+                println!("{}", FOO);
+            }
+        "#,
+        ).file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#,
+        ).build();
+    p.cargo("build -v").run();
+    p.cargo("run -v").with_stdout("foo\n").run();
+}
+
+// The rustc-env value must be visible in every `cargo test` compilation
+// (lib, test builds, doctest); the integration test asserts the value
+// round-trips through env!().
+#[test]
+fn env_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#,
+        ).file(
+            "src/lib.rs",
+            r#"pub const FOO: &'static str = env!("FOO"); "#,
+        ).file(
+            "tests/test.rs",
+            r#"
+            extern crate foo;
+
+            #[test]
+            fn test_foo() {
+                assert_eq!("foo", foo::FOO);
+            }
+        "#,
+        ).build();
+    p.cargo("test -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] [..] build.rs [..]
+[RUNNING] `[..]/build-script-build`
+[RUNNING] [..] --crate-name foo[..]
+[RUNNING] [..] --crate-name foo[..]
+[RUNNING] [..] --crate-name test[..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`
+[RUNNING] `[..]/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] [..] --crate-name foo[..]",
+        ).with_stdout_contains_n("running 0 tests", 2)
+        .with_stdout_contains("test test_foo ... ok")
+        .run();
+}
+
+// The rustc-env value must also be present when documenting: the env!()
+// in src/main.rs would fail `cargo doc -v` if FOO were missing.
+#[test]
+fn env_doc() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            const FOO: &'static str = env!("FOO");
+            fn main() {}
+        "#,
+        ).file(
+            "build.rs",
+            r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#,
+        ).build();
+    p.cargo("doc -v").run();
+}
+
+// `cargo:rustc-link-search=test` emitted by transitive dependency `a`
+// must propagate (`-L test`) to every downstream compilation, including
+// the integration test build, and to `cargo test -p b --lib` where `a`
+// is only rebuilt as FRESH.
+#[test]
+fn flags_go_into_tests() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            b = { path = "b" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("tests/foo.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            a = { path = "../a" }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=test");
+            }
+        "#,
+        ).build();
+
+    p.cargo("test -v --test=foo")
+        .with_stderr(
+            "\
+[COMPILING] a v0.5.0 ([..]
+[RUNNING] `rustc [..] a/build.rs [..]`
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] a/src/lib.rs [..] -L test[..]`
+[COMPILING] b v0.5.0 ([..]
+[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]`
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] src/lib.rs [..] -L test[..]`
+[RUNNING] `rustc [..] tests/foo.rs [..] -L test[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/foo-[..][EXE]`",
+        ).with_stdout_contains("running 0 tests")
+        .run();
+
+    p.cargo("test -v -pb --lib")
+        .with_stderr(
+            "\
+[FRESH] a v0.5.0 ([..]
+[COMPILING] b v0.5.0 ([..]
+[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]/b-[..][EXE]`",
+        ).with_stdout_contains("running 0 tests")
+        .run();
+}
+
+// Diamond dependency graph (foo -> a,b; a -> b,c; b -> c) where only `c`
+// emits a link-search flag: the expected stderr pins `-L native=test` to
+// appear exactly once on the final link invocation, not duplicated per path
+// through the diamond.
+#[test]
+fn diamond_passes_args_only_once() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+            b = { path = "b" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("tests/foo.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            b = { path = "../b" }
+            c = { path = "../c" }
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            c = { path = "../c" }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file(
+            "c/Cargo.toml",
+            r#"
+            [project]
+            name = "c"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "c/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=native=test");
+            }
+        "#,
+        ).file("c/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] c v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[RUNNING] `rustc [..]`
+[COMPILING] b v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[COMPILING] a v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `[..]rlib -L native=test`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Adding a `[target.<triple>.foo]` override to .cargo/config after a
+// successful build must invalidate the fingerprint: the rebuild skips the
+// build script and uses the override's `-L native=bar` instead of the
+// script's `-L native=foo`.
+#[test]
+fn adding_an_override_invalidates() {
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(".cargo/config", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=native=foo");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[RUNNING] `rustc [..] -L native=foo`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Rewrite the (previously empty) config to introduce the override.
+    File::create(p.root().join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            format!(
+                "
+        [target.{}.foo]
+        rustc-link-search = [\"native=bar\"]
+    ",
+                target
+            ).as_bytes(),
+        ).unwrap();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] -L native=bar`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Changing the value of an existing config override (native=foo ->
+// native=bar) must also invalidate the fingerprint and recompile with the
+// new flag.
+#[test]
+fn changing_an_override_invalidates() {
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                "
+            [target.{}.foo]
+            rustc-link-search = [\"native=foo\"]
+        ",
+                target
+            ),
+        ).file("build.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] -L native=foo`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Overwrite the override with a different search path.
+    File::create(p.root().join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            format!(
+                "
+        [target.{}.foo]
+        rustc-link-search = [\"native=bar\"]
+    ",
+                target
+            ).as_bytes(),
+        ).unwrap();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..] -L native=bar`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Regression test: with an unchanged config override that sets link-lib /
+// link-search / rustc-flags, the second build must be FRESH (no spurious
+// fingerprint invalidation).
+#[test]
+fn fresh_builds_possible_with_link_libs() {
+    // The bug is non-deterministic. Sometimes you can get a fresh build
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "nativefoo"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                "
+            [target.{}.nativefoo]
+            rustc-link-lib = [\"a\"]
+            rustc-link-search = [\"./b\"]
+            rustc-flags = \"-l z -L ./\"
+        ",
+                target
+            ),
+        ).file("build.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build -v")
+        .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Regression test: multiple metadata keys in a single override (a..e) must
+// hash deterministically so the second build is FRESH rather than
+// spuriously dirty.
+#[test]
+fn fresh_builds_possible_with_multiple_metadata_overrides() {
+    // The bug is non-deterministic. Sometimes you can get a fresh build
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                "
+            [target.{}.foo]
+            a = \"\"
+            b = \"\"
+            c = \"\"
+            d = \"\"
+            e = \"\"
+        ",
+                target
+            ),
+        ).file("build.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build -v")
+        .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint=info")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Exercises `cargo:rerun-if-changed` semantics end to end:
+//  - while the listed files ("foo", "bar") don't exist, the build script
+//    reruns on every build;
+//  - once they exist, the script runs once more to record mtimes, then the
+//    project is FRESH;
+//  - touching an unlisted file ("baz") keeps it FRESH;
+//  - recreating "foo" or deleting "bar" dirties the build again.
+// sleep_ms calls keep file mtimes distinguishable on coarse-grained
+// filesystems.
+#[test]
+fn rebuild_only_on_explicit_paths() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rerun-if-changed=foo");
+                println!("cargo:rerun-if-changed=bar");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -v").run();
+
+    // files don't exist, so should always rerun if they don't exist
+    println!("run without");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    sleep_ms(1000);
+    File::create(p.root().join("foo")).unwrap();
+    File::create(p.root().join("bar")).unwrap();
+    sleep_ms(1000); // make sure the to-be-created outfile has a timestamp distinct from the infiles
+
+    // now the exist, so run once, catch the mtime, then shouldn't run again
+    println!("run with");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    println!("run with2");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    sleep_ms(1000);
+
+    // random other files do not affect freshness
+    println!("run baz");
+    File::create(p.root().join("baz")).unwrap();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // but changing dependent files does
+    println!("run foo change");
+    File::create(p.root().join("foo")).unwrap();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // .. as does deleting a file
+    println!("run foo delete");
+    fs::remove_file(p.root().join("bar")).unwrap();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Link-search flags emitted by a dependency's build script must also reach
+// the `rustdoc --test` invocation for doctests (`-L native=bar`).
+#[test]
+fn doctest_receives_build_link_args() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "bar"
+            build = "build.rs"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=native=bar");
+            }
+        "#,
+        ).build();
+
+    p.cargo("test -v")
+        .with_stderr_contains(
+            "[RUNNING] `rustdoc --test [..] --crate-name foo [..]-L native=bar[..]`",
+        ).run();
+}
+
+// When both the root package and its dependency emit link-search flags,
+// the final rustc invocation must order them DAG-style: the root's
+// `-L native=foo` before the dependency's `-L native=bar`.
+#[test]
+fn please_respect_the_dag() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies]
+            a = { path = 'a' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=native=foo");
+            }
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "bar"
+            build = "build.rs"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=native=bar");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -v")
+        .with_stderr_contains("[RUNNING] `rustc [..] -L native=foo -L native=bar[..]`")
+        .run();
+}
+
+// Build-script stdout may contain non-UTF-8 bytes; cargo must still pick
+// out the valid `cargo:rustc-cfg=foo` directive sandwiched between the
+// garbage lines and complete the build.
+#[test]
+fn non_utf8_output() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            use std::io::prelude::*;
+
+            fn main() {
+                let mut out = std::io::stdout();
+                // print something that's not utf8
+                out.write_all(b"\xff\xff\n").unwrap();
+
+                // now print some cargo metadata that's utf8
+                println!("cargo:rustc-cfg=foo");
+
+                // now print more non-utf8
+                out.write_all(b"\xff\xff\n").unwrap();
+            }
+        "#,
+        ).file("src/main.rs", "#[cfg(foo)] fn main() {}")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn custom_target_dir() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            target-dir = 'test'
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("a/build.rs", "fn main() {}")
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn panic_abort_with_build_scripts() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [profile.release]
+            panic = 'abort'
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate a;",
+        ).file("build.rs", "fn main() {}")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies]
+            b = { path = "../b" }
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+        ).file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+        "#,
+        ).file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v --release").run();
+
+    p.root().join("target").rm_rf();
+
+    p.cargo("test --release -v")
+        .with_stderr_does_not_contain("[..]panic[..]")
+        .run();
+}
+
+#[test]
+fn warnings_emitted() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:warning=foo");
+                println!("cargo:warning=bar");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+warning: foo
+warning: bar
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn warnings_hidden_for_upstream() {
+    Package::new("bar", "0.1.0")
+        .file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:warning=foo");
+                    println!("cargo:warning=bar");
+                }
+            "#,
+        ).file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "bar"
+                version = "0.1.0"
+                authors = []
+                build = "build.rs"
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[COMPILING] bar v0.1.0
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[RUNNING] `rustc [..]`
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn warnings_printed_on_vv() {
+    Package::new("bar", "0.1.0")
+        .file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:warning=foo");
+                    println!("cargo:warning=bar");
+                }
+            "#,
+        ).file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "bar"
+                version = "0.1.0"
+                authors = []
+                build = "build.rs"
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -vv")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[COMPILING] bar v0.1.0
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+warning: foo
+warning: bar
+[RUNNING] `rustc [..]`
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn output_shows_on_vv() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::io::prelude::*;
+
+            fn main() {
+                std::io::stderr().write_all(b"stderr\n").unwrap();
+                std::io::stdout().write_all(b"stdout\n").unwrap();
+            }
+        "#,
+        ).build();
+
+    p.cargo("build -vv")
+        .with_stdout("[foo 0.5.0] stdout")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `rustc [..]`
+[RUNNING] `[..]`
+[foo 0.5.0] stderr
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn links_with_dots() {
+    let target = rustc_host();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+            links = "a.b"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-link-search=bar")
+            }
+        "#,
+        ).file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.'a.b']
+            rustc-link-search = ["foo"]
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("build -v")
+        .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`")
+        .run();
+}
+
+#[test]
+fn rustc_and_rustdoc_set_correctly() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+              use std::env;
+
+              fn main() {
+                  assert_eq!(env::var("RUSTC").unwrap(), "rustc");
+                  assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc");
+              }
+        "#,
+        ).build();
+    p.cargo("bench").run();
+}
+
+#[test]
+fn cfg_env_vars_available() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+
+            fn main() {
+                let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap();
+                if cfg!(unix) {
+                    assert_eq!(fam, "unix");
+                } else {
+                    assert_eq!(fam, "windows");
+                }
+            }
+        "#,
+        ).build();
+    p.cargo("bench").run();
+}
+
+#[test]
+fn switch_features_rerun() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [features]
+            foo = []
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                println!(include_str!(concat!(env!("OUT_DIR"), "/output")));
+            }
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::fs::File;
+            use std::io::Write;
+            use std::path::Path;
+
+            fn main() {
+                let out_dir = env::var_os("OUT_DIR").unwrap();
+                let out_dir = Path::new(&out_dir).join("output");
+                let mut f = File::create(&out_dir).unwrap();
+
+                if env::var_os("CARGO_FEATURE_FOO").is_some() {
+                    f.write_all(b"foo").unwrap();
+                } else {
+                    f.write_all(b"bar").unwrap();
+                }
+            }
+        "#,
+        ).build();
+
+    p.cargo("run -v --features=foo").with_stdout("foo\n").run();
+    p.cargo("run -v").with_stdout("bar\n").run();
+    p.cargo("run -v --features=foo").with_stdout("foo\n").run();
+}
+
+#[test]
+fn assume_build_script_when_build_rs_present() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if ! cfg!(foo) {
+                    panic!("the build script was not run");
+                }
+            }
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-cfg=foo");
+            }
+        "#,
+        ).build();
+
+    p.cargo("run -v").run();
+}
+
+#[test]
+fn if_build_set_to_false_dont_treat_build_rs_as_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = false
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if cfg!(foo) {
+                    panic!("the build script was run");
+                }
+            }
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-cfg=foo");
+            }
+        "#,
+        ).build();
+
+    p.cargo("run -v").run();
+}
+
+#[test]
+fn deterministic_rustc_dependency_flags() {
+    // This bug is non-deterministic, so a large number of dependencies is
+    // used in the hope of greatly increasing the chance of triggering it.
+
+    Package::new("dep1", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "dep1"
+                version = "0.1.0"
+                authors = []
+                build = "build.rs"
+            "#,
+        ).file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:rustc-flags=-L native=test1");
+                }
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+    Package::new("dep2", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "dep2"
+                version = "0.1.0"
+                authors = []
+                build = "build.rs"
+            "#,
+        ).file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:rustc-flags=-L native=test2");
+                }
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+    Package::new("dep3", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "dep3"
+                version = "0.1.0"
+                authors = []
+                build = "build.rs"
+            "#,
+        ).file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:rustc-flags=-L native=test3");
+                }
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+    Package::new("dep4", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "dep4"
+                version = "0.1.0"
+                authors = []
+                build = "build.rs"
+            "#,
+        ).file(
+            "build.rs",
+            r#"
+                fn main() {
+                    println!("cargo:rustc-flags=-L native=test4");
+                }
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "*"
+            dep2 = "*"
+            dep3 = "*"
+            dep4 = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr_contains(
+            "\
+[RUNNING] `rustc --crate-name foo [..] -L native=test1 -L native=test2 \
+-L native=test3 -L native=test4`
+",
+        ).run();
+}
+
+#[test]
+fn links_duplicates_with_cycle() {
+    // This tests that duplicated `links` attributes are caught at resolve time.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+
+            [dependencies.a]
+            path = "a"
+
+            [dev-dependencies]
+            b = { path = "b" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "a"
+            build = "build.rs"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file("a/build.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = ".." }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").with_status(101)
+                       .with_stderr("\
+error: failed to select a version for `a`.
+    ... required by package `foo v0.5.0 ([..])`
+versions that meet the requirements `*` are: 0.5.0
+
+the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well:
+package `foo v0.5.0 ([..])`
+
+failed to select a version for `a` which could resolve this conflict
+").run();
+}
+
+#[test]
+fn rename_with_link_search_path() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [lib]
+            crate-type = ["cdylib"]
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[no_mangle] pub extern fn cargo_test_foo() {}",
+        );
+    let p = p.build();
+
+    p.cargo("build").run();
+
+    let p2 = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::fs;
+            use std::path::PathBuf;
+
+            fn main() {
+                // Move the `libfoo.so` from the root of our project into the
+                // build directory. This way Cargo should automatically manage
+                // `LD_LIBRARY_PATH` and such.
+                let root = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
+                let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+                let src = root.join(&file);
+
+                let dst_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+                let dst = dst_dir.join(&file);
+
+                fs::copy(&src, &dst).unwrap();
+                // handle windows, like below
+                drop(fs::copy(root.join("foo.dll.lib"), dst_dir.join("foo.dll.lib")));
+
+                println!("cargo:rerun-if-changed=build.rs");
+                if cfg!(target_env = "msvc") {
+                    println!("cargo:rustc-link-lib=foo.dll");
+                } else {
+                    println!("cargo:rustc-link-lib=foo");
+                }
+                println!("cargo:rustc-link-search={}",
+                         dst.parent().unwrap().display());
+            }
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern {
+                #[link_name = "cargo_test_foo"]
+                fn foo();
+            }
+
+            fn main() {
+                unsafe { foo(); }
+            }
+        "#,
+        );
+    let p2 = p2.build();
+
+    // Move the output `libfoo.so` into the directory of `p2`, and then delete
+    // the `p` project. On OSX the `libfoo.dylib` artifact references the
+    // original path in `p` so we want to make sure that it can't find it (hence
+    // the deletion).
+    let root = p.root().join("target").join("debug").join("deps");
+    let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
+    let src = root.join(&file);
+
+    let dst = p2.root().join(&file);
+
+    fs::copy(&src, &dst).unwrap();
+    // Copy the Windows import library as well, if it exists.
+    drop(fs::copy(
+        &root.join("foo.dll.lib"),
+        p2.root().join("foo.dll.lib"),
+    ));
+    remove_dir_all(p.root()).unwrap();
+
+    // Everything should work the first time
+    p2.cargo("run").run();
+
+    // Now rename the root directory and rerun `cargo run`. Not only should we
+    // not build anything but we also shouldn't crash.
+    let mut new = p2.root();
+    new.pop();
+    new.push("bar2");
+
+    // For whatever reason on Windows right after we execute a binary it's very
+    // unlikely that we're able to successfully delete or rename that binary.
+    // It's not really clear why this is the case or if it's a bug in Cargo
+    // holding a handle open too long. In an effort to reduce the flakiness of
+    // this test though we throw this in a loop
+    //
+    // For some more information see #5481 and rust-lang/rust#48775
+    let mut i = 0;
+    loop {
+        let error = match fs::rename(p2.root(), &new) {
+            Ok(()) => break,
+            Err(e) => e,
+        };
+        i += 1;
+        if !cfg!(windows) || error.kind() != io::ErrorKind::PermissionDenied || i > 10 {
+            panic!("failed to rename: {}", error);
+        }
+        println!("assuming {} is spurious, waiting to try again", error);
+        thread::sleep(Duration::from_millis(100));
+    }
+
+    p2.cargo("run")
+        .cwd(&new)
+        .with_stderr(
+            "\
+[FINISHED] [..]
+[RUNNING] [..]
+",
+        ).run();
+}
+
+#[test]
+fn optional_build_script_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.5.0"
+                authors = []
+
+                [dependencies]
+                bar = { path = "bar", optional = true }
+
+                [build-dependencies]
+                bar = { path = "bar", optional = true }
+            "#,
+        ).file(
+            "build.rs",
+            r#"
+            #[cfg(feature = "bar")]
+            extern crate bar;
+
+            fn main() {
+                #[cfg(feature = "bar")] {
+                    println!("cargo:rustc-env=FOO={}", bar::bar());
+                    return
+                }
+                println!("cargo:rustc-env=FOO=0");
+            }
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+                #[cfg(feature = "bar")]
+                extern crate bar;
+
+                fn main() {
+                    println!("{}", env!("FOO"));
+                }
+            "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }");
+    let p = p.build();
+
+    p.cargo("run").with_stdout("0\n").run();
+    p.cargo("run --features bar").with_stdout("1\n").run();
+}
+
+#[test]
+fn optional_build_dep_and_required_normal_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "./bar", optional = true }
+
+            [build-dependencies]
+            bar = { path = "./bar" }
+            "#,
+        ).file("build.rs", "extern crate bar; fn main() { bar::bar(); }")
+        .file(
+            "src/main.rs",
+            r#"
+                #[cfg(feature = "bar")]
+                extern crate bar;
+
+                fn main() {
+                    #[cfg(feature = "bar")] {
+                        println!("{}", bar::bar());
+                    }
+                    #[cfg(not(feature = "bar"))] {
+                        println!("0");
+                    }
+                }
+            "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }");
+    let p = p.build();
+
+    p.cargo("run")
+        .with_stdout("0")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.5.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]foo[EXE]`",
+        ).run();
+
+    p.cargo("run --all-features")
+        .with_stdout("1")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]foo[EXE]`",
+        ).run();
+}
diff --git a/tests/testsuite/build_script_env.rs b/tests/testsuite/build_script_env.rs
new file mode 100644 (file)
index 0000000..2a7be1a
--- /dev/null
@@ -0,0 +1,99 @@
+use std::fs::File;
+
+use support::project;
+use support::sleep_ms;
+
+#[test]
+fn rerun_if_env_changes() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rerun-if-env-changed=FOO");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build")
+        .env("FOO", "bar")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build")
+        .env("FOO", "baz")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build")
+        .env("FOO", "baz")
+        .with_stderr("[FINISHED] [..]")
+        .run();
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn rerun_if_env_or_file_changes() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rerun-if-env-changed=FOO");
+                println!("cargo:rerun-if-changed=foo");
+            }
+        "#,
+        ).file("foo", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build")
+        .env("FOO", "bar")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build")
+        .env("FOO", "bar")
+        .with_stderr("[FINISHED] [..]")
+        .run();
+    sleep_ms(1000);
+    File::create(p.root().join("foo")).unwrap();
+    p.cargo("build")
+        .env("FOO", "bar")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
diff --git a/tests/testsuite/cargo_alias_config.rs b/tests/testsuite/cargo_alias_config.rs
new file mode 100644 (file)
index 0000000..6226adb
--- /dev/null
@@ -0,0 +1,145 @@
+use support::{basic_bin_manifest, project};
+
+#[test]
+fn alias_incorrect_config_type() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            b-cargo-test = 5
+        "#,
+        ).build();
+
+    p.cargo("b-cargo-test -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] invalid configuration for key `alias.b-cargo-test`
+expected a list, but found a integer for [..]",
+        ).run();
+}
+
+#[test]
+fn alias_default_config_overrides_config() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            b = "not_build"
+        "#,
+        ).build();
+
+    p.cargo("b -v")
+        .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+        .run();
+}
+
+#[test]
+fn alias_config() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            b-cargo-test = "build"
+        "#,
+        ).build();
+
+    p.cargo("b-cargo-test -v")
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.5.0 [..]
+[RUNNING] `rustc --crate-name foo [..]",
+        ).run();
+}
+
+#[test]
+fn recursive_alias() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r"fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            b-cargo-test = "build"
+            a-cargo-test = ["b-cargo-test", "-v"]
+        "#,
+        ).build();
+
+    p.cargo("a-cargo-test")
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.5.0 [..]
+[RUNNING] `rustc --crate-name foo [..]",
+        ).run();
+}
+
+#[test]
+fn alias_list_test() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            b-cargo-test = ["build", "--release"]
+         "#,
+        ).build();
+
+    p.cargo("b-cargo-test -v")
+        .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+        .with_stderr_contains("[RUNNING] `rustc --crate-name [..]")
+        .run();
+}
+
+#[test]
+fn alias_with_flags_config() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            b-cargo-test = "build --release"
+         "#,
+        ).build();
+
+    p.cargo("b-cargo-test -v")
+        .with_stderr_contains("[COMPILING] foo v0.5.0 [..]")
+        .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]")
+        .run();
+}
+
+#[test]
+fn cant_shadow_builtin() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [alias]
+            build = "fetch"
+         "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[WARNING] alias `build` is ignored, because it is shadowed by a built in command
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
diff --git a/tests/testsuite/cargo_command.rs b/tests/testsuite/cargo_command.rs
new file mode 100644 (file)
index 0000000..8e0b6e1
--- /dev/null
@@ -0,0 +1,320 @@
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+use std::str;
+
+use cargo;
+use support::cargo_process;
+use support::paths::{self, CargoPathExt};
+use support::registry::Package;
+use support::{basic_bin_manifest, basic_manifest, cargo_exe, project, Project};
+
+#[cfg_attr(windows, allow(dead_code))] // Symlink variant unused on Windows (see make_symlink)
+enum FakeKind<'a> {
+    Executable,
+    Symlink { target: &'a Path },
+}
+
+/// Add an empty file with executable flags (and platform-dependent suffix).
+/// TODO: move this to `Project` if other cases using this emerge.
+fn fake_file(proj: Project, dir: &Path, name: &str, kind: &FakeKind) -> Project {
+    let path = proj
+        .root()
+        .join(dir)
+        .join(&format!("{}{}", name, env::consts::EXE_SUFFIX)); // adds ".exe" on Windows
+    path.parent().unwrap().mkdir_p();
+    match *kind {
+        FakeKind::Executable => {
+            File::create(&path).unwrap();
+            make_executable(&path);
+        }
+        FakeKind::Symlink { target } => {
+            make_symlink(&path, target);
+        }
+    }
+    return proj; // items below are local helper fns, declared after the return
+
+    #[cfg(unix)]
+    fn make_executable(p: &Path) {
+        use std::os::unix::prelude::*;
+
+        let mut perms = fs::metadata(p).unwrap().permissions();
+        let mode = perms.mode();
+        perms.set_mode(mode | 0o111); // add execute bits for user/group/other
+        fs::set_permissions(p, perms).unwrap();
+    }
+    #[cfg(windows)]
+    fn make_executable(_: &Path) {} // the .exe suffix is sufficient on Windows
+    #[cfg(unix)]
+    fn make_symlink(p: &Path, t: &Path) {
+        ::std::os::unix::fs::symlink(t, p).expect("Failed to create symlink");
+    }
+    #[cfg(windows)]
+    fn make_symlink(_: &Path, _: &Path) {
+        panic!("Not supported")
+    }
+}
+
+fn path() -> Vec<PathBuf> { // current PATH split into entries; empty if PATH is unset
+    env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
+}
+
+#[test]
+fn list_commands_with_descriptions() { // `cargo --list` shows one-line descriptions
+    let p = project().build();
+    p.cargo("--list")
+        .with_stdout_contains("    build                Compile a local package and all of its dependencies")
+        // assert read-manifest prints the right one-line description followed by another command, indented.
+        .with_stdout_contains("    read-manifest        Print a JSON representation of a Cargo.toml manifest.")
+        .run();
+}
+
+#[test]
+fn list_command_looks_at_path() { // a cargo-1 executable on PATH shows up as subcommand "1"
+    let proj = project().build();
+    let proj = fake_file(
+        proj,
+        Path::new("path-test"),
+        "cargo-1",
+        &FakeKind::Executable,
+    );
+
+    let mut path = path();
+    path.push(proj.root().join("path-test")); // append fake dir to the existing PATH
+    let path = env::join_paths(path.iter()).unwrap();
+    let output = cargo_process("-v --list").env("PATH", &path).exec_with_output().unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
+    assert!(
+        output.contains("\n    1                   "),
+        "missing 1: {}",
+        output
+    );
+}
+
+// windows and symlinks don't currently agree that well
+#[cfg(unix)]
+#[test]
+fn list_command_resolves_symlinks() { // a symlinked cargo-2 is listed as subcommand "2"
+    let proj = project().build();
+    let proj = fake_file(
+        proj,
+        Path::new("path-test"),
+        "cargo-2",
+        &FakeKind::Symlink {
+            target: &cargo_exe(), // link points at the real cargo binary
+        },
+    );
+
+    let mut path = path();
+    path.push(proj.root().join("path-test"));
+    let path = env::join_paths(path.iter()).unwrap();
+    let output = cargo_process("-v --list").env("PATH", &path).exec_with_output().unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
+    assert!(
+        output.contains("\n    2                   "),
+        "missing 2: {}",
+        output
+    );
+}
+
+#[test]
+fn find_closest_biuld_to_build() { // "biuld" typo is deliberate: tests the did-you-mean hint
+    cargo_process("biuld")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: no such subcommand: `biuld`
+
+<tab>Did you mean `build`?
+",
+        ).run();
+
+    // But, if we actually have `biuld`, it must work!
+    // https://github.com/rust-lang/cargo/issues/5201
+    Package::new("cargo-biuld", "1.0.0")
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                println!("Similar, but not identical to, build");
+            }
+        "#,
+        ).publish();
+
+    cargo_process("install cargo-biuld").run()
+    cargo_process("biuld")
+        .with_stdout("Similar, but not identical to, build\n")
+        .run();
+    cargo_process("--list") // installed external command now appears alongside built-ins
+        .with_stdout_contains(
+            "    build                Compile a local package and all of its dependencies\n",
+        ).with_stdout_contains("    biuld\n")
+        .run();
+}
+
+// if a subcommand is more than 3 edit distance away, we don't make a suggestion
+#[test]
+fn find_closest_dont_correct_nonsense() { // error output carries no "Did you mean" line
+    cargo_process("there-is-no-way-that-there-is-a-command-close-to-this")
+        .cwd(&paths::root())
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] no such subcommand: \
+                        `there-is-no-way-that-there-is-a-command-close-to-this`
+",
+        ).run();
+}
+
+#[test]
+fn displays_subcommand_on_error() { // the offending name is echoed back in the error
+    cargo_process("invalid-command")
+        .with_status(101)
+        .with_stderr("[ERROR] no such subcommand: `invalid-command`\n")
+        .run();
+}
+
+#[test]
+fn override_cargo_home() { // CARGO_HOME config supplies [cargo-new] name/email defaults
+    let root = paths::root();
+    let my_home = root.join("my_home");
+    fs::create_dir(&my_home).unwrap();
+    File::create(&my_home.join("config"))
+        .unwrap()
+        .write_all(
+            br#"
+        [cargo-new]
+        name = "foo"
+        email = "bar"
+        git = false
+    "#,
+        ).unwrap();
+
+    cargo_process("new foo")
+        .env("USER", "foo")
+        .env("CARGO_HOME", &my_home) // points cargo at the config written above
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["foo <bar>"]"#));
+}
+
+#[test]
+fn cargo_subcommand_env() { // external subcommands receive the CARGO env var
+    let src = format!(
+        r#"
+        use std::env;
+
+        fn main() {{
+            println!("{{}}", env::var("{}").unwrap());
+        }}
+        "#,
+        cargo::CARGO_ENV
+    );
+
+    let p = project()
+        .at("cargo-envtest")
+        .file("Cargo.toml", &basic_bin_manifest("cargo-envtest"))
+        .file("src/main.rs", &src)
+        .build();
+
+    let target_dir = p.target_debug_dir();
+
+    p.cargo("build").run();
+    assert!(p.bin("cargo-envtest").is_file());
+
+    let cargo = cargo_exe().canonicalize().unwrap();
+    let mut path = path();
+    path.push(target_dir); // make the freshly built cargo-envtest findable
+    let path = env::join_paths(path.iter()).unwrap();
+
+    cargo_process("envtest")
+        .env("PATH", &path)
+        .with_stdout(cargo.to_str().unwrap()) // CARGO must name the invoking binary
+        .run();
+}
+
+#[test]
+fn cargo_subcommand_args() { // all trailing args are forwarded to the external subcommand
+    let p = project()
+        .at("cargo-foo")
+        .file("Cargo.toml", &basic_manifest("cargo-foo", "0.0.1"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                let args: Vec<_> = ::std::env::args().collect();
+                println!("{:?}", args);
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+    let cargo_foo_bin = p.bin("cargo-foo");
+    assert!(cargo_foo_bin.is_file());
+
+    let mut path = path();
+    path.push(p.target_debug_dir());
+    let path = env::join_paths(path.iter()).unwrap();
+
+    cargo_process("foo bar -v --help") // "foo" is argv[1]; the rest pass through verbatim
+        .env("PATH", &path)
+        .with_stdout(
+            if cfg!(windows) { // weird edge-case w/ CWD & (windows vs unix)
+                format!(r#"[{:?}, "foo", "bar", "-v", "--help"]"#, cargo_foo_bin)
+            } else {
+                r#"["[CWD]/cargo-foo/target/debug/cargo-foo", "foo", "bar", "-v", "--help"]"#.to_string()
+            }
+        ).run();
+}
+
+#[test]
+fn cargo_help() { // every help entry point exits successfully
+    cargo_process("").run();
+    cargo_process("help").run();
+    cargo_process("-h").run();
+    cargo_process("help build").run();
+    cargo_process("build -h").run();
+    cargo_process("help help").run();
+}
+
+#[test]
+fn cargo_help_external_subcommand() { // `cargo help X` re-invokes external X with --help
+    Package::new("cargo-fake-help", "1.0.0")
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if ::std::env::args().nth(2) == Some(String::from("--help")) {
+                    println!("fancy help output");
+                }
+            }"#,
+        ).publish();
+    cargo_process("install cargo-fake-help").run();
+    cargo_process("help fake-help")
+        .with_stdout("fancy help output\n")
+        .run();
+}
+
+#[test]
+fn explain() { // --explain forwards the error code to rustc's explanation text
+    cargo_process("--explain E0001")
+        .with_stdout_contains(
+            "This error suggests that the expression arm corresponding to the noted pattern",
+        ).run();
+}
+
+// Test that the output of 'cargo -Z help' shows a different help screen with
+// all the -Z flags.
+#[test]
+fn z_flags_help() { // spot-checks one known unstable flag in the listing
+    cargo_process("-Z help")
+        .with_stdout_contains(
+            "    -Z unstable-options -- Allow the usage of unstable options such as --registry",
+        ).run();
+}
diff --git a/tests/testsuite/cargo_features.rs b/tests/testsuite/cargo_features.rs
new file mode 100644 (file)
index 0000000..de067c2
--- /dev/null
@@ -0,0 +1,298 @@
+use support::{project, publish};
+
+#[test]
+fn feature_required() { // unstable manifest key without cargo-features opt-in fails
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+            im-a-teapot = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .masquerade_as_nightly_cargo() // nightly: hint suggests adding cargo-features
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  the `im-a-teapot` manifest key is unstable and may not work properly in England
+
+Caused by:
+  feature `test-dummy-unstable` is required
+
+consider adding `cargo-features = [\"test-dummy-unstable\"]` to the manifest
+",
+        ).run();
+
+    p.cargo("build") // stable: hint says to switch channels instead
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  the `im-a-teapot` manifest key is unstable and may not work properly in England
+
+Caused by:
+  feature `test-dummy-unstable` is required
+
+this Cargo does not support nightly features, but if you
+switch to nightly channel you can add
+`cargo-features = [\"test-dummy-unstable\"]` to enable this feature
+",
+        ).run();
+}
+
+#[test]
+fn unknown_feature() { // unrecognized cargo-features entry is a manifest parse error
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["foo"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  unknown cargo feature `foo`
+",
+        ).run();
+}
+
+#[test]
+fn stable_feature_warns() { // listing an already-stable feature warns but still builds
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["test-dummy-stable"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: the cargo feature `test-dummy-stable` is now stable and is no longer \
+necessary to be listed in the manifest
+[COMPILING] a [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn nightly_feature_requires_nightly() { // same manifest: ok on nightly, rejected on stable
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["test-dummy-unstable"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+            im-a-teapot = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] a [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    p.cargo("build") // second run without the nightly masquerade
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
+  but this is the `stable` channel
+",
+        ).run();
+}
+
+#[test]
+fn nightly_feature_requires_nightly_in_dep() { // channel check also applies to path deps
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            cargo-features = ["test-dummy-unstable"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+            im-a-teapot = true
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] a [..]
+[COMPILING] b [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    p.cargo("build") // on stable the dep's manifest error surfaces via the source load
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to load source for a dependency on `a`
+
+Caused by:
+  Unable to update [..]
+
+Caused by:
+  failed to parse manifest at `[..]`
+
+Caused by:
+  the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
+  but this is the `stable` channel
+",
+        ).run();
+}
+
+#[test]
+fn cant_publish() { // nightly-gated manifest builds on nightly, fails on stable
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["test-dummy-unstable"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+            im-a-teapot = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] a [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \
+  but this is the `stable` channel
+",
+        ).run();
+}
+
+#[test]
+fn z_flags_rejected() { // -Z needs nightly; unknown -Z flags rejected even on nightly
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["test-dummy-unstable"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+            im-a-teapot = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -Zprint-im-a-teapot")
+        .with_status(101)
+        .with_stderr("error: the `-Z` flag is only accepted on the nightly channel of Cargo")
+        .run();
+
+    p.cargo("build -Zarg")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr("error: unknown `-Z` flag specified: arg")
+        .run();
+
+    p.cargo("build -Zprint-im-a-teapot") // valid flag on nightly prints the key's value
+        .masquerade_as_nightly_cargo()
+        .with_stdout("im-a-teapot = true\n")
+        .with_stderr(
+            "\
+[COMPILING] a [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn publish_allowed() { // publishing a feature-gated crate works on nightly
+    publish::setup(); // spins up the local test registry
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["test-dummy-unstable"]
+
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .masquerade_as_nightly_cargo()
+        .run();
+}
diff --git a/tests/testsuite/cfg.rs b/tests/testsuite/cfg.rs
new file mode 100644 (file)
index 0000000..c332438
--- /dev/null
@@ -0,0 +1,435 @@
+use std::fmt;
+use std::str::FromStr;
+
+use cargo::util::{Cfg, CfgExpr};
+use support::registry::Package;
+use support::rustc_host;
+use support::{basic_manifest, project};
+
+macro_rules! c { // shorthand Cfg constructor: c!(foo) or c!(foo = "bar")
+    ($a:ident) => {
+        Cfg::Name(stringify!($a).to_string())
+    };
+    ($a:ident = $e:expr) => {
+        Cfg::KeyPair(stringify!($a).to_string(), $e.to_string())
+    };
+}
+
+macro_rules! e { // shorthand CfgExpr builder; recurses into any/all/not, leaves via c!
+    (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*]));
+    (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*]));
+    (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*))));
+    (($($t:tt)*)) => (e!($($t)*));
+    ($($t:tt)*) => (CfgExpr::Value(c!($($t)*)));
+}
+
+fn good<T>(s: &str, expected: T) // assert `s` parses (via FromStr) to `expected`
+where
+    T: FromStr + PartialEq + fmt::Debug,
+    T::Err: fmt::Display,
+{
+    let c = match T::from_str(s) {
+        Ok(c) => c,
+        Err(e) => panic!("failed to parse `{}`: {}", s, e),
+    };
+    assert_eq!(c, expected);
+}
+
+fn bad<T>(s: &str, err: &str) // assert parsing `s` fails with a message containing `err`
+where
+    T: FromStr + fmt::Display,
+    T::Err: fmt::Display,
+{
+    let e = match T::from_str(s) {
+        Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
+        Err(e) => e.to_string(),
+    };
+    assert!(
+        e.contains(err),
+        "when parsing `{}`,\n\"{}\" not contained \
+         inside: {}",
+        s,
+        err,
+        e
+    );
+}
+
+#[test]
+fn cfg_syntax() { // accepted Cfg forms, incl. surrounding whitespace tolerance
+    good("foo", c!(foo));
+    good("_bar", c!(_bar));
+    good(" foo", c!(foo));
+    good(" foo  ", c!(foo));
+    good(" foo  = \"bar\"", c!(foo = "bar"));
+    good("foo=\"\"", c!(foo = ""));
+    good(" foo=\"3\"      ", c!(foo = "3"));
+    good("foo = \"3 e\"", c!(foo = "3 e"));
+}
+
+#[test]
+fn cfg_syntax_bad() { // rejected Cfg inputs and the expected error fragments
+    bad::<Cfg>("", "found nothing");
+    bad::<Cfg>(" ", "found nothing");
+    bad::<Cfg>("\t", "unexpected character");
+    bad::<Cfg>("7", "unexpected character");
+    bad::<Cfg>("=", "expected identifier");
+    bad::<Cfg>(",", "expected identifier");
+    bad::<Cfg>("(", "expected identifier");
+    bad::<Cfg>("foo (", "malformed cfg value");
+    bad::<Cfg>("bar =", "expected a string");
+    bad::<Cfg>("bar = \"", "unterminated string");
+    bad::<Cfg>("foo, bar", "malformed cfg value");
+}
+
+#[test]
+fn cfg_expr() { // CfgExpr parses bare cfgs plus all()/any()/not() combinators
+    good("foo", e!(foo));
+    good("_bar", e!(_bar));
+    good(" foo", e!(foo));
+    good(" foo  ", e!(foo));
+    good(" foo  = \"bar\"", e!(foo = "bar"));
+    good("foo=\"\"", e!(foo = ""));
+    good(" foo=\"3\"      ", e!(foo = "3"));
+    good("foo = \"3 e\"", e!(foo = "3 e"));
+
+    good("all()", e!(all()));
+    good("all(a)", e!(all(a)));
+    good("all(a, b)", e!(all(a, b)));
+    good("all(a, )", e!(all(a))); // trailing comma is tolerated
+    good("not(a = \"b\")", e!(not(a = "b")));
+    good("not(all(a))", e!(not(all(a))));
+}
+
+#[test]
+fn cfg_expr_bad() { // malformed CfgExpr inputs and their error fragments
+    bad::<CfgExpr>(" ", "found nothing");
+    bad::<CfgExpr>(" all", "expected `(`");
+    bad::<CfgExpr>("all(a", "expected `)`");
+    bad::<CfgExpr>("not", "expected `(`");
+    bad::<CfgExpr>("not(a", "expected `)`");
+    bad::<CfgExpr>("a = ", "expected a string");
+    bad::<CfgExpr>("all(not())", "expected identifier");
+    bad::<CfgExpr>("foo(a)", "consider using all() or any() explicitly");
+}
+
+#[test]
+fn cfg_matches() { // CfgExpr::matches semantics against sets of active cfgs
+    assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));
+    assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));
+    assert!(e!(any(foo, bar)).matches(&[c!(bar)]));
+    assert!(e!(any(foo, bar)).matches(&[c!(foo)]));
+    assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+    assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+    assert!(e!(not(foo)).matches(&[c!(bar)]));
+    assert!(e!(not(foo)).matches(&[])); // not() is vacuously true on the empty set
+    assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));
+    assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));
+
+    assert!(!e!(foo).matches(&[]));
+    assert!(!e!(foo).matches(&[c!(bar)]));
+    assert!(!e!(foo).matches(&[c!(fo)])); // prefix of a name must not match
+    assert!(!e!(any(foo)).matches(&[]));
+    assert!(!e!(any(foo)).matches(&[c!(bar)]));
+    assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));
+    assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));
+    assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));
+    assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));
+    assert!(!e!(all(foo, bar)).matches(&[]));
+    assert!(!e!(not(bar)).matches(&[c!(bar)]));
+    assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)]));
+    assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)]));
+}
+
+#[test]
+fn cfg_easy() { // cfg(unix)/cfg(windows) target deps: exactly one applies on any host
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(unix)'.dependencies]
+            b = { path = 'b' }
+            [target."cfg(windows)".dependencies]
+            b = { path = 'b' }
+        "#,
+        ).file("src/lib.rs", "extern crate b;")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn dont_include() { // dep gated on the *other* platform's cfg must not be compiled
+    let other_family = if cfg!(unix) { "windows" } else { "unix" };
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg({})'.dependencies]
+            b = {{ path = 'b' }}
+        "#,
+                other_family
+            ),
+        ).file("src/lib.rs", "")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build") // expected stderr has no [COMPILING] line for `b`
+        .with_stderr(
+            "\
+[COMPILING] a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn works_through_the_registry() { // cfg target deps survive registry publish/download
+    Package::new("baz", "0.1.0").publish();
+    Package::new("bar", "0.1.0")
+        .target_dep("baz", "0.1.0", "cfg(unix)")
+        .target_dep("baz", "0.1.0", "cfg(windows)")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate bar;",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[COMPILING] baz v0.1.0
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn ignore_version_from_other_platform() { // only this platform's version req is resolved
+    let this_family = if cfg!(unix) { "unix" } else { "windows" };
+    let other_family = if cfg!(unix) { "windows" } else { "unix" };
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg({})'.dependencies]
+            bar = "0.1.0"
+
+            [target.'cfg({})'.dependencies]
+            bar = "0.2.0"
+        "#,
+                this_family, other_family
+            ),
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate bar;",
+        ).build();
+
+    p.cargo("build") // expects bar v0.1.0 (this platform), never v0.2.0
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn bad_target_spec() { // non-identifier cfg atom yields a parse error chain
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(4)'.dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  failed to parse `4` as a cfg expression
+
+Caused by:
+  unexpected character in cfg `4`, [..]
+",
+        ).run();
+}
+
+#[test]
+fn bad_target_spec2() { // key with missing value string in cfg key-pair
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(bar =)'.dependencies]
+            baz = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  failed to parse `bar =` as a cfg expression
+
+Caused by:
+  expected a string, found nothing
+",
+        ).run();
+}
+
+#[test]
+fn multiple_match_ok() { // several target sections matching the same dep coexist
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(unix)'.dependencies]
+            b = {{ path = 'b' }}
+            [target.'cfg(target_family = "unix")'.dependencies]
+            b = {{ path = 'b' }}
+            [target."cfg(windows)".dependencies]
+            b = {{ path = 'b' }}
+            [target.'cfg(target_family = "windows")'.dependencies]
+            b = {{ path = 'b' }}
+            [target."cfg(any(windows, unix))".dependencies]
+            b = {{ path = 'b' }}
+
+            [target.{}.dependencies]
+            b = {{ path = 'b' }}
+        "#,
+                rustc_host()
+            ),
+        ).file("src/lib.rs", "extern crate b;")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn any_ok() { // cfg(any(windows, unix)) matches every supported host
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target."cfg(any(windows, unix))".dependencies]
+            b = { path = 'b' }
+        "#,
+        ).file("src/lib.rs", "extern crate b;")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build -v").run();
+}
+
+// https://github.com/rust-lang/cargo/issues/5313
+#[test]
+#[cfg(all(
+    target_arch = "x86_64",
+    target_os = "linux",
+    target_env = "gnu"
+))] // needs a host where --target x86_64-unknown-linux-gnu is the native triple
+fn cfg_looks_at_rustflags_for_target() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(with_b)'.dependencies]
+            b = { path = 'b' }
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[cfg(with_b)]
+            extern crate b;
+
+            fn main() { b::foo(); }
+        "#,
+        ).file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    p.cargo("build --target x86_64-unknown-linux-gnu")
+        .env("RUSTFLAGS", "--cfg with_b") // custom cfg from RUSTFLAGS activates the dep
+        .run();
+}
diff --git a/tests/testsuite/check-style.sh b/tests/testsuite/check-style.sh
new file mode 100755 (executable)
index 0000000..72d7ac6
--- /dev/null
@@ -0,0 +1,3 @@
+echo "checking for lines over 100 characters..." # style gate for src/ and tests/
+find src tests -name '*.rs' | xargs grep '.\{101,\}' && exit 1 # fail if grep finds any
+echo "ok"
diff --git a/tests/testsuite/check.rs b/tests/testsuite/check.rs
new file mode 100644 (file)
index 0000000..3322986
--- /dev/null
@@ -0,0 +1,715 @@
+use std::fmt::{self, Write};
+
+use glob::glob;
+use support::install::exe;
+use support::is_nightly;
+use support::paths::CargoPathExt;
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+#[test]
+fn check_success() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::baz(); }",
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("check").run();
+}
+
+#[test]
+fn check_fail() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::baz(42); }",
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("check").with_status(101).run();
+}
+
+#[test]
+fn custom_derive() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+#[macro_use]
+extern crate bar;
+
+trait B {
+    fn b(&self);
+}
+
+#[derive(B)]
+struct A;
+
+fn main() {
+    let a = A;
+    a.b();
+}
+"#,
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            [lib]
+            proc-macro = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_derive(B)]
+pub fn derive(_input: TokenStream) -> TokenStream {
+    format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap()
+}
+"#,
+        ).build();
+
+    foo.cargo("check").run();
+}
+
+#[test]
+fn check_build() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::baz(); }",
+        ).build();
+
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("check").run();
+    foo.cargo("build").run();
+}
+
+#[test]
+fn build_check() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::baz(); }",
+        ).build();
+
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("build").run();
+    foo.cargo("check").run();
+}
+
+// Checks that where a project has both a lib and a bin, the lib is only checked
+// not built.
+#[test]
+fn issue_3418() {
+    let foo = project()
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    foo.cargo("check -v")
+        .with_stderr_contains("[..] --emit=dep-info,metadata [..]")
+        .run();
+}
+
+// Some weirdness that seems to be caused by a crate being built as well as
+// checked, but in this case with a proc macro too.
+#[test]
+fn issue_3419() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            rustc-serialize = "*"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate rustc_serialize;
+
+            use rustc_serialize::Decodable;
+
+            pub fn take<T: Decodable>() {}
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate rustc_serialize;
+
+            extern crate foo;
+
+            #[derive(RustcDecodable)]
+            pub struct Foo;
+
+            fn main() {
+                foo::take::<Foo>();
+            }
+        "#,
+        ).build();
+
+    Package::new("rustc-serialize", "1.0.0")
+        .file(
+            "src/lib.rs",
+            r#"pub trait Decodable: Sized {
+                    fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error>;
+                 }
+                 pub trait Decoder {
+                    type Error;
+                    fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
+                                         -> Result<T, Self::Error>
+                    where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+                 } "#,
+        ).publish();
+
+    p.cargo("check").run();
+}
+
+// Check on a dylib should have a different metadata hash than build.
+#[test]
+fn dylib_check_preserves_build_cache() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [lib]
+            crate-type = ["dylib"]
+
+            [dependencies]
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[..]Compiling foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("check").run();
+
+    p.cargo("build")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+}
+
+// test `cargo rustc --profile check`
+#[test]
+fn rustc_check() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::baz(); }",
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc --profile check -- --emit=metadata").run();
+}
+
+#[test]
+fn rustc_check_err() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::qux(); }",
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc --profile check -- --emit=metadata")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn check_all() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+            [dependencies]
+            b = { path = "b" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("examples/a.rs", "fn main() {}")
+        .file("tests/a.rs", "")
+        .file("src/lib.rs", "")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/main.rs", "fn main() {}")
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("check --all -v")
+        .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+        .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+        .with_stderr_contains("[..] --crate-name b b/src/lib.rs [..]")
+        .with_stderr_contains("[..] --crate-name b b/src/main.rs [..]")
+        .run();
+}
+
+#[test]
+fn check_virtual_all_implied() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("check -v")
+        .with_stderr_contains("[..] --crate-name bar bar/src/lib.rs [..]")
+        .with_stderr_contains("[..] --crate-name baz baz/src/lib.rs [..]")
+        .run();
+}
+
+#[test]
+fn targets_selected_default() {
+    let foo = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", "pub fn smth() {}")
+        .file("examples/example1.rs", "fn main() {}")
+        .file("tests/test2.rs", "#[test] fn t() {}")
+        .file("benches/bench3.rs", "")
+        .build();
+
+    foo.cargo("check -v")
+        .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+        .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+        .with_stderr_does_not_contain("[..] --crate-name example1 examples/example1.rs [..]")
+        .with_stderr_does_not_contain("[..] --crate-name test2 tests/test2.rs [..]")
+        .with_stderr_does_not_contain("[..] --crate-name bench3 benches/bench3.rs [..]")
+        .run();
+}
+
+#[test]
+fn targets_selected_all() {
+    let foo = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", "pub fn smth() {}")
+        .file("examples/example1.rs", "fn main() {}")
+        .file("tests/test2.rs", "#[test] fn t() {}")
+        .file("benches/bench3.rs", "")
+        .build();
+
+    foo.cargo("check --all-targets -v")
+        .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+        .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+        .with_stderr_contains("[..] --crate-name example1 examples/example1.rs [..]")
+        .with_stderr_contains("[..] --crate-name test2 tests/test2.rs [..]")
+        .with_stderr_contains("[..] --crate-name bench3 benches/bench3.rs [..]")
+        .run();
+}
+
+#[test]
+fn check_unit_test_profile() {
+    let foo = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #[cfg(test)]
+            mod tests {
+                #[test]
+                fn it_works() {
+                    badtext
+                }
+            }
+        "#,
+        ).build();
+
+    foo.cargo("check").run();
+    foo.cargo("check --profile test")
+        .with_status(101)
+        .with_stderr_contains("[..]badtext[..]")
+        .run();
+}
+
+// Verify what is checked with various command-line filters.
+#[test]
+fn check_filters() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            fn unused_normal_lib() {}
+            #[cfg(test)]
+            mod tests {
+                fn unused_unit_lib() {}
+            }
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {}
+            fn unused_normal_bin() {}
+            #[cfg(test)]
+            mod tests {
+                fn unused_unit_bin() {}
+            }
+        "#,
+        ).file(
+            "tests/t1.rs",
+            r#"
+            fn unused_normal_t1() {}
+            #[cfg(test)]
+            mod tests {
+                fn unused_unit_t1() {}
+            }
+        "#,
+        ).file(
+            "examples/ex1.rs",
+            r#"
+            fn main() {}
+            fn unused_normal_ex1() {}
+            #[cfg(test)]
+            mod tests {
+                fn unused_unit_ex1() {}
+            }
+        "#,
+        ).file(
+            "benches/b1.rs",
+            r#"
+            fn unused_normal_b1() {}
+            #[cfg(test)]
+            mod tests {
+                fn unused_unit_b1() {}
+            }
+        "#,
+        ).build();
+
+    p.cargo("check")
+        .with_stderr_contains("[..]unused_normal_lib[..]")
+        .with_stderr_contains("[..]unused_normal_bin[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_t1[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_ex1[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_b1[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_[..]")
+        .run();
+    p.root().join("target").rm_rf();
+    p.cargo("check --tests -v")
+        .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --test [..]")
+        .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --crate-type lib [..]")
+        .with_stderr_contains("[..] --crate-name foo src/main.rs [..] --test [..]")
+        .with_stderr_contains("[..]unused_unit_lib[..]")
+        .with_stderr_contains("[..]unused_unit_bin[..]")
+        .with_stderr_contains("[..]unused_normal_lib[..]")
+        .with_stderr_contains("[..]unused_normal_bin[..]")
+        .with_stderr_contains("[..]unused_unit_t1[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_ex1[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_ex1[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_b1[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_b1[..]")
+        .with_stderr_does_not_contain("[..]--crate-type bin[..]")
+        .run();
+    p.root().join("target").rm_rf();
+    p.cargo("check --test t1 -v")
+        .with_stderr_contains("[..]unused_normal_lib[..]")
+        .with_stderr_contains("[..]unused_unit_t1[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_lib[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_bin[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_bin[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_ex1[..]")
+        .with_stderr_does_not_contain("[..]unused_normal_b1[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_ex1[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_b1[..]")
+        .run();
+    p.root().join("target").rm_rf();
+    p.cargo("check --all-targets -v")
+        .with_stderr_contains("[..]unused_normal_lib[..]")
+        .with_stderr_contains("[..]unused_normal_bin[..]")
+        .with_stderr_contains("[..]unused_normal_t1[..]")
+        .with_stderr_contains("[..]unused_normal_ex1[..]")
+        .with_stderr_contains("[..]unused_normal_b1[..]")
+        .with_stderr_contains("[..]unused_unit_b1[..]")
+        .with_stderr_contains("[..]unused_unit_t1[..]")
+        .with_stderr_contains("[..]unused_unit_lib[..]")
+        .with_stderr_contains("[..]unused_unit_bin[..]")
+        .with_stderr_does_not_contain("[..]unused_unit_ex1[..]")
+        .run();
+}
+
+#[test]
+fn check_artifacts() {
+    // Verify which artifacts are created when running check (#4059).
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .file("tests/t1.rs", "")
+        .file("examples/ex1.rs", "fn main() {}")
+        .file("benches/b1.rs", "")
+        .build();
+    p.cargo("check").run();
+    assert!(p.root().join("target/debug/libfoo.rmeta").is_file());
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+    assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+
+    p.root().join("target").rm_rf();
+    p.cargo("check --lib").run();
+    assert!(p.root().join("target/debug/libfoo.rmeta").is_file());
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+    assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+
+    p.root().join("target").rm_rf();
+    p.cargo("check --bin foo").run();
+    if is_nightly() {
+        // The nightly check can be removed once 1.27 is stable.
+        // Bins now generate `rmeta` files.
+        // See: https://github.com/rust-lang/rust/pull/49289
+        assert!(p.root().join("target/debug/libfoo.rmeta").is_file());
+    }
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+    assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+
+    p.root().join("target").rm_rf();
+    p.cargo("check --test t1").run();
+    assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+    assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+    assert_eq!(
+        glob(&p.root().join("target/debug/t1-*").to_str().unwrap())
+            .unwrap()
+            .count(),
+        0
+    );
+
+    p.root().join("target").rm_rf();
+    p.cargo("check --example ex1").run();
+    assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+    assert!(
+        !p.root()
+            .join("target/debug/examples")
+            .join(exe("ex1"))
+            .is_file()
+    );
+
+    p.root().join("target").rm_rf();
+    p.cargo("check --bench b1").run();
+    assert!(!p.root().join("target/debug/libfoo.rmeta").is_file());
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+    assert!(!p.root().join("target/debug").join(exe("foo")).is_file());
+    assert_eq!(
+        glob(&p.root().join("target/debug/b1-*").to_str().unwrap())
+            .unwrap()
+            .count(),
+        0
+    );
+}
+
+#[test]
+fn short_message_format() {
+    let foo = project()
+        .file("src/lib.rs", "fn foo() { let _x: bool = 'a'; }")
+        .build();
+    foo.cargo("check --message-format=short")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+src/lib.rs:1:27: error[E0308]: mismatched types
+error: aborting due to previous error
+error: Could not compile `foo`.
+",
+        ).run();
+}
+
+#[test]
+fn proc_macro() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "demo"
+                version = "0.0.1"
+
+                [lib]
+                proc-macro = true
+            "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+                extern crate proc_macro;
+
+                use proc_macro::TokenStream;
+
+                #[proc_macro_derive(Foo)]
+                pub fn demo(_input: TokenStream) -> TokenStream {
+                    "".parse().unwrap()
+                }
+            "#,
+        ).file(
+            "src/main.rs",
+            r#"
+                #[macro_use]
+                extern crate demo;
+
+                #[derive(Foo)]
+                struct A;
+
+                fn main() {}
+            "#,
+        ).build();
+    p.cargo("check -v").env("RUST_LOG", "cargo=trace").run();
+}
+
+#[test]
+fn does_not_use_empty_rustc_wrapper() {
+    let p = project().file("src/lib.rs", "").build();
+    p.cargo("check").env("RUSTC_WRAPPER", "").run();
+}
+
+#[test]
+fn error_from_deep_recursion() -> Result<(), fmt::Error> {
+    let mut big_macro = String::new();
+    writeln!(big_macro, "macro_rules! m {{")?;
+    for i in 0..130 {
+        writeln!(big_macro, "({}) => {{ m!({}); }};", i, i + 1)?;
+    }
+    writeln!(big_macro, "}}")?;
+    writeln!(big_macro, "m!(0);")?;
+
+    let p = project().file("src/lib.rs", &big_macro).build();
+    p.cargo("check --message-format=json")
+        .with_status(101)
+        .with_stdout_contains(
+            "[..]\"message\":\"recursion limit reached while expanding the macro `m`\"[..]",
+        )
+        .run();
+
+    Ok(())
+}
diff --git a/tests/testsuite/clean.rs b/tests/testsuite/clean.rs
new file mode 100644 (file)
index 0000000..ea62d06
--- /dev/null
@@ -0,0 +1,280 @@
+use std::env;
+
+use support::registry::Package;
+use support::{basic_bin_manifest, basic_manifest, git, main_file, project};
+
+#[test]
+fn cargo_clean_simple() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.build_dir().is_dir());
+
+    p.cargo("clean").run();
+    assert!(!p.build_dir().is_dir());
+}
+
+#[test]
+fn different_dir() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("src/bar/a.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.build_dir().is_dir());
+
+    p.cargo("clean")
+        .cwd(&p.root().join("src"))
+        .with_stdout("")
+        .run();
+    assert!(!p.build_dir().is_dir());
+}
+
+#[test]
+fn clean_multiple_packages() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+            [dependencies.d2]
+                path = "d2"
+
+            [[bin]]
+                name = "foo"
+        "#,
+        ).file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("d2/Cargo.toml", &basic_bin_manifest("d2"))
+        .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+        .build();
+
+    p.cargo("build -p d1 -p d2 -p foo").run();
+
+    let d1_path = &p
+        .build_dir()
+        .join("debug")
+        .join(format!("d1{}", env::consts::EXE_SUFFIX));
+    let d2_path = &p
+        .build_dir()
+        .join("debug")
+        .join(format!("d2{}", env::consts::EXE_SUFFIX));
+
+    assert!(p.bin("foo").is_file());
+    assert!(d1_path.is_file());
+    assert!(d2_path.is_file());
+
+    p.cargo("clean -p d1 -p d2")
+        .cwd(&p.root().join("src"))
+        .with_stdout("")
+        .run();
+    assert!(p.bin("foo").is_file());
+    assert!(!d1_path.is_file());
+    assert!(!d2_path.is_file());
+}
+
+#[test]
+fn clean_release() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build --release").run();
+
+    p.cargo("clean -p foo").run();
+    p.cargo("build --release").with_stdout("").run();
+
+    p.cargo("clean -p foo --release").run();
+    p.cargo("build --release")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn clean_doc() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("doc").run();
+
+    let doc_path = &p.build_dir().join("doc");
+
+    assert!(doc_path.is_dir());
+
+    p.cargo("clean --doc").run();
+
+    assert!(!doc_path.is_dir());
+    assert!(p.build_dir().is_dir());
+}
+
+#[test]
+fn build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "build.rs",
+            r#"
+            use std::path::PathBuf;
+            use std::env;
+
+            fn main() {
+                let out = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+                if env::var("FIRST").is_ok() {
+                    std::fs::File::create(out.join("out")).unwrap();
+                } else {
+                    assert!(!std::fs::metadata(out.join("out")).is_ok());
+                }
+            }
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").env("FIRST", "1").run();
+    p.cargo("clean -p foo").run();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]build-script-build`
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn clean_git() {
+    let git = git::new("dep", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            dep = {{ git = '{}' }}
+        "#,
+                git.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("clean -p dep").with_stdout("").run();
+    p.cargo("build").run();
+}
+
+#[test]
+fn registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.1.0").publish();
+
+    p.cargo("build").run();
+    p.cargo("clean -p bar").with_stdout("").run();
+    p.cargo("build").run();
+}
+
+#[test]
+fn clean_verbose() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+        [package]
+        name = "foo"
+        version = "0.0.1"
+
+        [dependencies]
+        bar = "0.1"
+    "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.1.0").publish();
+
+    p.cargo("build").run();
+    p.cargo("clean -p bar --verbose")
+        .with_stderr(
+            "\
+[REMOVING] [..]
+[REMOVING] [..]
+",
+        ).run();
+    p.cargo("build").run();
+}
diff --git a/tests/testsuite/concurrent.rs b/tests/testsuite/concurrent.rs
new file mode 100644 (file)
index 0000000..a999467
--- /dev/null
@@ -0,0 +1,517 @@
+use std::fs::{self, File};
+use std::io::Write;
+use std::net::TcpListener;
+use std::process::Stdio;
+use std::sync::mpsc::channel;
+use std::thread;
+use std::time::Duration;
+use std::{env, str};
+
+use git2;
+use support::cargo_process;
+use support::git;
+use support::install::{cargo_home, assert_has_installed_exe};
+use support::registry::Package;
+use support::{basic_manifest, execs, project};
+
+fn pkg(name: &str, vers: &str) {
+    Package::new(name, vers)
+        .file("src/main.rs", "fn main() {{}}")
+        .publish();
+}
+
+#[test]
+fn multiple_installs() {
+    let p = project()
+        .no_manifest()
+        .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0"))
+        .file("a/src/main.rs", "fn main() {}")
+        .file("b/Cargo.toml", &basic_manifest("bar", "0.0.0"))
+        .file("b/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    let mut a = p.cargo("install").cwd(p.root().join("a")).build_command();
+    let mut b = p.cargo("install").cwd(p.root().join("b")).build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    execs().run_output(&a);
+    execs().run_output(&b);
+
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[test]
+fn concurrent_installs() {
+    const LOCKED_BUILD: &str = "waiting for file lock on build directory";
+
+    pkg("foo", "0.0.1");
+    pkg("bar", "0.0.1");
+
+    let mut a = cargo_process("install foo").build_command();
+    let mut b = cargo_process("install bar").build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD));
+    assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD));
+
+    execs().run_output(&a);
+    execs().run_output(&b);
+
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[test]
+fn one_install_should_be_bad() {
+    let p = project()
+        .no_manifest()
+        .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0"))
+        .file("a/src/main.rs", "fn main() {}")
+        .file("b/Cargo.toml", &basic_manifest("foo", "0.0.0"))
+        .file("b/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    let mut a = p.cargo("install").cwd(p.root().join("a")).build_command();
+    let mut b = p.cargo("install").cwd(p.root().join("b")).build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    let (bad, good) = if a.status.code() == Some(101) {
+        (a, b)
+    } else {
+        (b, a)
+    };
+    execs()
+        .with_status(101)
+        .with_stderr_contains(
+            "[ERROR] binary `foo[..]` already exists in destination as part of `[..]`",
+        ).run_output(&bad);
+    execs()
+        .with_stderr_contains("warning: be sure to add `[..]` to your PATH [..]")
+        .run_output(&good);
+
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn multiple_registry_fetches() {
+    let mut pkg = Package::new("bar", "1.0.2");
+    for i in 0..10 {
+        let name = format!("foo{}", i);
+        Package::new(&name, "1.0.0").publish();
+        pkg.dep(&name, "*");
+    }
+    pkg.publish();
+
+    let p = project()
+        .no_manifest()
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("a/src/main.rs", "fn main() {}")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("b/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    let mut a = p.cargo("build").cwd(p.root().join("a")).build_command();
+    let mut b = p.cargo("build").cwd(p.root().join("b")).build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    execs().run_output(&a);
+    execs().run_output(&b);
+
+    let suffix = env::consts::EXE_SUFFIX;
+    assert!(
+        p.root()
+            .join("a/target/debug")
+            .join(format!("foo{}", suffix))
+            .is_file()
+    );
+    assert!(
+        p.root()
+            .join("b/target/debug")
+            .join(format!("bar{}", suffix))
+            .is_file()
+    );
+}
+
+#[test]
+fn git_same_repo_different_tags() {
+    let a = git::new("dep", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep", "0.5.0"))
+            .file("src/lib.rs", "pub fn tag1() {}")
+    }).unwrap();
+
+    let repo = git2::Repository::open(&a.root()).unwrap();
+    git::tag(&repo, "tag1");
+
+    File::create(a.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(b"pub fn tag2() {}")
+        .unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+    git::tag(&repo, "tag2");
+
+    let p = project()
+        .no_manifest()
+        .file(
+            "a/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            dep = {{ git = '{}', tag = 'tag1' }}
+        "#,
+                a.url()
+            ),
+        ).file(
+            "a/src/main.rs",
+            "extern crate dep; fn main() { dep::tag1(); }",
+        ).file(
+            "b/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "bar"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            dep = {{ git = '{}', tag = 'tag2' }}
+        "#,
+                a.url()
+            ),
+        ).file(
+            "b/src/main.rs",
+            "extern crate dep; fn main() { dep::tag2(); }",
+        );
+    let p = p.build();
+
+    let mut a = p.cargo("build -v").cwd(p.root().join("a")).build_command();
+    let mut b = p.cargo("build -v").cwd(p.root().join("b")).build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    execs().run_output(&a);
+    execs().run_output(&b);
+}
+
+#[test]
+fn git_same_branch_different_revs() {
+    let a = git::new("dep", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep", "0.5.0"))
+            .file("src/lib.rs", "pub fn f1() {}")
+    }).unwrap();
+
+    let p = project()
+        .no_manifest()
+        .file(
+            "a/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            dep = {{ git = '{}' }}
+        "#,
+                a.url()
+            ),
+        ).file(
+            "a/src/main.rs",
+            "extern crate dep; fn main() { dep::f1(); }",
+        ).file(
+            "b/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "bar"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            dep = {{ git = '{}' }}
+        "#,
+                a.url()
+            ),
+        ).file(
+            "b/src/main.rs",
+            "extern crate dep; fn main() { dep::f2(); }",
+        );
+    let p = p.build();
+
+    // Generate a Cargo.lock pointing at the current rev, then clear out the
+    // target directory
+    p.cargo("build").cwd(p.root().join("a")).run();
+    fs::remove_dir_all(p.root().join("a/target")).unwrap();
+
+    // Make a new commit on the master branch
+    let repo = git2::Repository::open(&a.root()).unwrap();
+    File::create(a.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(b"pub fn f2() {}")
+        .unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    // Now run both builds in parallel. The build of `b` should pick up the
+    // newest commit while the build of `a` should use the locked old commit.
+    let mut a = p.cargo("build").cwd(p.root().join("a")).build_command();
+    let mut b = p.cargo("build").cwd(p.root().join("b")).build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    execs().run_output(&a);
+    execs().run_output(&b);
+}
+
+#[test]
+fn same_project() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", "");
+    let p = p.build();
+
+    let mut a = p.cargo("build").build_command();
+    let mut b = p.cargo("build").build_command();
+
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    execs().run_output(&a);
+    execs().run_output(&b);
+}
+
+// Make sure that if Cargo dies while holding a lock that it's released and the
+// next Cargo to come in will take over cleanly.
+// older win versions don't support job objects, so skip test there
+#[test]
+#[cfg_attr(target_os = "windows", ignore)]
+fn killing_cargo_releases_the_lock() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+            build = "build.rs"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "build.rs",
+            r#"
+            use std::net::TcpStream;
+
+            fn main() {
+                if std::env::var("A").is_ok() {
+                    TcpStream::connect(&std::env::var("ADDR").unwrap()[..])
+                              .unwrap();
+                    std::thread::sleep(std::time::Duration::new(10, 0));
+                }
+            }
+        "#,
+        );
+    let p = p.build();
+
+    // Our build script will connect to our local TCP socket to inform us that
+    // it's started  and that's how we know that `a` will have the lock
+    // when we kill it.
+    let l = TcpListener::bind("127.0.0.1:0").unwrap();
+    let mut a = p.cargo("build").build_command();
+    let mut b = p.cargo("build").build_command();
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+    a.env("ADDR", l.local_addr().unwrap().to_string())
+        .env("A", "a");
+    b.env("ADDR", l.local_addr().unwrap().to_string())
+        .env_remove("A");
+
+    // Spawn `a`, wait for it to get to the build script (at which point the
+    // lock is held), then kill it.
+    let mut a = a.spawn().unwrap();
+    l.accept().unwrap();
+    a.kill().unwrap();
+
+    // Spawn `b`, then just finish the output of a/b the same way the above
+    // tests does.
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    // We killed `a`, so it shouldn't succeed, but `b` should have succeeded.
+    assert!(!a.status.success());
+    execs().run_output(&b);
+}
+
+#[test]
+fn debug_release_ok() {
+    let p = project().file("src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+    fs::remove_dir_all(p.root().join("target")).unwrap();
+
+    let mut a = p.cargo("build").build_command();
+    let mut b = p.cargo("build --release").build_command();
+    a.stdout(Stdio::piped()).stderr(Stdio::piped());
+    b.stdout(Stdio::piped()).stderr(Stdio::piped());
+    let a = a.spawn().unwrap();
+    let b = b.spawn().unwrap();
+    let a = thread::spawn(move || a.wait_with_output().unwrap());
+    let b = b.wait_with_output().unwrap();
+    let a = a.join().unwrap();
+
+    execs()
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run_output(&a);
+    execs()
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run_output(&b);
+}
+
+#[test]
+fn no_deadlock_with_git_dependencies() {
+    let dep1 = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let dep2 = git::new("dep2", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep2", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [dependencies]
+            dep1 = {{ git = '{}' }}
+            dep2 = {{ git = '{}' }}
+        "#,
+                dep1.url(),
+                dep2.url()
+            ),
+        ).file("src/main.rs", "fn main() { }");
+    let p = p.build();
+
+    let n_concurrent_builds = 5;
+
+    let (tx, rx) = channel();
+    for _ in 0..n_concurrent_builds {
+        let cmd = p
+            .cargo("build")
+            .build_command()
+            .stdout(Stdio::piped())
+            .stderr(Stdio::piped())
+            .spawn();
+        let tx = tx.clone();
+        thread::spawn(move || {
+            let result = cmd.unwrap().wait_with_output().unwrap();
+            tx.send(result).unwrap()
+        });
+    }
+
+    for _ in 0..n_concurrent_builds {
+        let result = rx.recv_timeout(Duration::from_secs(30)).expect("Deadlock!");
+        execs().run_output(&result);
+    }
+}
diff --git a/tests/testsuite/config.rs b/tests/testsuite/config.rs
new file mode 100644 (file)
index 0000000..681cc46
--- /dev/null
@@ -0,0 +1,690 @@
+use cargo::core::{enable_nightly_features, Shell};
+use cargo::util::config::{self, Config};
+use cargo::util::toml::{self, VecStringOrBool as VSOB};
+use cargo::CargoError;
+use std::borrow::Borrow;
+use std::collections;
+use std::fs;
+use support::{lines_match, paths, project};
+
+#[test]
+fn read_env_vars_for_config() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                assert_eq!(env::var("NUM_JOBS").unwrap(), "100");
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").env("CARGO_BUILD_JOBS", "100").run();
+}
+
+fn write_config(config: &str) {
+    let path = paths::root().join(".cargo/config");
+    fs::create_dir_all(path.parent().unwrap()).unwrap();
+    fs::write(path, config).unwrap();
+}
+
+fn new_config(env: &[(&str, &str)]) -> Config {
+    enable_nightly_features(); // -Z advanced-env
+    let output = Box::new(fs::File::create(paths::root().join("shell.out")).unwrap());
+    let shell = Shell::from_write(output);
+    let cwd = paths::root();
+    let homedir = paths::home();
+    let env = env
+        .iter()
+        .map(|(k, v)| (k.to_string(), v.to_string()))
+        .collect();
+    let mut config = Config::new(shell, cwd, homedir);
+    config.set_env(env);
+    config
+        .configure(
+            0,
+            None,
+            &None,
+            false,
+            false,
+            &None,
+            &["advanced-env".into()],
+        ).unwrap();
+    config
+}
+
+fn assert_error<E: Borrow<CargoError>>(error: E, msgs: &str) {
+    let causes = error
+        .borrow()
+        .iter_chain()
+        .map(|e| e.to_string())
+        .collect::<Vec<_>>()
+        .join("\n");
+    if !lines_match(msgs, &causes) {
+        panic!(
+            "Did not find expected:\n{}\nActual error:\n{}\n",
+            msgs, causes
+        );
+    }
+}
+
+#[test]
+fn get_config() {
+    write_config(
+        "\
+[S]
+f1 = 123
+",
+    );
+
+    let config = new_config(&[]);
+
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct S {
+        f1: Option<i64>,
+    }
+    let s: S = config.get("S").unwrap();
+    assert_eq!(s, S { f1: Some(123) });
+    let config = new_config(&[("CARGO_S_F1", "456")]);
+    let s: S = config.get("S").unwrap();
+    assert_eq!(s, S { f1: Some(456) });
+}
+
+#[test]
+fn config_unused_fields() {
+    write_config(
+        "\
+[S]
+unused = 456
+",
+    );
+
+    let config = new_config(&[("CARGO_S_UNUSED2", "1"), ("CARGO_S2_UNUSED", "2")]);
+
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct S {
+        f1: Option<i64>,
+    }
+    // This prints a warning (verified below).
+    let s: S = config.get("S").unwrap();
+    assert_eq!(s, S { f1: None });
+    // This does not print anything, we cannot easily/reliably warn for
+    // environment variables.
+    let s: S = config.get("S2").unwrap();
+    assert_eq!(s, S { f1: None });
+
+    // Verify the warnings.
+    drop(config); // Paranoid about flushing the file.
+    let path = paths::root().join("shell.out");
+    let output = fs::read_to_string(path).unwrap();
+    let expected = "\
+warning: unused key `S.unused` in config file `[..]/.cargo/config`
+";
+    if !lines_match(expected, &output) {
+        panic!(
+            "Did not find expected:\n{}\nActual error:\n{}\n",
+            expected, output
+        );
+    }
+}
+
+#[test]
+fn config_load_toml_profile() {
+    write_config(
+        "\
+[profile.dev]
+opt-level = 's'
+lto = true
+codegen-units=4
+debug = true
+debug-assertions = true
+rpath = true
+panic = 'abort'
+overflow-checks = true
+incremental = true
+
+[profile.dev.build-override]
+opt-level = 1
+
+[profile.dev.overrides.bar]
+codegen-units = 9
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_PROFILE_DEV_CODEGEN_UNITS", "5"),
+        ("CARGO_PROFILE_DEV_BUILD_OVERRIDE_CODEGEN_UNITS", "11"),
+        ("CARGO_PROFILE_DEV_OVERRIDES_env_CODEGEN_UNITS", "13"),
+        ("CARGO_PROFILE_DEV_OVERRIDES_bar_OPT_LEVEL", "2"),
+    ]);
+
+    // TODO: don't use actual tomlprofile
+    let p: toml::TomlProfile = config.get("profile.dev").unwrap();
+    let mut overrides = collections::BTreeMap::new();
+    let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("bar").unwrap());
+    let o_profile = toml::TomlProfile {
+        opt_level: Some(toml::TomlOptLevel("2".to_string())),
+        lto: None,
+        codegen_units: Some(9),
+        debug: None,
+        debug_assertions: None,
+        rpath: None,
+        panic: None,
+        overflow_checks: None,
+        incremental: None,
+        overrides: None,
+        build_override: None,
+    };
+    overrides.insert(key, o_profile);
+    let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("env").unwrap());
+    let o_profile = toml::TomlProfile {
+        opt_level: None,
+        lto: None,
+        codegen_units: Some(13),
+        debug: None,
+        debug_assertions: None,
+        rpath: None,
+        panic: None,
+        overflow_checks: None,
+        incremental: None,
+        overrides: None,
+        build_override: None,
+    };
+    overrides.insert(key, o_profile);
+
+    assert_eq!(
+        p,
+        toml::TomlProfile {
+            opt_level: Some(toml::TomlOptLevel("s".to_string())),
+            lto: Some(toml::StringOrBool::Bool(true)),
+            codegen_units: Some(5),
+            debug: Some(toml::U32OrBool::Bool(true)),
+            debug_assertions: Some(true),
+            rpath: Some(true),
+            panic: Some("abort".to_string()),
+            overflow_checks: Some(true),
+            incremental: Some(true),
+            overrides: Some(overrides),
+            build_override: Some(Box::new(toml::TomlProfile {
+                opt_level: Some(toml::TomlOptLevel("1".to_string())),
+                lto: None,
+                codegen_units: Some(11),
+                debug: None,
+                debug_assertions: None,
+                rpath: None,
+                panic: None,
+                overflow_checks: None,
+                incremental: None,
+                overrides: None,
+                build_override: None
+            }))
+        }
+    );
+}
+
+#[test]
+fn config_deserialize_any() {
+    // Some tests to exercise deserialize_any for deserializers that need to
+    // be told the format.
+    write_config(
+        "\
+a = true
+b = ['b']
+c = ['c']
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_ENVB", "false"),
+        ("CARGO_C", "['d']"),
+        ("CARGO_ENVL", "['a', 'b']"),
+    ]);
+
+    let a = config.get::<VSOB>("a").unwrap();
+    match a {
+        VSOB::VecString(_) => panic!("expected bool"),
+        VSOB::Bool(b) => assert_eq!(b, true),
+    }
+    let b = config.get::<VSOB>("b").unwrap();
+    match b {
+        VSOB::VecString(l) => assert_eq!(l, vec!["b".to_string()]),
+        VSOB::Bool(_) => panic!("expected list"),
+    }
+    let c = config.get::<VSOB>("c").unwrap();
+    match c {
+        VSOB::VecString(l) => assert_eq!(l, vec!["c".to_string(), "d".to_string()]),
+        VSOB::Bool(_) => panic!("expected list"),
+    }
+    let envb = config.get::<VSOB>("envb").unwrap();
+    match envb {
+        VSOB::VecString(_) => panic!("expected bool"),
+        VSOB::Bool(b) => assert_eq!(b, false),
+    }
+    let envl = config.get::<VSOB>("envl").unwrap();
+    match envl {
+        VSOB::VecString(l) => assert_eq!(l, vec!["a".to_string(), "b".to_string()]),
+        VSOB::Bool(_) => panic!("expected list"),
+    }
+}
+
+#[test]
+fn config_toml_errors() {
+    write_config(
+        "\
+[profile.dev]
+opt-level = 'foo'
+",
+    );
+
+    let config = new_config(&[]);
+
+    assert_error(
+        config.get::<toml::TomlProfile>("profile.dev").unwrap_err(),
+        "error in [..]/.cargo/config: \
+         could not load config key `profile.dev.opt-level`: \
+         must be an integer, `z`, or `s`, but found: foo",
+    );
+
+    let config = new_config(&[("CARGO_PROFILE_DEV_OPT_LEVEL", "asdf")]);
+
+    assert_error(
+        config.get::<toml::TomlProfile>("profile.dev").unwrap_err(),
+        "error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: \
+         could not load config key `profile.dev.opt-level`: \
+         must be an integer, `z`, or `s`, but found: asdf",
+    );
+}
+
+#[test]
+fn load_nested() {
+    write_config(
+        "\
+[nest.foo]
+f1 = 1
+f2 = 2
+[nest.bar]
+asdf = 3
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_NEST_foo_f2", "3"),
+        ("CARGO_NESTE_foo_f1", "1"),
+        ("CARGO_NESTE_foo_f2", "3"),
+        ("CARGO_NESTE_bar_asdf", "3"),
+    ]);
+
+    type Nested = collections::HashMap<String, collections::HashMap<String, u8>>;
+
+    let n: Nested = config.get("nest").unwrap();
+    let mut expected = collections::HashMap::new();
+    let mut foo = collections::HashMap::new();
+    foo.insert("f1".to_string(), 1);
+    foo.insert("f2".to_string(), 3);
+    expected.insert("foo".to_string(), foo);
+    let mut bar = collections::HashMap::new();
+    bar.insert("asdf".to_string(), 3);
+    expected.insert("bar".to_string(), bar);
+    assert_eq!(n, expected);
+
+    let n: Nested = config.get("neste").unwrap();
+    assert_eq!(n, expected);
+}
+
+#[test]
+fn get_errors() {
+    write_config(
+        "\
+[S]
+f1 = 123
+f2 = 'asdf'
+big = 123456789
+",
+    );
+
+    let config = new_config(&[("CARGO_E_S", "asdf"), ("CARGO_E_BIG", "123456789")]);
+    assert_error(
+        config.get::<i64>("foo").unwrap_err(),
+        "missing config key `foo`",
+    );
+    assert_error(
+        config.get::<i64>("foo.bar").unwrap_err(),
+        "missing config key `foo.bar`",
+    );
+    assert_error(
+        config.get::<i64>("S.f2").unwrap_err(),
+        "error in [..]/.cargo/config: `S.f2` expected an integer, but found a string",
+    );
+    assert_error(
+        config.get::<u8>("S.big").unwrap_err(),
+        "error in [..].cargo/config: could not load config key `S.big`: \
+         invalid value: integer `123456789`, expected u8",
+    );
+
+    // Environment variable type errors.
+    assert_error(
+        config.get::<i64>("e.s").unwrap_err(),
+        "error in environment variable `CARGO_E_S`: invalid digit found in string",
+    );
+    assert_error(
+        config.get::<i8>("e.big").unwrap_err(),
+        "error in environment variable `CARGO_E_BIG`: \
+         could not load config key `e.big`: \
+         invalid value: integer `123456789`, expected i8",
+    );
+
+    #[derive(Debug, Deserialize)]
+    struct S {
+        f1: i64,
+        f2: String,
+        f3: i64,
+        big: i64,
+    }
+    assert_error(
+        config.get::<S>("S").unwrap_err(),
+        "missing config key `S.f3`",
+    );
+}
+
+#[test]
+fn config_get_option() {
+    write_config(
+        "\
+[foo]
+f1 = 1
+",
+    );
+
+    let config = new_config(&[("CARGO_BAR_ASDF", "3")]);
+
+    assert_eq!(config.get::<Option<i32>>("a").unwrap(), None);
+    assert_eq!(config.get::<Option<i32>>("a.b").unwrap(), None);
+    assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1));
+    assert_eq!(config.get::<Option<i32>>("bar.asdf").unwrap(), Some(3));
+    assert_eq!(config.get::<Option<i32>>("bar.zzzz").unwrap(), None);
+}
+
+#[test]
+fn config_bad_toml() {
+    write_config("asdf");
+    let config = new_config(&[]);
+    assert_error(
+        config.get::<i32>("foo").unwrap_err(),
+        "\
+could not load Cargo configuration
+Caused by:
+  could not parse TOML configuration in `[..]/.cargo/config`
+Caused by:
+  could not parse input as TOML
+Caused by:
+  expected an equals, found eof at line 1",
+    );
+}
+
+#[test]
+fn config_get_list() {
+    write_config(
+        "\
+l1 = []
+l2 = ['one', 'two']
+l3 = 123
+l4 = ['one', 'two']
+
+[nested]
+l = ['x']
+
+[nested2]
+l = ['y']
+
+[nested-empty]
+",
+    );
+
+    type L = Vec<String>;
+
+    let config = new_config(&[
+        ("CARGO_L4", "['three', 'four']"),
+        ("CARGO_L5", "['a']"),
+        ("CARGO_ENV_EMPTY", "[]"),
+        ("CARGO_ENV_BLANK", ""),
+        ("CARGO_ENV_NUM", "1"),
+        ("CARGO_ENV_NUM_LIST", "[1]"),
+        ("CARGO_ENV_TEXT", "asdf"),
+        ("CARGO_LEPAIR", "['a', 'b']"),
+        ("CARGO_NESTED2_L", "['z']"),
+        ("CARGO_NESTEDE_L", "['env']"),
+        ("CARGO_BAD_ENV", "[zzz]"),
+    ]);
+
+    assert_eq!(config.get::<L>("unset").unwrap(), vec![] as Vec<String>);
+    assert_eq!(config.get::<L>("l1").unwrap(), vec![] as Vec<String>);
+    assert_eq!(config.get::<L>("l2").unwrap(), vec!["one", "two"]);
+    assert_error(
+        config.get::<L>("l3").unwrap_err(),
+        "\
+invalid configuration for key `l3`
+expected a list, but found a integer for `l3` in [..]/.cargo/config",
+    );
+    assert_eq!(
+        config.get::<L>("l4").unwrap(),
+        vec!["one", "two", "three", "four"]
+    );
+    assert_eq!(config.get::<L>("l5").unwrap(), vec!["a"]);
+    assert_eq!(config.get::<L>("env-empty").unwrap(), vec![] as Vec<String>);
+    assert_error(
+        config.get::<L>("env-blank").unwrap_err(),
+        "error in environment variable `CARGO_ENV_BLANK`: \
+         should have TOML list syntax, found ``",
+    );
+    assert_error(
+        config.get::<L>("env-num").unwrap_err(),
+        "error in environment variable `CARGO_ENV_NUM`: \
+         should have TOML list syntax, found `1`",
+    );
+    assert_error(
+        config.get::<L>("env-num-list").unwrap_err(),
+        "error in environment variable `CARGO_ENV_NUM_LIST`: \
+         expected string, found integer",
+    );
+    assert_error(
+        config.get::<L>("env-text").unwrap_err(),
+        "error in environment variable `CARGO_ENV_TEXT`: \
+         should have TOML list syntax, found `asdf`",
+    );
+    // "invalid number" here isn't the best error, but I think it's just toml.rs.
+    assert_error(
+        config.get::<L>("bad-env").unwrap_err(),
+        "error in environment variable `CARGO_BAD_ENV`: \
+         could not parse TOML list: invalid number at line 1",
+    );
+
+    // Try some other sequence-like types.
+    assert_eq!(
+        config
+            .get::<(String, String, String, String)>("l4")
+            .unwrap(),
+        (
+            "one".to_string(),
+            "two".to_string(),
+            "three".to_string(),
+            "four".to_string()
+        )
+    );
+    assert_eq!(config.get::<(String,)>("l5").unwrap(), ("a".to_string(),));
+
+    // Tuple struct
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct TupS(String, String);
+    assert_eq!(
+        config.get::<TupS>("lepair").unwrap(),
+        TupS("a".to_string(), "b".to_string())
+    );
+
+    // Nested with an option.
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct S {
+        l: Option<Vec<String>>,
+    }
+    assert_eq!(config.get::<S>("nested-empty").unwrap(), S { l: None });
+    assert_eq!(
+        config.get::<S>("nested").unwrap(),
+        S {
+            l: Some(vec!["x".to_string()]),
+        }
+    );
+    assert_eq!(
+        config.get::<S>("nested2").unwrap(),
+        S {
+            l: Some(vec!["y".to_string(), "z".to_string()]),
+        }
+    );
+    assert_eq!(
+        config.get::<S>("nestede").unwrap(),
+        S {
+            l: Some(vec!["env".to_string()]),
+        }
+    );
+}
+
+#[test]
+fn config_get_other_types() {
+    write_config(
+        "\
+ns = 123
+ns2 = 456
+",
+    );
+
+    let config = new_config(&[("CARGO_NSE", "987"), ("CARGO_NS2", "654")]);
+
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct NewS(i32);
+    assert_eq!(config.get::<NewS>("ns").unwrap(), NewS(123));
+    assert_eq!(config.get::<NewS>("ns2").unwrap(), NewS(654));
+    assert_eq!(config.get::<NewS>("nse").unwrap(), NewS(987));
+    assert_error(
+        config.get::<NewS>("unset").unwrap_err(),
+        "missing config key `unset`",
+    );
+}
+
+#[test]
+fn config_relative_path() {
+    write_config(&format!(
+        "\
+p1 = 'foo/bar'
+p2 = '../abc'
+p3 = 'b/c'
+abs = '{}'
+",
+        paths::home().display(),
+    ));
+
+    let config = new_config(&[("CARGO_EPATH", "a/b"), ("CARGO_P3", "d/e")]);
+
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("p1")
+            .unwrap()
+            .path(),
+        paths::root().join("foo/bar")
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("p2")
+            .unwrap()
+            .path(),
+        paths::root().join("../abc")
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("p3")
+            .unwrap()
+            .path(),
+        paths::root().join("d/e")
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("abs")
+            .unwrap()
+            .path(),
+        paths::home()
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("epath")
+            .unwrap()
+            .path(),
+        paths::root().join("a/b")
+    );
+}
+
+#[test]
+fn config_get_integers() {
+    write_config(
+        "\
+npos = 123456789
+nneg = -123456789
+i64max = 9223372036854775807
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_EPOS", "123456789"),
+        ("CARGO_ENEG", "-1"),
+        ("CARGO_EI64MAX", "9223372036854775807"),
+    ]);
+
+    assert_eq!(
+        config.get::<u64>("i64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+    assert_eq!(
+        config.get::<i64>("i64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+    assert_eq!(
+        config.get::<u64>("ei64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+    assert_eq!(
+        config.get::<i64>("ei64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+
+    assert_error(
+        config.get::<u32>("nneg").unwrap_err(),
+        "error in [..].cargo/config: \
+         could not load config key `nneg`: \
+         invalid value: integer `-123456789`, expected u32",
+    );
+    assert_error(
+        config.get::<u32>("eneg").unwrap_err(),
+        "error in environment variable `CARGO_ENEG`: \
+         could not load config key `eneg`: \
+         invalid value: integer `-1`, expected u32",
+    );
+    assert_error(
+        config.get::<i8>("npos").unwrap_err(),
+        "error in [..].cargo/config: \
+         could not load config key `npos`: \
+         invalid value: integer `123456789`, expected i8",
+    );
+    assert_error(
+        config.get::<i8>("epos").unwrap_err(),
+        "error in environment variable `CARGO_EPOS`: \
+         could not load config key `epos`: \
+         invalid value: integer `123456789`, expected i8",
+    );
+}
diff --git a/tests/testsuite/corrupt_git.rs b/tests/testsuite/corrupt_git.rs
new file mode 100644 (file)
index 0000000..3e4e5ee
--- /dev/null
@@ -0,0 +1,156 @@
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use cargo::util::paths as cargopaths;
+use support::paths;
+use support::{basic_manifest, git, project};
+
+#[test] // cargo must recover (re-fetch) after git database files are deleted or truncated
+fn deleting_database_files() {
+    let project = project();
+    let git_project = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = {{ git = '{}' }}
+        "#,
+                git_project.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    project.cargo("build").run();
+
+    let mut files = Vec::new();
+    find_files(&paths::home().join(".cargo/git/db"), &mut files); // every file in the git database
+    assert!(!files.is_empty());
+
+    let log = "cargo::sources::git=trace";
+    for file in files {
+        if !file.exists() {
+            continue; // may have been removed by an earlier recovery pass
+        }
+        println!("deleting {}", file.display());
+        cargopaths::remove_file(&file).unwrap();
+        project.cargo("build -v").env("RUST_LOG", log).run(); // build must succeed again
+
+        if !file.exists() {
+            continue;
+        }
+        println!("truncating {}", file.display());
+        make_writable(&file);
+        fs::OpenOptions::new()
+            .write(true)
+            .open(&file)
+            .unwrap()
+            .set_len(2) // corrupt the file by truncating it to 2 bytes
+            .unwrap();
+        project.cargo("build -v").env("RUST_LOG", log).run(); // build must recover from corruption
+    }
+}
+
+#[test] // cargo must recover after git checkout (.git) files are deleted or truncated
+fn deleting_checkout_files() {
+    let project = project();
+    let git_project = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = {{ git = '{}' }}
+        "#,
+                git_project.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    project.cargo("build").run();
+
+    let dir = paths::home()
+        .join(".cargo/git/checkouts")
+        // get the first entry in the checkouts dir for the package's location
+        .read_dir()
+        .unwrap()
+        .next()
+        .unwrap()
+        .unwrap()
+        .path()
+        // get the first child of that checkout dir for our checkout
+        .read_dir()
+        .unwrap()
+        .next()
+        .unwrap()
+        .unwrap()
+        .path()
+        // and throw on .git to corrupt things
+        .join(".git");
+    let mut files = Vec::new();
+    find_files(&dir, &mut files); // every file under the checkout's .git dir
+    assert!(!files.is_empty());
+
+    let log = "cargo::sources::git=trace";
+    for file in files {
+        if !file.exists() {
+            continue; // may have been removed by an earlier recovery pass
+        }
+        println!("deleting {}", file.display());
+        cargopaths::remove_file(&file).unwrap();
+        project.cargo("build -v").env("RUST_LOG", log).run(); // build must succeed again
+
+        if !file.exists() {
+            continue;
+        }
+        println!("truncating {}", file.display());
+        make_writable(&file);
+        fs::OpenOptions::new()
+            .write(true)
+            .open(&file)
+            .unwrap()
+            .set_len(2) // corrupt the file by truncating it to 2 bytes
+            .unwrap();
+        project.cargo("build -v").env("RUST_LOG", log).run(); // build must recover from corruption
+    }
+}
+
+fn make_writable(path: &Path) { // clear the read-only bit so the file can be opened for writing
+    let mut p = path.metadata().unwrap().permissions();
+    p.set_readonly(false);
+    fs::set_permissions(path, p).unwrap();
+}
+
+fn find_files(path: &Path, dst: &mut Vec<PathBuf>) { // recursively collect all file paths under `path`
+    for e in path.read_dir().unwrap() {
+        let e = e.unwrap();
+        let path = e.path();
+        if e.file_type().unwrap().is_dir() {
+            find_files(&path, dst); // recurse into subdirectories
+        } else {
+            dst.push(path);
+        }
+    }
+}
diff --git a/tests/testsuite/cross_compile.rs b/tests/testsuite/cross_compile.rs
new file mode 100644 (file)
index 0000000..36c750a
--- /dev/null
@@ -0,0 +1,1163 @@
+use support::{basic_bin_manifest, basic_manifest, cross_compile, project};
+use support::{is_nightly, rustc_host};
+
+#[test] // build and run a binary for the alternate (non-host) target via --target
+fn simple_cross() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            &format!(
+                r#"
+            fn main() {{
+                assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+            }}
+        "#,
+                cross_compile::alternate()
+            ),
+        ).file(
+            "src/main.rs",
+            &format!(
+                r#"
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    let target = cross_compile::alternate();
+    p.cargo("build -v --target").arg(&target).run(); // build script asserts TARGET matches
+    assert!(p.target_bin(&target, "foo").is_file());
+
+    p.process(&p.target_bin(&target, "foo")).run(); // the binary itself asserts the alternate arch
+}
+
+#[test] // [build] target in .cargo/config selects the cross target without a --target flag
+fn simple_cross_config() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [build]
+            target = "{}"
+        "#,
+                cross_compile::alternate()
+            ),
+        ).file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            &format!(
+                r#"
+            fn main() {{
+                assert_eq!(std::env::var("TARGET").unwrap(), "{}");
+            }}
+        "#,
+                cross_compile::alternate()
+            ),
+        ).file(
+            "src/main.rs",
+            &format!(
+                r#"
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    let target = cross_compile::alternate();
+    p.cargo("build -v").run(); // no --target flag: the config's [build] target applies
+    assert!(p.target_bin(&target, "foo").is_file());
+
+    p.process(&p.target_bin(&target, "foo")).run(); // the binary asserts the alternate arch
+}
+
+#[test] // a path dependency is built for the requested target as well
+fn simple_deps() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }")
+        .build();
+    let _p2 = project()
+        .at("bar") // sibling project backing the ../bar path dependency
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let target = cross_compile::alternate();
+    p.cargo("build --target").arg(&target).run();
+    assert!(p.target_bin(&target, "foo").is_file());
+
+    p.process(&p.target_bin(&target, "foo")).run(); // cross-built binary must execute
+}
+
+#[test] // nightly-only: a compiler-plugin dep (bar) plus a normal dep (baz) in a cross build
+fn plugin_deps() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+    if !is_nightly() {
+        return; // compiler plugins require a nightly toolchain
+    }
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+            extern crate baz;
+            fn main() {
+                assert_eq!(bar!(), baz::baz());
+            }
+        "#,
+        ).build();
+    let _bar = project()
+        .at("bar") // the plugin crate (plugin = true in its manifest)
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            plugin = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+
+            extern crate rustc_plugin;
+            extern crate syntax;
+
+            use rustc_plugin::Registry;
+            use syntax::tokenstream::TokenTree;
+            use syntax::source_map::Span;
+            use syntax::ast::*;
+            use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
+            use syntax::ext::build::AstBuilder;
+
+            #[plugin_registrar]
+            pub fn foo(reg: &mut Registry) {
+                reg.register_macro("bar", expand_bar);
+            }
+
+            fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
+                          -> Box<MacResult + 'static> {
+                MacEager::expr(cx.expr_lit(sp, LitKind::Int(1, LitIntType::Unsuffixed)))
+            }
+        "#,
+        ).build();
+    let _baz = project()
+        .at("baz") // ordinary library dep, compiled for the target
+        .file("Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+        .build();
+
+    let target = cross_compile::alternate();
+    foo.cargo("build --target").arg(&target).run();
+    assert!(foo.target_bin(&target, "foo").is_file());
+
+    foo.process(&foo.target_bin(&target, "foo")).run(); // main asserts bar!() == baz::baz()
+}
+
+#[test] // nightly-only: plugin crate that itself depends on baz; builds twice for freshness
+fn plugin_to_the_max() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+    if !is_nightly() {
+        return; // compiler plugins require a nightly toolchain
+    }
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+            extern crate baz;
+            fn main() {
+                assert_eq!(bar!(), baz::baz());
+            }
+        "#,
+        ).build();
+    let _bar = project()
+        .at("bar") // the plugin crate, which also depends on baz
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            plugin = true
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+
+            extern crate rustc_plugin;
+            extern crate syntax;
+            extern crate baz;
+
+            use rustc_plugin::Registry;
+            use syntax::tokenstream::TokenTree;
+            use syntax::source_map::Span;
+            use syntax::ast::*;
+            use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
+            use syntax::ext::build::AstBuilder;
+            use syntax::ptr::P;
+
+            #[plugin_registrar]
+            pub fn foo(reg: &mut Registry) {
+                reg.register_macro("bar", expand_bar);
+            }
+
+            fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
+                          -> Box<MacResult + 'static> {
+                let bar = Ident::from_str("baz");
+                let path = cx.path(sp, vec![bar.clone(), bar]);
+                MacEager::expr(cx.expr_call(sp, cx.expr_path(path), vec![]))
+            }
+        "#,
+        ).build();
+    let _baz = project()
+        .at("baz")
+        .file("Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+        .build();
+
+    let target = cross_compile::alternate();
+    foo.cargo("build -v --target").arg(&target).run();
+    println!("second");
+    foo.cargo("build -v --target").arg(&target).run(); // second build must also succeed
+    assert!(foo.target_bin(&target, "foo").is_file());
+
+    foo.process(&foo.target_bin(&target, "foo")).run(); // main asserts bar!() == baz::baz()
+}
+
+#[test] // custom `ar`/`linker` from [target.<triple>] config must reach the rustc invocation
+fn linker_and_ar() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+    let p = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}]
+            ar = "my-ar-tool"
+            linker = "my-linker-tool"
+        "#,
+                target
+            ),
+        ).file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            &format!(
+                r#"
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    p.cargo("build -v --target")
+        .arg(&target)
+        .with_status(101) // the configured tools don't exist, so the build fails
+        .with_stderr_contains(&format!(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo src/foo.rs --color never --crate-type bin \
+    --emit=dep-info,link -C debuginfo=2 \
+    -C metadata=[..] \
+    --out-dir [CWD]/target/{target}/debug/deps \
+    --target {target} \
+    -C ar=my-ar-tool -C linker=my-linker-tool \
+    -L dependency=[CWD]/target/{target}/debug/deps \
+    -L dependency=[CWD]/target/debug/deps`
+",
+            target = target,
+        )).run();
+}
+
+#[test] // nightly-only: plugin crate whose dependency is built as a dylib
+fn plugin_with_extra_dylib_dep() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+    if !is_nightly() {
+        return; // compiler plugins require a nightly toolchain
+    }
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+
+            fn main() {}
+        "#,
+        ).build();
+    let _bar = project()
+        .at("bar") // the plugin crate, depending on the dylib crate baz
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            plugin = true
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+
+            extern crate rustc_plugin;
+            extern crate baz;
+
+            use rustc_plugin::Registry;
+
+            #[plugin_registrar]
+            pub fn foo(reg: &mut Registry) {
+                println!("{}", baz::baz());
+            }
+        "#,
+        ).build();
+    let _baz = project()
+        .at("baz") // crate_type = ["dylib"] in its manifest
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "baz"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "baz"
+            crate_type = ["dylib"]
+        "#,
+        ).file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+        .build();
+
+    let target = cross_compile::alternate();
+    foo.cargo("build --target").arg(&target).run(); // only asserts the build succeeds
+}
+
+#[test] // `cargo test --target` builds and runs lib and bin tests for the alternate target
+fn cross_tests() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file(
+            "src/bin/bar.rs",
+            &format!(
+                r#"
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+            #[test] fn test() {{ main() }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).file(
+            "src/lib.rs",
+            &format!(
+                r#"
+            use std::env;
+            pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }}
+            #[test] fn test_foo() {{ foo() }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    let target = cross_compile::alternate();
+    p.cargo("test --target")
+        .arg(&target) // test binaries land under target/<triple>/debug/deps
+        .with_stderr(&format!(
+            "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/{triple}/debug/deps/foo-[..][EXE]
+[RUNNING] target/{triple}/debug/deps/bar-[..][EXE]",
+            triple = target
+        )).with_stdout_contains("test test_foo ... ok")
+        .with_stdout_contains("test test ... ok")
+        .run();
+}
+
+#[test] // doctests run for the host target but are omitted when cross-compiling
+fn no_cross_doctests() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            //! ```
+            //! extern crate foo;
+            //! assert!(true);
+            //! ```
+        "#,
+        ).build();
+
+    let host_output =
+        "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo
+";
+
+    println!("a");
+    p.cargo("test").with_stderr(&host_output).run(); // no --target: doctests run
+
+    println!("b");
+    let target = cross_compile::host(); // --target equal to the host: doctests still run
+    p.cargo("test --target")
+        .arg(&target)
+        .with_stderr(&format!(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/{triple}/debug/deps/foo-[..][EXE]
+[DOCTEST] foo
+",
+            triple = target
+        )).run();
+
+    println!("c");
+    let target = cross_compile::alternate(); // non-host target: no [DOCTEST] line expected
+    p.cargo("test --target")
+        .arg(&target)
+        .with_stderr(&format!(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/{triple}/debug/deps/foo-[..][EXE]
+",
+            triple = target
+        )).run();
+}
+
+#[test] // `cargo run --target` builds and executes the cross-built binary
+fn simple_cargo_run() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "src/main.rs",
+            &format!(
+                r#"
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    let target = cross_compile::alternate();
+    p.cargo("run --target").arg(&target).run(); // the binary asserts the alternate arch
+}
+
+#[test] // build script sees TARGET and an OUT_DIR under target/<triple>/debug/build/foo-*/out
+fn cross_with_a_build_script() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = 'build.rs'
+        "#,
+        ).file(
+            "build.rs",
+            &format!(
+                r#"
+            use std::env;
+            use std::path::PathBuf;
+            fn main() {{
+                assert_eq!(env::var("TARGET").unwrap(), "{0}");
+                let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+                assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out");
+                path.pop();
+                assert!(path.file_name().unwrap().to_str().unwrap()
+                            .starts_with("foo-"));
+                path.pop();
+                assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build");
+                path.pop();
+                assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug");
+                path.pop();
+                assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}");
+                path.pop();
+                assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target");
+            }}
+        "#,
+                target
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v --target")
+        .arg(&target) // the build script itself compiles under target/debug (host side)
+        .with_stderr(&format!(
+            "\
+[COMPILING] foo v0.0.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..]`
+[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
+[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            target = target,
+        )).run();
+}
+
+#[test] // d1's build-script flags (-L) must apply to both host and target compilations
+fn build_script_needed_for_host_and_target() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = 'build.rs'
+
+            [dependencies.d1]
+            path = "d1"
+            [build-dependencies.d2]
+            path = "d2"
+        "#,
+        ).file(
+            "build.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate d2;
+            fn main() { d2::d2(); }
+        "#,
+        ).file(
+            "src/main.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate d1;
+            fn main() { d1::d1(); }
+        ",
+        ).file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.0"
+            authors = []
+            build = 'build.rs'
+        "#,
+        ).file("d1/src/lib.rs", "pub fn d1() {}")
+        .file(
+            "d1/build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                let target = env::var("TARGET").unwrap();
+                println!("cargo:rustc-flags=-L /path/to/{}", target);
+            }
+        "#,
+        ).file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.d1]
+            path = "../d1"
+        "#,
+        ).file(
+            "d2/src/lib.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate d1;
+            pub fn d2() { d1::d1(); }
+        ",
+        ).build();
+
+    p.cargo("build -v --target")
+        .arg(&target) // d2 (a build-dep) gets the host path; main.rs gets the target path
+        .with_stderr_contains(&"[COMPILING] d1 v0.0.0 ([CWD]/d1)")
+        .with_stderr_contains(
+            "[RUNNING] `rustc [..] d1/build.rs [..] --out-dir [CWD]/target/debug/build/d1-[..]`",
+        )
+        .with_stderr_contains("[RUNNING] `[CWD]/target/debug/build/d1-[..]/build-script-build`")
+        .with_stderr_contains("[RUNNING] `rustc [..] d1/src/lib.rs [..]`")
+        .with_stderr_contains("[COMPILING] d2 v0.0.0 ([CWD]/d2)")
+        .with_stderr_contains(&format!(
+            "[RUNNING] `rustc [..] d2/src/lib.rs [..] -L /path/to/{host}`",
+            host = host
+        )).with_stderr_contains("[COMPILING] foo v0.0.0 ([CWD])")
+        .with_stderr_contains(&format!(
+            "[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..] \
+             -L /path/to/{host}`",
+            host = host
+        )).with_stderr_contains(&format!(
+            "\
+             [RUNNING] `rustc [..] src/main.rs [..] --target {target} [..] \
+             -L /path/to/{target}`",
+            target = target
+        )).run();
+}
+
+#[test] // d2's build-dependency (d1) must link into its host-run build script during a cross build
+fn build_deps_for_the_right_arch() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.d2]
+            path = "d2"
+        "#,
+        ).file("src/main.rs", "extern crate d2; fn main() {}")
+        .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0"))
+        .file("d1/src/lib.rs", "pub fn d1() {}")
+        .file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.d1]
+            path = "../d1"
+        "#,
+        ).file("d2/build.rs", "extern crate d1; fn main() {}")
+        .file("d2/src/lib.rs", "")
+        .build();
+
+    let target = cross_compile::alternate();
+    p.cargo("build -v --target").arg(&target).run(); // only asserts the build succeeds
+}
+
+#[test] // a build-dep used only by the build script keeps its OUT_DIR under target/debug (host)
+fn build_script_only_host() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.d1]
+            path = "d1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("build.rs", "extern crate d1; fn main() {}")
+        .file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("d1/src/lib.rs", "pub fn d1() {}")
+        .file(
+            "d1/build.rs",
+            r#"
+            use std::env;
+
+            fn main() {
+                assert!(env::var("OUT_DIR").unwrap().replace("\\", "/")
+                                           .contains("target/debug/build/d1-"),
+                        "bad: {:?}", env::var("OUT_DIR"));
+            }
+        "#,
+        ).build();
+
+    let target = cross_compile::alternate();
+    p.cargo("build -v --target").arg(&target).run(); // d1's own build.rs does the asserting
+}
+
+#[test] // a plugin crate with a build script still builds cleanly when --target is passed
+fn plugin_build_script_right_arch() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [lib]
+            name = "foo"
+            plugin = true
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v --target")
+        .arg(cross_compile::alternate()) // expect exactly one compile pass each for build.rs and lib
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test] // a build-dep's [target.<host>] dependency (d2) is resolved during a cross build
+fn build_script_with_platform_specific_dependencies() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.d1]
+            path = "d1"
+        "#,
+        ).file(
+            "build.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate d1;
+            fn main() {}
+        ",
+        ).file("src/lib.rs", "")
+        .file(
+            "d1/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "d1"
+            version = "0.0.0"
+            authors = []
+
+            [target.{}.dependencies]
+            d2 = {{ path = "../d2" }}
+        "#,
+                host
+            ),
+        ).file(
+            "d1/src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate d2;",
+        ).file("d2/Cargo.toml", &basic_manifest("d2", "0.0.0"))
+        .file("d2/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v --target")
+        .arg(&target) // d2 and d1 compile (host side) before foo's build script runs
+        .with_stderr(&format!(
+            "\
+[COMPILING] d2 v0.0.0 ([..])
+[RUNNING] `rustc [..] d2/src/lib.rs [..]`
+[COMPILING] d1 v0.0.0 ([..])
+[RUNNING] `rustc [..] d1/src/lib.rs [..]`
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..] --target {target} [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            target = target
+        )).run();
+}
+
+#[test] // host-only [target] deps must not leak into the cross-compiled artifacts
+fn platform_specific_dependencies_do_not_leak() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.d1]
+            path = "d1"
+
+            [build-dependencies.d1]
+            path = "d1"
+        "#,
+        ).file("build.rs", "extern crate d1; fn main() {}")
+        .file("src/lib.rs", "")
+        .file(
+            "d1/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "d1"
+            version = "0.0.0"
+            authors = []
+
+            [target.{}.dependencies]
+            d2 = {{ path = "../d2" }}
+        "#,
+                host
+            ),
+        ).file("d1/src/lib.rs", "extern crate d2;")
+        .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) // NOTE(review): replaces the manifest written above, dropping the [target] d2 dep — TODO confirm this overwrite is intentional
+        .file("d2/src/lib.rs", "") // d2 has no Cargo.toml, so `extern crate d2` in d1 cannot resolve
+        .build();
+
+    p.cargo("build -v --target")
+        .arg(&target)
+        .with_status(101) // expected to fail: d2 is not a dependency of d1
+        .with_stderr_contains("[..] can't find crate for `d2`[..]")
+        .run();
+}
+
+#[test] // DEP_<links>_VAL env vars reach foo's build script only for the platform-matching dep
+fn platform_specific_variables_reflected_in_build_scripts() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [target.{host}.dependencies]
+            d1 = {{ path = "d1" }}
+
+            [target.{target}.dependencies]
+            d2 = {{ path = "d2" }}
+        "#,
+                host = host,
+                target = target
+            ),
+        ).file(
+            "build.rs",
+            &format!(
+                r#"
+            use std::env;
+
+            fn main() {{
+                let platform = env::var("TARGET").unwrap();
+                let (expected, not_expected) = match &platform[..] {{
+                    "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"),
+                    "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"),
+                    _ => panic!("unknown platform")
+                }};
+
+                env::var(expected).ok()
+                    .expect(&format!("missing {{}}", expected));
+                env::var(not_expected).err()
+                    .expect(&format!("found {{}}", not_expected));
+            }}
+        "#,
+                host = host,
+                target = target
+            ),
+        ).file("src/lib.rs", "")
+        .file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.0"
+            authors = []
+            links = "d1"
+            build = "build.rs"
+        "#,
+        ).file("d1/build.rs", r#"fn main() { println!("cargo:val=1") }"#)
+        .file("d1/src/lib.rs", "")
+        .file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.0"
+            authors = []
+            links = "d2"
+            build = "build.rs"
+        "#,
+        ).file("d2/build.rs", r#"fn main() { println!("cargo:val=1") }"#)
+        .file("d2/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v").run(); // host build: script expects DEP_D1_VAL only
+    p.cargo("build -v --target").arg(&target).run(); // target build: script expects DEP_D2_VAL only
+}
+
+#[test] // run tests of a dylib crate (with a dylib dependency) for the alternate target
+fn cross_test_dylib() {
+    if cross_compile::disabled() {
+        return; // cross-compilation not available in this environment
+    }
+
+    let target = cross_compile::alternate();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate_type = ["dylib"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar as the_bar;
+
+            pub fn bar() { the_bar::baz(); }
+
+            #[test]
+            fn foo() { bar(); }
+        "#,
+        ).file(
+            "tests/test.rs",
+            r#"
+            extern crate foo as the_foo;
+
+            #[test]
+            fn foo() { the_foo::bar(); }
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            crate_type = ["dylib"]
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            &format!(
+                r#"
+             use std::env;
+             pub fn baz() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    p.cargo("test --target")
+        .arg(&target) // both the unit test and the integration test must run and pass
+        .with_stderr(&format!(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/{arch}/debug/deps/foo-[..][EXE]
+[RUNNING] target/{arch}/debug/deps/test-[..][EXE]",
+            arch = cross_compile::alternate()
+        )).with_stdout_contains_n("test foo ... ok", 2)
+        .run();
+}
diff --git a/tests/testsuite/cross_publish.rs b/tests/testsuite/cross_publish.rs
new file mode 100644 (file)
index 0000000..389410b
--- /dev/null
@@ -0,0 +1,116 @@
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::PathBuf;
+
+use flate2::read::GzDecoder;
+use support::{cross_compile, project, publish};
+use tar::Archive;
+
+// `cargo package --target <alt>` should package, then verify-build the crate
+// for the alternate (cross) target, and the resulting .crate tarball should
+// contain the manifest, the original manifest, and the source file.
+#[test]
+fn simple_cross_package() {
+    // Skip entirely when the test environment has no cross toolchain.
+    if cross_compile::disabled() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            repository = "bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            &format!(
+                r#"
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    let target = cross_compile::alternate();
+
+    p.cargo("package --target")
+        .arg(&target)
+        .with_stderr(
+            "   Packaging foo v0.0.0 ([CWD])
+   Verifying foo v0.0.0 ([CWD])
+   Compiling foo v0.0.0 ([CWD]/target/package/foo-0.0.0)
+    Finished dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Check that the tarball contains the files
+    let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap();
+    // The .crate file is a gzipped tar archive; decompress then list entries.
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    let entries = ar.entries().unwrap();
+    let entry_paths = entries
+        .map(|entry| entry.unwrap().path().unwrap().into_owned())
+        .collect::<Vec<PathBuf>>();
+    assert!(entry_paths.contains(&PathBuf::from("foo-0.0.0/Cargo.toml")));
+    assert!(entry_paths.contains(&PathBuf::from("foo-0.0.0/Cargo.toml.orig")));
+    assert!(entry_paths.contains(&PathBuf::from("foo-0.0.0/src/main.rs")));
+}
+
+// `cargo publish --target <alt>` should run the verification build against the
+// alternate target and still upload to the (test) registry.
+#[test]
+fn publish_with_target() {
+    // Skip entirely when the test environment has no cross toolchain.
+    if cross_compile::disabled() {
+        return;
+    }
+
+    // Set up the local stub registry that the publish goes to.
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            repository = "bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            &format!(
+                r#"
+            use std::env;
+            fn main() {{
+                assert_eq!(env::consts::ARCH, "{}");
+            }}
+        "#,
+                cross_compile::alternate_arch()
+            ),
+        ).build();
+
+    let target = cross_compile::alternate();
+
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .arg("--target")
+        .arg(&target)
+        .with_stderr(&format!(
+            "    Updating `{registry}` index
+   Packaging foo v0.0.0 ([CWD])
+   Verifying foo v0.0.0 ([CWD])
+   Compiling foo v0.0.0 ([CWD]/target/package/foo-0.0.0)
+    Finished dev [unoptimized + debuginfo] target(s) in [..]
+   Uploading foo v0.0.0 ([CWD])
+",
+            registry = publish::registry_path().to_str().unwrap()
+        )).run();
+}
diff --git a/tests/testsuite/custom_target.rs b/tests/testsuite/custom_target.rs
new file mode 100644 (file)
index 0000000..11c31ca
--- /dev/null
@@ -0,0 +1,124 @@
+use support::is_nightly;
+use support::{basic_manifest, project};
+
+// Building a `#![no_core]` library against a custom target spec (JSON file)
+// should work, whether the spec path is given directly or via a non-canonical
+// relative path. Requires nightly for the unstable features used.
+#[test]
+fn custom_target_minimal() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #![feature(no_core)]
+            #![feature(lang_items)]
+            #![no_core]
+
+            pub fn foo() -> u32 {
+                42
+            }
+
+            #[lang = "sized"]
+            pub trait Sized {
+                // Empty.
+            }
+            #[lang = "copy"]
+            pub trait Copy {
+                // Empty.
+            }
+        "#,
+        ).file(
+            "custom-target.json",
+            r#"
+            {
+                "llvm-target": "x86_64-unknown-none-gnu",
+                "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+                "arch": "x86_64",
+                "target-endian": "little",
+                "target-pointer-width": "64",
+                "target-c-int-width": "32",
+                "os": "none",
+                "linker-flavor": "ld.lld"
+            }
+        "#,
+        ).build();
+
+    p.cargo("build --lib --target custom-target.json -v").run();
+    // Same spec through a path containing `..` — must resolve to the same target.
+    p.cargo("build --lib --target src/../custom-target.json -v")
+        .run();
+}
+
+// A path dependency should also be compiled for the custom target spec when
+// the top-level crate is built with `--target custom-target.json`.
+#[test]
+fn custom_target_dependency() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "foo"
+            version = "0.0.1"
+            authors = ["author@example.com"]
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![feature(no_core)]
+            #![feature(lang_items)]
+            #![feature(optin_builtin_traits)]
+            #![no_core]
+
+            extern crate bar;
+
+            pub fn foo() -> u32 {
+                bar::bar()
+            }
+
+            #[lang = "freeze"]
+            unsafe auto trait Freeze {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "bar/src/lib.rs",
+            r#"
+            #![feature(no_core)]
+            #![feature(lang_items)]
+            #![no_core]
+
+            pub fn bar() -> u32 {
+                42
+            }
+
+            #[lang = "sized"]
+            pub trait Sized {
+                // Empty.
+            }
+            #[lang = "copy"]
+            pub trait Copy {
+                // Empty.
+            }
+        "#,
+        ).file(
+            "custom-target.json",
+            r#"
+            {
+                "llvm-target": "x86_64-unknown-none-gnu",
+                "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+                "arch": "x86_64",
+                "target-endian": "little",
+                "target-pointer-width": "64",
+                "target-c-int-width": "32",
+                "os": "none",
+                "linker-flavor": "ld.lld"
+            }
+        "#,
+        ).build();
+
+    p.cargo("build --lib --target custom-target.json -v").run();
+}
diff --git a/tests/testsuite/death.rs b/tests/testsuite/death.rs
new file mode 100644 (file)
index 0000000..ecda4f6
--- /dev/null
@@ -0,0 +1,144 @@
+use std::fs;
+use std::io::{self, Read};
+use std::net::TcpListener;
+use std::process::{Child, Stdio};
+use std::thread;
+use std::time::Duration;
+
+use support::project;
+
+// On Unix, signal-based process-group kill always works, so the death tests
+// are unconditionally enabled.
+#[cfg(unix)]
+fn enabled() -> bool {
+    true
+}
+
+// On Windows support for these tests is only enabled through the usage of job
+// objects. Support for nested job objects, however, was added in recent-ish
+// versions of Windows, so this test may not always be able to succeed.
+//
+// As a result, we try to add ourselves to a job object here to see whether we
+// can succeed or not.
+#[cfg(windows)]
+fn enabled() -> bool {
+    use winapi::um::{handleapi, jobapi, jobapi2, processthreadsapi};
+
+    unsafe {
+        // If we're not currently in a job, then we can definitely run these
+        // tests.
+        let me = processthreadsapi::GetCurrentProcess();
+        let mut ret = 0;
+        let r = jobapi::IsProcessInJob(me, 0 as *mut _, &mut ret);
+        assert_ne!(r, 0);
+        if ret == ::winapi::shared::minwindef::FALSE {
+            return true;
+        }
+
+        // If we are in a job, then we can run these tests if we can be added to
+        // a nested job (as we're going to create a nested job no matter what as
+        // part of these tests).
+        //
+        // If we can't be added to a nested job, then these tests will
+        // definitely fail, and there's not much we can do about that.
+        let job = jobapi2::CreateJobObjectW(0 as *mut _, 0 as *const _);
+        assert!(!job.is_null());
+        let r = jobapi2::AssignProcessToJobObject(job, me);
+        handleapi::CloseHandle(job);
+        r != 0
+    }
+}
+
+// Interrupting cargo (Ctrl-C / kill) while a build script is blocked should
+// tear down the whole process tree: the build script's TCP connection must be
+// closed and the target directory must become removable.
+#[test]
+fn ctrl_c_kills_everyone() {
+    if !enabled() {
+        return;
+    }
+
+    // Listener on an ephemeral port; the build script connects here and blocks.
+    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = listener.local_addr().unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            &format!(
+                r#"
+            use std::net::TcpStream;
+            use std::io::Read;
+
+            fn main() {{
+                let mut socket = TcpStream::connect("{}").unwrap();
+                let _ = socket.read(&mut [0; 10]);
+                panic!("that read should never return");
+            }}
+        "#,
+                addr
+            ),
+        ).build();
+
+    let mut cargo = p.cargo("build").build_command();
+    cargo
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
+    let mut child = cargo.spawn().unwrap();
+
+    // Wait until the build script has connected, then interrupt cargo.
+    let mut sock = listener.accept().unwrap().0;
+    ctrl_c(&mut child);
+
+    assert!(!child.wait().unwrap().success());
+    // The build script died, so its end of the socket is gone: either a clean
+    // EOF (0 bytes) or a connection-reset error is acceptable.
+    match sock.read(&mut [0; 10]) {
+        Ok(n) => assert_eq!(n, 0),
+        Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset),
+    }
+
+    // Ok so what we just did was spawn cargo that spawned a build script, then
+    // we killed cargo in hopes of it killing the build script as well. If all
+    // went well the build script is now dead. On Windows, however, this is
+    // enforced with job objects which means that it may actually be in the
+    // *process* of being torn down at this point.
+    //
+    // Now on Windows we can't completely remove a file until all handles to it
+    // have been closed. Including those that represent running processes. So if
+    // we were to return here then there may still be an open reference to some
+    // file in the build directory. What we want to actually do is wait for the
+    // build script to *complete* exit. Take care of that by blowing away the
+    // build directory here, and panicking if we eventually spin too long
+    // without being able to.
+    for i in 0..10 {
+        match fs::remove_dir_all(&p.root().join("target")) {
+            Ok(()) => return,
+            Err(e) => println!("attempt {}: {}", i, e),
+        }
+        thread::sleep(Duration::from_millis(100));
+    }
+
+    panic!(
+        "couldn't remove build directory after a few tries, seems like \
+         we won't be able to!"
+    );
+}
+
+// Unix: deliver SIGINT to cargo's whole process group (negative pid), which is
+// possible because the child was spawned with setsid (see the env var above).
+#[cfg(unix)]
+fn ctrl_c(child: &mut Child) {
+    use libc;
+
+    let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) };
+    if r < 0 {
+        panic!("failed to kill: {}", io::Error::last_os_error());
+    }
+}
+
+// Windows: no process-group signal; kill the child directly and rely on job
+// objects to take down descendants.
+#[cfg(windows)]
+fn ctrl_c(child: &mut Child) {
+    child.kill().unwrap();
+}
diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs
new file mode 100644 (file)
index 0000000..4fdb220
--- /dev/null
@@ -0,0 +1,102 @@
+use filetime::FileTime;
+use support::{basic_bin_manifest, main_file, project};
+
+// A plain `cargo build` of a binary should emit a `.d` dep-info file next to
+// the binary.
+#[test]
+fn build_dep_info() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("build").run();
+
+    let depinfo_bin_path = &p.bin("foo").with_extension("d");
+
+    assert!(depinfo_bin_path.is_file());
+}
+
+// Dep-info files should also be emitted for an example with crate-type "lib".
+#[test]
+fn build_dep_info_lib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["lib"]
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "lib").with_extension("d").is_file());
+}
+
+// Dep-info files should also be emitted for an example with crate-type "rlib".
+#[test]
+fn build_dep_info_rlib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["rlib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "rlib").with_extension("d").is_file());
+}
+
+// Dep-info files should also be emitted for an example with crate-type "dylib".
+#[test]
+fn build_dep_info_dylib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[example]]
+            name = "ex"
+            crate-type = ["dylib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .build();
+
+    p.cargo("build --example=ex").run();
+    assert!(p.example_lib("ex", "dylib").with_extension("d").is_file());
+}
+
+// A no-op rebuild must not rewrite the dep-info file: its mtime should be
+// unchanged between two consecutive `cargo build`s.
+#[test]
+fn no_rewrite_if_no_change() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").run();
+    let dep_info = p.root().join("target/debug/libfoo.d");
+    let metadata1 = dep_info.metadata().unwrap();
+    p.cargo("build").run();
+    let metadata2 = dep_info.metadata().unwrap();
+
+    assert_eq!(
+        FileTime::from_last_modification_time(&metadata1),
+        FileTime::from_last_modification_time(&metadata2),
+    );
+}
diff --git a/tests/testsuite/directory.rs b/tests/testsuite/directory.rs
new file mode 100644 (file)
index 0000000..a2ce591
--- /dev/null
@@ -0,0 +1,727 @@
+use serde_json;
+use std::collections::HashMap;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::str;
+
+use support::cargo_process;
+use support::git;
+use support::paths;
+use support::registry::{cksum, Package};
+use support::{basic_manifest, project, ProjectBuilder};
+
+// Write a .cargo/config that replaces crates.io with a directory source
+// rooted at `index` — the vendored layout the tests below populate.
+fn setup() {
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        br#"
+            [source.crates-io]
+            replace-with = 'my-awesome-local-registry'
+
+            [source.my-awesome-local-registry]
+            directory = 'index'
+        "#
+    ));
+}
+
+// Builder for one vendored package under `index/<name>`: the project files
+// plus the checksum record that becomes `.cargo-checksum.json`.
+struct VendorPackage {
+    // `Option` so `build()` can move the builder out of &mut self.
+    p: Option<ProjectBuilder>,
+    cksum: Checksum,
+}
+
+// Serialized as `.cargo-checksum.json`: optional whole-package checksum plus
+// per-file checksums keyed by relative path.
+#[derive(Serialize)]
+struct Checksum {
+    package: Option<String>,
+    files: HashMap<String, String>,
+}
+
+impl VendorPackage {
+    // Start a vendored package rooted at `index/<name>` with an empty
+    // package checksum and no files.
+    fn new(name: &str) -> VendorPackage {
+        VendorPackage {
+            p: Some(project().at(&format!("index/{}", name))),
+            cksum: Checksum {
+                package: Some(String::new()),
+                files: HashMap::new(),
+            },
+        }
+    }
+
+    // Add a file and record its checksum in the manifest-to-be.
+    fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage {
+        self.p = Some(self.p.take().unwrap().file(name, contents));
+        self.cksum
+            .files
+            .insert(name.to_string(), cksum(contents.as_bytes()));
+        self
+    }
+
+    // Drop the package-level checksum (`"package": null` in the JSON).
+    fn disable_checksum(&mut self) -> &mut VendorPackage {
+        self.cksum.package = None;
+        self
+    }
+
+    // Build this entry without a Cargo.toml (a non-package directory).
+    fn no_manifest(mut self) -> Self {
+        self.p = self.p.map(|pb| pb.no_manifest());
+        self
+    }
+
+    // Serialize the checksum record, write `.cargo-checksum.json`, and
+    // materialize the project on disk.
+    fn build(&mut self) {
+        let p = self.p.take().unwrap();
+        let json = serde_json::to_string(&self.cksum).unwrap();
+        let p = p.file(".cargo-checksum.json", &json);
+        let _ = p.build();
+    }
+}
+
+// Basic happy path: a crates.io dependency is satisfied from the vendored
+// directory source with no registry update step in the output.
+#[test]
+fn simple() {
+    setup();
+
+    VendorPackage::new("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// `cargo install` should resolve both the installed crate and its dependency
+// from the directory source.
+#[test]
+fn simple_install() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    VendorPackage::new("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.0.1"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate foo; pub fn main() { foo::foo(); }",
+        ).build();
+
+    cargo_process("install bar")
+        .with_stderr(
+            "  Installing bar v0.1.0
+   Compiling foo v0.0.1
+   Compiling bar v0.1.0
+    Finished release [optimized] target(s) in [..]s
+  Installing [..]bar[..]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+}
+
+// `cargo install` should fail with a "no matching package" error (and name
+// suggestions) when a dependency is absent from the directory source.
+#[test]
+fn simple_install_fail() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    VendorPackage::new("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+            baz = "9.8.7"
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate foo; pub fn main() { foo::foo(); }",
+        ).build();
+
+    cargo_process("install bar")
+        .with_status(101)
+        .with_stderr(
+            "  Installing bar v0.1.0
+error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]`
+
+Caused by:
+  no matching package named `baz` found
+location searched: registry `https://github.com/rust-lang/crates.io-index`
+did you mean: bar, foo
+required by package `bar v0.1.0`
+",
+        ).run();
+}
+
+// An optional dependency behind a non-default feature must not be required to
+// exist in the directory source when the feature is off.
+#[test]
+fn install_without_feature_dep() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    VendorPackage::new("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.0.1"
+            baz = { version = "9.8.7", optional = true }
+
+            [features]
+            wantbaz = ["baz"]
+        "#,
+        ).file(
+            "src/main.rs",
+            "extern crate foo; pub fn main() { foo::foo(); }",
+        ).build();
+
+    cargo_process("install bar")
+        .with_stderr(
+            "  Installing bar v0.1.0
+   Compiling foo v0.0.1
+   Compiling bar v0.1.0
+    Finished release [optimized] target(s) in [..]s
+  Installing [..]bar[..]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+}
+
+// An empty directory source should produce a clear "no matching package"
+// error instead of falling back to the network.
+#[test]
+fn not_there() {
+    setup();
+
+    // Create the `index` directory but vendor nothing into it.
+    let _ = project().at("index").build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: no matching package named `bar` found
+location searched: [..]
+required by package `foo v0.1.0 ([..])`
+",
+        ).run();
+}
+
+// Two vendored versions of the same crate (in differently named directories)
+// should coexist, with resolution picking the matching one.
+#[test]
+fn multiple() {
+    setup();
+
+    VendorPackage::new("bar-0.1.0")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .file(".cargo-checksum", "")
+        .build();
+
+    VendorPackage::new("bar-0.2.0")
+        .file("Cargo.toml", &basic_manifest("bar", "0.2.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .file(".cargo-checksum", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// After building against crates.io, switching to a vendored copy whose
+// package checksum matches the published one should rebuild cleanly.
+#[test]
+fn crates_io_then_directory() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    // Publish to the mock registry and keep its checksum for reuse below.
+    let cksum = Package::new("bar", "0.1.0")
+        .file("src/lib.rs", "pub fn bar() -> u32 { 0 }")
+        .publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+
+    // Now enable source replacement with a vendored copy carrying the same
+    // package checksum (even though its source differs).
+    setup();
+
+    let mut v = VendorPackage::new("bar");
+    v.file("Cargo.toml", &basic_manifest("bar", "0.1.0"));
+    v.file("src/lib.rs", "pub fn bar() -> u32 { 1 }");
+    v.cksum.package = Some(cksum);
+    v.build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.1.0 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// Switching to a vendored copy whose checksum does NOT match the lock file
+// must fail with the "checksum changed between lock files" error.
+#[test]
+fn crates_io_then_bad_checksum() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    Package::new("bar", "0.1.0").publish();
+
+    p.cargo("build").run();
+    setup();
+
+    // Fresh vendored copy with a fresh (mismatching) checksum.
+    VendorPackage::new("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: checksum for `bar v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+    * the lock file is corrupt
+    * a replacement source in use (e.g. a mirror) returned a different checksum
+    * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.1.0` is the same as when the lockfile was generated
+
+",
+        ).run();
+}
+
+// Editing a vendored file after its checksum was recorded must be detected
+// and reported as a per-file checksum mismatch.
+#[test]
+fn bad_file_checksum() {
+    setup();
+
+    VendorPackage::new("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    // Tamper with the vendored source after the checksum was written.
+    let mut f = t!(File::create(paths::root().join("index/bar/src/lib.rs")));
+    t!(f.write_all(b"fn bar() -> u32 { 0 }"));
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: the listed checksum of `[..]lib.rs` has changed:
+expected: [..]
+actual:   [..]
+
+directory sources are not intended to be edited, if modifications are \
+required then it is recommended that [replace] is used with a forked copy of \
+the source
+",
+        ).run();
+}
+
+// A directory-source entry containing only dot-files (no manifest) should be
+// silently ignored rather than treated as a broken package.
+#[test]
+fn only_dot_files_ok() {
+    setup();
+
+    VendorPackage::new("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "")
+        .build();
+    VendorPackage::new("foo")
+        .no_manifest()
+        .file(".bar", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// Stray non-package files inside (and next to) the directory source should
+// not break resolution of real vendored packages.
+#[test]
+fn random_files_ok() {
+    setup();
+
+    VendorPackage::new("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "")
+        .build();
+    VendorPackage::new("foo")
+        .no_manifest()
+        .file("bar", "")
+        .file("../test", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// Replacing a git source with a vendored directory source must not rewrite
+// Cargo.lock: the lock file stays byte-identical across the switch.
+#[test]
+fn git_lock_file_doesnt_change() {
+    let git = git::new("git", |p| {
+        p.file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    // Vendored copy has no package checksum (git sources don't have one).
+    VendorPackage::new("git")
+        .file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+        .file("src/lib.rs", "")
+        .disable_checksum()
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            git = {{ git = '{0}' }}
+        "#,
+                git.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    // First build against the real git source to generate the lock file.
+    p.cargo("build").run();
+
+    let mut lock1 = String::new();
+    t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock1));
+
+    // Now redirect the git source to the vendored directory.
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        &format!(
+            r#"
+        [source.my-git-repo]
+        git = '{}'
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        directory = 'index'
+    "#,
+            git.url()
+        ).as_bytes()
+    ));
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    let mut lock2 = String::new();
+    t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock2));
+    assert_eq!(lock1, lock2, "lock files changed");
+}
+
+// Replacing a git source with a directory source without an existing lock
+// file must fail, telling the user to generate the lock file first.
+#[test]
+fn git_override_requires_lockfile() {
+    VendorPackage::new("git")
+        .file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+        .file("src/lib.rs", "")
+        .disable_checksum()
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            git = { git = 'https://example.com/' }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    // Configure source replacement before any build, so no Cargo.lock exists.
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        br#"
+        [source.my-git-repo]
+        git = 'https://example.com/'
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        directory = 'index'
+    "#
+    ));
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to load source for a dependency on `git`
+
+Caused by:
+  Unable to update [..]
+
+Caused by:
+  the source my-git-repo requires a lock file to be present first before it can be
+used against vendored source code
+
+remove the source replacement configuration, generate a lock file, and then
+restore the source replacement configuration to continue the build
+
+",
+        ).run();
+}
+
+// Two sibling packages sharing a target dir should both resolve `baz` from a
+// directory source whose path is relative to the config (`foo/vendor`), even
+// when building from a different working directory (`bar`).
+#[test]
+fn workspace_different_locations() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+
+                [dependencies]
+                baz = "*"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .file("foo/vendor/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("foo/vendor/baz/src/lib.rs", "")
+        .file("foo/vendor/baz/.cargo-checksum.json", "{\"files\":{}}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+                [package]
+                name = 'bar'
+                version = '0.1.0'
+
+                [dependencies]
+                baz = "*"
+            "#,
+        ).file("bar/src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+                [build]
+                target-dir = './target'
+
+                [source.crates-io]
+                replace-with = 'my-awesome-local-registry'
+
+                [source.my-awesome-local-registry]
+                directory = 'foo/vendor'
+            "#,
+        ).build();
+
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    // `baz` was already built by the first invocation, so only `bar` compiles.
+    p.cargo("build")
+        .cwd(p.root().join("bar"))
+        .with_status(0)
+        .with_stderr(
+            "\
+[COMPILING] bar [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// When the vendored source has the crate but not a matching version, the
+// error should list the candidate versions and hint at re-vendoring.
+#[test]
+fn version_missing() {
+    setup();
+
+    VendorPackage::new("foo")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    VendorPackage::new("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                foo = "2"
+            "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install bar")
+        .with_stderr(
+            "\
+[INSTALLING] bar v0.1.0
+error: failed to compile [..]
+
+Caused by:
+  failed to select a version for the requirement `foo = \"^2\"`
+  candidate versions found which didn't match: 0.0.1
+  location searched: directory source `[..] (which is replacing registry `[..]`)
+required by package `bar v0.1.0`
+perhaps a crate was updated and forgotten to be re-vendored?
+",
+        )
+        .with_status(101)
+        .run();
+}
diff --git a/tests/testsuite/doc.rs b/tests/testsuite/doc.rs
new file mode 100644 (file)
index 0000000..2f2f073
--- /dev/null
@@ -0,0 +1,1338 @@
+use std::fs::{self, File};
+use std::io::Read;
+use std::str;
+use support;
+
+use glob::glob;
+use support::paths::CargoPathExt;
+use support::registry::Package;
+use support::{basic_lib_manifest, basic_manifest, git, project};
+use support::{is_nightly, rustc_host};
+
+#[test]
+fn simple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    p.cargo("doc")
+        .with_stderr(
+            "\
+[..] foo v0.0.1 ([CWD])
+[..] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[test]
+fn doc_no_libs() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "foo"
+            doc = false
+        "#,
+        ).file("src/main.rs", "bad code")
+        .build();
+
+    p.cargo("doc").run();
+}
+
+#[test]
+fn doc_twice() {
+    let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+    p.cargo("doc")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("doc").with_stdout("").run();
+}
+
+#[test]
+fn doc_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("doc")
+        .with_stderr(
+            "\
+[..] bar v0.0.1 ([CWD]/bar)
+[..] bar v0.0.1 ([CWD]/bar)
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+    assert!(p.root().join("target/doc/bar/index.html").is_file());
+
+    // Verify that it only emits rmeta for the dependency.
+    assert_eq!(
+        glob(&p.root().join("target/debug/**/*.rlib").to_str().unwrap())
+            .unwrap()
+            .count(),
+        0
+    );
+    assert_eq!(
+        glob(
+            &p.root()
+                .join("target/debug/deps/libbar-*.rmeta")
+                .to_str()
+                .unwrap()
+        ).unwrap()
+        .count(),
+        1
+    );
+
+    p.cargo("doc")
+        .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint")
+        .with_stdout("")
+        .run();
+
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+    assert!(p.root().join("target/doc/bar/index.html").is_file());
+}
+
+#[test]
+fn doc_no_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("doc --no-deps")
+        .with_stderr(
+            "\
+[CHECKING] bar v0.0.1 ([CWD]/bar)
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+    assert!(!p.root().join("target/doc/bar/index.html").is_file());
+}
+
+#[test]
+fn doc_only_bin() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; pub fn foo() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("doc -v").run();
+
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/bar/index.html").is_file());
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[test]
+fn doc_multiple_targets_same_name_lib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo", "bar"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [lib]
+            name = "foo_lib"
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            [lib]
+            name = "foo_lib"
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("doc --all")
+        .with_status(101)
+        .with_stderr_contains("[..] library `foo_lib` is specified [..]")
+        .with_stderr_contains("[..] `foo v0.1.0[..]` [..]")
+        .with_stderr_contains("[..] `bar v0.1.0[..]` [..]")
+        .run();
+}
+
+#[test]
+fn doc_multiple_targets_same_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo", "bar"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [[bin]]
+            name = "foo_lib"
+            path = "src/foo_lib.rs"
+        "#,
+        ).file("foo/src/foo_lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            [lib]
+            name = "foo_lib"
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("doc --all")
+        .with_stderr_contains("[DOCUMENTING] foo v0.1.0 ([CWD]/foo)")
+        .with_stderr_contains("[DOCUMENTING] bar v0.1.0 ([CWD]/bar)")
+        .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+    assert!(p.root().join("target/doc").is_dir());
+    let doc_file = p.root().join("target/doc/foo_lib/index.html");
+    assert!(doc_file.is_file());
+}
+
+#[test]
+fn doc_multiple_targets_same_name_bin() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo", "bar"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [[bin]]
+            name = "foo-cli"
+        "#,
+        ).file("foo/src/foo-cli.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            [[bin]]
+            name = "foo-cli"
+        "#,
+        ).file("bar/src/foo-cli.rs", "")
+        .build();
+
+    p.cargo("doc --all")
+        .with_status(101)
+        .with_stderr_contains("[..] binary `foo_cli` is specified [..]")
+        .with_stderr_contains("[..] `foo v0.1.0[..]` [..]")
+        .with_stderr_contains("[..] `bar v0.1.0[..]` [..]")
+        .run();
+}
+
+#[test]
+fn doc_multiple_targets_same_name_undoced() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo", "bar"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [[bin]]
+            name = "foo-cli"
+        "#,
+        ).file("foo/src/foo-cli.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            [[bin]]
+            name = "foo-cli"
+            doc = false
+        "#,
+        ).file("bar/src/foo-cli.rs", "")
+        .build();
+
+    p.cargo("doc --all").run();
+}
+
+#[test]
+fn doc_lib_bin_same_name_documents_lib() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            //! Binary documentation
+            extern crate foo;
+            fn main() {
+                foo::foo();
+            }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            //! Library documentation
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("doc")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/doc").is_dir());
+    let doc_file = p.root().join("target/doc/foo/index.html");
+    assert!(doc_file.is_file());
+    let mut doc_html = String::new();
+    File::open(&doc_file)
+        .unwrap()
+        .read_to_string(&mut doc_html)
+        .unwrap();
+    assert!(doc_html.contains("Library"));
+    assert!(!doc_html.contains("Binary"));
+}
+
+#[test]
+fn doc_lib_bin_same_name_documents_lib_when_requested() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            //! Binary documentation
+            extern crate foo;
+            fn main() {
+                foo::foo();
+            }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            //! Library documentation
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("doc --lib")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/doc").is_dir());
+    let doc_file = p.root().join("target/doc/foo/index.html");
+    assert!(doc_file.is_file());
+    let mut doc_html = String::new();
+    File::open(&doc_file)
+        .unwrap()
+        .read_to_string(&mut doc_html)
+        .unwrap();
+    assert!(doc_html.contains("Library"));
+    assert!(!doc_html.contains("Binary"));
+}
+
+#[test]
+fn doc_lib_bin_same_name_documents_named_bin_when_requested() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            //! Binary documentation
+            extern crate foo;
+            fn main() {
+                foo::foo();
+            }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            //! Library documentation
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("doc --bin foo")
+        .with_stderr(
+            "\
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/doc").is_dir());
+    let doc_file = p.root().join("target/doc/foo/index.html");
+    assert!(doc_file.is_file());
+    let mut doc_html = String::new();
+    File::open(&doc_file)
+        .unwrap()
+        .read_to_string(&mut doc_html)
+        .unwrap();
+    assert!(!doc_html.contains("Library"));
+    assert!(doc_html.contains("Binary"));
+}
+
+#[test]
+fn doc_lib_bin_same_name_documents_bins_when_requested() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            //! Binary documentation
+            extern crate foo;
+            fn main() {
+                foo::foo();
+            }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            //! Library documentation
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("doc --bins")
+        .with_stderr(
+            "\
+[CHECKING] foo v0.0.1 ([CWD])
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/doc").is_dir());
+    let doc_file = p.root().join("target/doc/foo/index.html");
+    assert!(doc_file.is_file());
+    let mut doc_html = String::new();
+    File::open(&doc_file)
+        .unwrap()
+        .read_to_string(&mut doc_html)
+        .unwrap();
+    assert!(!doc_html.contains("Library"));
+    assert!(doc_html.contains("Binary"));
+}
+
+#[test]
+fn doc_dash_p() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "extern crate a;")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.b]
+            path = "../b"
+        "#,
+        ).file("a/src/lib.rs", "extern crate b;")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("doc -p a")
+        .with_stderr(
+            "\
+[..] b v0.0.1 ([CWD]/b)
+[..] b v0.0.1 ([CWD]/b)
+[DOCUMENTING] a v0.0.1 ([CWD]/a)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn doc_same_name() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/main.rs", "fn main() {}")
+        .file("examples/main.rs", "fn main() {}")
+        .file("tests/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("doc").run();
+}
+
+#[test]
+fn doc_target() {
+    const TARGET: &str = "arm-unknown-linux-gnueabihf";
+
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #![feature(no_core, lang_items)]
+            #![no_core]
+
+            #[lang = "sized"]
+            trait Sized {}
+
+            extern {
+                pub static A: u32;
+            }
+        "#,
+        ).build();
+
+    p.cargo("doc --verbose --target").arg(TARGET).run();
+    assert!(p.root().join(&format!("target/{}/doc", TARGET)).is_dir());
+    assert!(
+        p.root()
+            .join(&format!("target/{}/doc/foo/index.html", TARGET))
+            .is_file()
+    );
+}
+
+#[test]
+fn target_specific_not_documented() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.foo.dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "not rust")
+        .build();
+
+    p.cargo("doc").run();
+}
+
+#[test]
+fn output_not_captured() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file(
+            "a/src/lib.rs",
+            "
+            /// ```
+            /// ☃
+            /// ```
+            pub fn foo() {}
+        ",
+        ).build();
+
+    p.cargo("doc")
+        .without_status()
+        .with_stderr_contains("1 | ☃")
+        .with_stderr_contains(r"error: unknown start of token: \u{2603}")
+        .run();
+}
+
+#[test]
+fn target_specific_documented() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.foo.dependencies]
+            a = {{ path = "a" }}
+            [target.{}.dependencies]
+            a = {{ path = "a" }}
+        "#,
+                rustc_host()
+            ),
+        ).file(
+            "src/lib.rs",
+            "
+            extern crate a;
+
+            /// test
+            pub fn foo() {}
+        ",
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file(
+            "a/src/lib.rs",
+            "
+            /// test
+            pub fn foo() {}
+        ",
+        ).build();
+
+    p.cargo("doc").run();
+}
+
+#[test]
+fn no_document_build_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [build-dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file(
+            "a/src/lib.rs",
+            "
+            /// ```
+            /// ☃
+            /// ```
+            pub fn foo() {}
+        ",
+        ).build();
+
+    p.cargo("doc").run();
+}
+
+#[test]
+fn doc_release() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build --release").run();
+    p.cargo("doc --release -v")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[RUNNING] `rustdoc [..] src/lib.rs [..]`
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn doc_multiple_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+
+            [dependencies.baz]
+            path = "baz"
+        "#,
+        ).file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("doc -p bar -p baz -v").run();
+
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/bar/index.html").is_file());
+    assert!(p.root().join("target/doc/baz/index.html").is_file());
+}
+
+#[test]
+fn features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+
+            [features]
+            foo = ["bar/bar"]
+        "#,
+        ).file("src/lib.rs", r#"#[cfg(feature = "foo")] pub fn foo() {}"#)
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            bar = []
+        "#,
+        ).file(
+            "bar/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-cfg=bar");
+            }
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"#[cfg(feature = "bar")] pub fn bar() {}"#,
+        ).build();
+    p.cargo("doc --features foo").run();
+    assert!(p.root().join("target/doc").is_dir());
+    assert!(p.root().join("target/doc/foo/fn.foo.html").is_file());
+    assert!(p.root().join("target/doc/bar/fn.bar.html").is_file());
+}
+
+#[test]
+fn rerun_when_dir_removed() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            /// dox
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("doc").run();
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+
+    fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap();
+
+    p.cargo("doc").run();
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[test]
+fn document_only_lib() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            /// dox
+            pub fn foo() {}
+        "#,
+        ).file(
+            "src/bin/bar.rs",
+            r#"
+            /// ```
+            /// ☃
+            /// ```
+            pub fn foo() {}
+            fn main() { foo(); }
+        "#,
+        ).build();
+    p.cargo("doc --lib").run();
+    assert!(p.root().join("target/doc/foo/index.html").is_file());
+}
+
+#[test]
+fn plugins_no_use_target() {
+    if !support::is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            proc-macro = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("doc --target=x86_64-unknown-openbsd -v").run();
+}
+
+#[test]
+fn doc_all_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    // The order in which bar is compiled or documented is not deterministic
+    p.cargo("doc --all")
+        .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+        .with_stderr_contains("[..] Checking bar v0.1.0 ([..])")
+        .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+        .run();
+}
+
+#[test]
+fn doc_all_virtual_manifest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    // The order in which bar and baz are documented is not guaranteed
+    p.cargo("doc --all")
+        .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])")
+        .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+        .run();
+}
+
+#[test]
+fn doc_virtual_manifest_all_implied() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    // The order in which bar and baz are documented is not guaranteed
+    p.cargo("doc")
+        .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])")
+        .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+        .run();
+}
+
+#[test]
+fn doc_all_member_dependency_same_name() {
+    if !is_nightly() {
+        // This can be removed once 1.29 is stable (rustdoc --cap-lints).
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    Package::new("bar", "0.1.0").publish();
+
+    p.cargo("doc --all")
+        .with_stderr_contains("[..] Updating `[..]` index")
+        .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+        .run();
+}
+
+#[test]
+fn doc_workspace_open_help_message() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo", "bar"]
+        "#,
+        ).file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    // The order in which bar is compiled or documented is not deterministic
+    p.cargo("doc --all --open")
+        .with_status(101)
+        .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
+        .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+        .with_stderr_contains(
+            "error: Passing multiple packages and `open` \
+             is not supported.",
+        ).with_stderr_contains(
+            "Please re-run this command with `-p <spec>` \
+             where `<spec>` is one of the following:",
+        ).with_stderr_contains("  foo")
+        .with_stderr_contains("  bar")
+        .run();
+}
+
+#[test]
+#[cfg(not(any(target_os = "windows", target_os = "macos")))]
+fn doc_workspace_open_different_library_and_package_names() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [lib]
+            name = "foolib"
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("doc --open")
+        .env("BROWSER", "echo")
+        .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+        .with_stderr_contains("[..] [CWD]/target/doc/foolib/index.html")
+        .run();
+}
+
+#[test]
+#[cfg(not(any(target_os = "windows", target_os = "macos")))]
+fn doc_workspace_open_binary() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [[bin]]
+            name = "foobin"
+            path = "src/main.rs"
+        "#,
+        ).file("foo/src/main.rs", "")
+        .build();
+
+    p.cargo("doc --open")
+        .env("BROWSER", "echo")
+        .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+        .with_stderr_contains("[..] Opening [CWD]/target/doc/foobin/index.html")
+        .run();
+}
+
+#[test]
+#[cfg(not(any(target_os = "windows", target_os = "macos")))]
+fn doc_workspace_open_binary_and_library() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            [lib]
+            name = "foolib"
+            [[bin]]
+            name = "foobin"
+            path = "src/main.rs"
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .file("foo/src/main.rs", "")
+        .build();
+
+    p.cargo("doc --open")
+        .env("BROWSER", "echo")
+        .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
+        .with_stderr_contains("[..] Opening [CWD]/target/doc/foolib/index.html")
+        .run();
+}
+
+#[test]
+fn doc_edition() {
+    if !support::is_nightly() {
+        // Stable rustdoc won't have the edition option.  Remove this once it
+        // is stabilized.
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["edition"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            edition = "2018"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("doc -v")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+        .run();
+
+    p.cargo("test -v")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+        .run();
+}
+
+#[test]
+fn doc_target_edition() {
+    if !support::is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["edition"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            edition = "2018"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("doc -v")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+        .run();
+
+    p.cargo("test -v")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]")
+        .run();
+}
+
+// Tests an issue where depending on different versions of the same crate depending on `cfg`s
+// caused `cargo doc` to fail.
+#[test]
+fn issue_5345() {
+    if !is_nightly() {
+        // This can be removed once 1.29 is stable (rustdoc --cap-lints).
+        return;
+    }
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(all(windows, target_arch = "x86"))'.dependencies]
+            bar = "0.1"
+
+            [target.'cfg(not(all(windows, target_arch = "x86")))'.dependencies]
+            bar = "0.2"
+        "#,
+        ).file("src/lib.rs", "extern crate bar;")
+        .build();
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    foo.cargo("build").run();
+    foo.cargo("doc").run();
+}
+
+#[test]
+fn doc_private_items() {
+    let foo = project()
+        .file("src/lib.rs", "mod private { fn private_item() {} }")
+        .build();
+    foo.cargo("doc --document-private-items").run();
+
+    assert!(foo.root().join("target/doc").is_dir());
+    assert!(
+        foo.root()
+            .join("target/doc/foo/private/index.html")
+            .is_file()
+    );
+}
+
+#[test]
+fn doc_private_ws() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "fn p() {}")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "fn p2() {}")
+        .file("b/src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("doc --all --bins --lib --document-private-items -v")
+        .with_stderr_contains(
+            "[RUNNING] `rustdoc [..] a/src/lib.rs [..]--document-private-items[..]",
+        ).with_stderr_contains(
+            "[RUNNING] `rustdoc [..] b/src/lib.rs [..]--document-private-items[..]",
+        ).with_stderr_contains(
+            "[RUNNING] `rustdoc [..] b/src/main.rs [..]--document-private-items[..]",
+        ).run();
+}
+
+const BAD_INTRA_LINK_LIB: &str = r#"
+#![deny(intra_doc_link_resolution_failure)]
+
+/// [bad_link]
+pub fn foo() {}
+"#;
+
+#[test]
+fn doc_cap_lints() {
+    if !is_nightly() {
+        // This can be removed once 1.29 is stable (rustdoc --cap-lints).
+        return;
+    }
+    let a = git::new("a", |p| {
+        p.file("Cargo.toml", &basic_lib_manifest("a"))
+            .file("src/lib.rs", BAD_INTRA_LINK_LIB)
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = {{ git = '{}' }}
+        "#,
+                a.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("doc")
+        .with_stderr_unordered(
+            "\
+[UPDATING] git repository `[..]`
+[DOCUMENTING] a v0.5.0 ([..])
+[CHECKING] a v0.5.0 ([..])
+[DOCUMENTING] foo v0.0.1 ([..])
+[FINISHED] dev [..]
+",
+        ).run();
+
+    p.root().join("target").rm_rf();
+
+    p.cargo("doc -vv")
+        .with_stderr_contains(
+            "\
+[WARNING] `[bad_link]` cannot be resolved, ignoring it...
+",
+        ).run();
+}
+
+#[test]
+fn doc_message_format() {
+    if !is_nightly() {
+        // This can be removed once 1.30 is stable (rustdoc --error-format stabilized).
+        return;
+    }
+    let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build();
+
+    p.cargo("doc --message-format=json")
+        .with_status(101)
+        .with_json(
+            r#"
+            {
+                "message": {
+                    "children": "{...}",
+                    "code": "{...}",
+                    "level": "error",
+                    "message": "[..]",
+                    "rendered": "[..]",
+                    "spans": "{...}"
+                },
+                "package_id": "foo [..]",
+                "reason": "compiler-message",
+                "target": "{...}"
+            }
+            "#,
+        ).run();
+}
+
+#[test]
+fn short_message_format() {
+    if !is_nightly() {
+        // This can be removed once 1.30 is stable (rustdoc --error-format stabilized).
+        return;
+    }
+    let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build();
+    p.cargo("doc --message-format=short")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+src/lib.rs:4:6: error: `[bad_link]` cannot be resolved, ignoring it...
+error: Could not document `foo`.
+",
+        ).run();
+}
diff --git a/tests/testsuite/edition.rs b/tests/testsuite/edition.rs
new file mode 100644 (file)
index 0000000..8b35c6c
--- /dev/null
@@ -0,0 +1,37 @@
+use support::{basic_lib_manifest, is_nightly, project};
+
+#[test]
+fn edition_works_for_build_script() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+                edition = '2018'
+
+                [build-dependencies]
+                a = { path = 'a' }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+                fn main() {
+                    a::foo();
+                }
+            "#,
+        ).file("a/Cargo.toml", &basic_lib_manifest("a"))
+        .file("a/src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    p.cargo("build -v")
+        .masquerade_as_nightly_cargo()
+        .with_status(0)
+        .run();
+}
diff --git a/tests/testsuite/features.rs b/tests/testsuite/features.rs
new file mode 100644 (file)
index 0000000..daf68a6
--- /dev/null
@@ -0,0 +1,1732 @@
+use std::fs::File;
+use std::io::prelude::*;
+
+use support::paths::CargoPathExt;
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+#[test]
+fn invalid1() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            bar = ["baz"]
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `bar` includes `baz` which is neither a dependency nor another feature
+",
+        ).run();
+}
+
+#[test]
+fn invalid2() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            bar = ["baz"]
+
+            [dependencies.bar]
+            path = "foo"
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Features and dependencies cannot have the same name: `bar`
+",
+        ).run();
+}
+
+#[test]
+fn invalid3() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            bar = ["baz"]
+
+            [dependencies.baz]
+            path = "foo"
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `bar` depends on `baz` which is not an optional dependency.
+Consider adding `optional = true` to the dependency
+",
+        ).run();
+}
+
+#[test]
+fn invalid4() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            features = ["bar"]
+        "#,
+        ).file("src/main.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to select a version for `bar`.
+    ... required by package `foo v0.0.1 ([..])`
+versions that meet the requirements `*` are: 0.0.1
+
+the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features.
+
+
+failed to select a version for `bar` which could resolve this conflict",
+        ).run();
+
+    p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1"));
+
+    p.cargo("build --features test")
+        .with_status(101)
+        .with_stderr("error: Package `foo v0.0.1 ([..])` does not have these features: `test`")
+        .run();
+}
+
+#[test]
+fn invalid5() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies.bar]
+            path = "bar"
+            optional = true
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Dev-dependencies are not allowed to be optional: `bar`
+",
+        ).run();
+}
+
+#[test]
+fn invalid6() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            foo = ["bar/baz"]
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build --features foo")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `foo` requires a feature of `bar` which is not a dependency
+",
+        ).run();
+}
+
+#[test]
+fn invalid7() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            foo = ["bar/baz"]
+            bar = []
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build --features foo")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `foo` requires a feature of `bar` which is not a dependency
+",
+        ).run();
+}
+
+#[test]
+fn invalid8() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            features = ["foo/bar"]
+        "#,
+        ).file("src/main.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build --features foo")
+        .with_status(101)
+        .with_stderr("[ERROR] feature names may not contain slashes: `foo/bar`")
+        .run();
+}
+
+#[test]
+fn invalid9() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build --features bar").with_stderr("\
+warning: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with \
+that name, but only optional dependencies can be used as features. [..]
+   Compiling bar v0.0.1 ([..])
+   Compiling foo v0.0.1 ([..])
+    Finished dev [unoptimized + debuginfo] target(s) in [..]s
+").run();
+}
+
+#[test]
+fn invalid10() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            features = ["baz"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.baz]
+            path = "baz"
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("bar/baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").with_stderr("\
+warning: Package `bar v0.0.1 ([..])` does not have feature `baz`. It has a required dependency with \
+that name, but only optional dependencies can be used as features. [..]
+   Compiling baz v0.0.1 ([..])
+   Compiling bar v0.0.1 ([..])
+   Compiling foo v0.0.1 ([..])
+    Finished dev [unoptimized + debuginfo] target(s) in [..]s
+").run();
+}
+
+#[test]
+fn no_transitive_dep_feature_requirement() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.derived]
+            path = "derived"
+
+            [features]
+            default = ["derived/bar/qux"]
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate derived;
+            fn main() { derived::test(); }
+        "#,
+        ).file(
+            "derived/Cargo.toml",
+            r#"
+            [package]
+            name = "derived"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("derived/src/lib.rs", "extern crate bar; pub use bar::test;")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            qux = []
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #[cfg(feature = "qux")]
+            pub fn test() { print!("test"); }
+        "#,
+        ).build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr("[ERROR] feature names may not contain slashes: `bar/qux`")
+        .run();
+}
+
+#[test]
+fn no_feature_doesnt_build() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[cfg(feature = "bar")]
+            extern crate bar;
+            #[cfg(feature = "bar")]
+            fn main() { bar::bar(); println!("bar") }
+            #[cfg(not(feature = "bar"))]
+            fn main() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.process(&p.bin("foo")).with_stdout("").run();
+
+    p.cargo("build --features bar")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.process(&p.bin("foo")).with_stdout("bar\n").run();
+}
+
+#[test]
+fn default_feature_pulled_in() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["bar"]
+
+            [dependencies.bar]
+            path = "bar"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[cfg(feature = "bar")]
+            extern crate bar;
+            #[cfg(feature = "bar")]
+            fn main() { bar::bar(); println!("bar") }
+            #[cfg(not(feature = "bar"))]
+            fn main() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.process(&p.bin("foo")).with_stdout("bar\n").run();
+
+    p.cargo("build --no-default-features")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.process(&p.bin("foo")).with_stdout("").run();
+}
+
+#[test]
+fn cyclic_feature() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["default"]
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr("[ERROR] Cyclic feature dependency: feature `default` depends on itself")
+        .run();
+}
+
+#[test]
+fn cyclic_feature2() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            foo = ["bar"]
+            bar = ["foo"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn groups_on_groups_on_groups() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["f1"]
+            f1 = ["f2", "bar"]
+            f2 = ["f3", "f4"]
+            f3 = ["f5", "f6", "baz"]
+            f4 = ["f5", "f7"]
+            f5 = ["f6"]
+            f6 = ["f7"]
+            f7 = ["bar"]
+
+            [dependencies.bar]
+            path = "bar"
+            optional = true
+
+            [dependencies.baz]
+            path = "baz"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate bar;
+            #[allow(unused_extern_crates)]
+            extern crate baz;
+            fn main() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn many_cli_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            optional = true
+
+            [dependencies.baz]
+            path = "baz"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate bar;
+            #[allow(unused_extern_crates)]
+            extern crate baz;
+            fn main() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build --features")
+        .arg("bar baz")
+        .with_stderr(
+            "\
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn union_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+            path = "d1"
+            features = ["f1"]
+            [dependencies.d2]
+            path = "d2"
+            features = ["f2"]
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate d1;
+            extern crate d2;
+            fn main() {
+                d2::f1();
+                d2::f2();
+            }
+        "#,
+        ).file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            f1 = ["d2"]
+
+            [dependencies.d2]
+            path = "../d2"
+            features = ["f1"]
+            optional = true
+        "#,
+        ).file("d1/src/lib.rs", "")
+        .file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            f1 = []
+            f2 = []
+        "#,
+        ).file(
+            "d2/src/lib.rs",
+            r#"
+            #[cfg(feature = "f1")] pub fn f1() {}
+            #[cfg(feature = "f2")] pub fn f2() {}
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] d2 v0.0.1 ([CWD]/d2)
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn many_features_no_rebuilds() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name    = "b"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+            features = ["fall"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name    = "a"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            ftest  = []
+            ftest2 = []
+            fall   = ["ftest", "ftest2"]
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] a v0.1.0 ([CWD]/a)
+[COMPILING] b v0.1.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.root().move_into_the_past();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] a v0.1.0 ([..]/a)
+[FRESH] b v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Tests that all cmd lines work with `--features ""`
+#[test]
+fn empty_features() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    p.cargo("build --features").arg("").run();
+}
+
+// Tests that a feature of this package can enable a feature of a dependency (`bar/baz`)
+#[test]
+fn transitive_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            foo = ["bar/baz"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::baz(); }")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            baz = []
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"#[cfg(feature = "baz")] pub fn baz() {}"#,
+        ).build();
+
+    p.cargo("build --features foo").run();
+}
+
+#[test]
+fn everything_in_the_lockfile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            f1 = ["d1/f1"]
+            f2 = ["d2"]
+
+            [dependencies.d1]
+            path = "d1"
+            [dependencies.d2]
+            path = "d2"
+            optional = true
+            [dependencies.d3]
+            path = "d3"
+            optional = true
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            f1 = []
+        "#,
+        ).file("d1/src/lib.rs", "")
+        .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.2"))
+        .file("d2/src/lib.rs", "")
+        .file(
+            "d3/Cargo.toml",
+            r#"
+            [package]
+            name = "d3"
+            version = "0.0.3"
+            authors = []
+
+            [features]
+            f3 = []
+        "#,
+        ).file("d3/src/lib.rs", "")
+        .build();
+
+    p.cargo("fetch").run();
+    let loc = p.root().join("Cargo.lock");
+    let mut lockfile = String::new();
+    t!(t!(File::open(&loc)).read_to_string(&mut lockfile));
+    assert!(
+        lockfile.contains(r#"name = "d1""#),
+        "d1 not found\n{}",
+        lockfile
+    );
+    assert!(
+        lockfile.contains(r#"name = "d2""#),
+        "d2 not found\n{}",
+        lockfile
+    );
+    assert!(
+        lockfile.contains(r#"name = "d3""#),
+        "d3 not found\n{}",
+        lockfile
+    );
+}
+
+#[test]
+fn no_rebuild_when_frobbing_default_feature() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+            b = { path = "b" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            a = { path = "../a", features = ["f1"], default-features = false }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            default = ["f1"]
+            f1 = []
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build").with_stdout("").run();
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn unions_work_with_no_default_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+            b = { path = "b" }
+        "#,
+        ).file("src/lib.rs", "extern crate a; pub fn foo() { a::a(); }")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            a = { path = "../a", features = [], default-features = false }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            default = ["f1"]
+            f1 = []
+        "#,
+        ).file("a/src/lib.rs", r#"#[cfg(feature = "f1")] pub fn a() {}"#)
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build").with_stdout("").run();
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn optional_and_dev_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name    = "test"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "foo", optional = true }
+            [dev-dependencies]
+            foo = { path = "foo" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] test v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn activating_feature_activates_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name    = "test"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "foo", optional = true }
+
+            [features]
+            a = ["foo/a"]
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate foo; pub fn bar() { foo::bar(); }",
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            a = []
+        "#,
+        ).file("foo/src/lib.rs", r#"#[cfg(feature = "a")] pub fn bar() {}"#)
+        .build();
+
+    p.cargo("build --features a -v").run();
+}
+
+#[test]
+fn dep_feature_in_cmd_line() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.derived]
+            path = "derived"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate derived;
+            fn main() { derived::test(); }
+        "#,
+        ).file(
+            "derived/Cargo.toml",
+            r#"
+            [package]
+            name = "derived"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [features]
+            default = []
+            derived-feat = ["bar/some-feat"]
+        "#,
+        ).file("derived/src/lib.rs", "extern crate bar; pub use bar::test;")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            some-feat = []
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #[cfg(feature = "some-feat")]
+            pub fn test() { print!("test"); }
+        "#,
+        ).build();
+
+    // The foo project requires that feature "some-feat" in "bar" is enabled.
+    // Building without any features enabled should fail:
+    p.cargo("build").with_status(101).run();
+
+    // We should be able to enable the feature "derived-feat", which enables "some-feat",
+    // on the command line. The feature is enabled, thus building should be successful:
+    p.cargo("build --features derived/derived-feat").run();
+
+    // Trying to enable features of transitive dependencies is an error
+    p.cargo("build --features bar/some-feat")
+        .with_status(101)
+        .with_stderr("error: Package `foo v0.0.1 ([..])` does not have these features: `bar`")
+        .run();
+
+    // Hierarchical feature specification should still be disallowed
+    p.cargo("build --features derived/bar/some-feat")
+        .with_status(101)
+        .with_stderr("[ERROR] feature names may not contain slashes: `bar/some-feat`")
+        .run();
+}
+
+#[test]
+fn all_features_flag_enables_all_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            foo = []
+            bar = []
+
+            [dependencies.baz]
+            path = "baz"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[cfg(feature = "foo")]
+            pub fn foo() {}
+
+            #[cfg(feature = "bar")]
+            pub fn bar() {
+                extern crate baz;
+                baz::baz();
+            }
+
+            fn main() {
+                foo();
+                bar();
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build --all-features").run();
+}
+
+#[test]
+fn many_cli_features_comma_delimited() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            optional = true
+
+            [dependencies.baz]
+            path = "baz"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate bar;
+            #[allow(unused_extern_crates)]
+            extern crate baz;
+            fn main() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build --features bar,baz")
+        .with_stderr(
+            "\
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn many_cli_features_comma_and_space_delimited() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+            optional = true
+
+            [dependencies.baz]
+            path = "baz"
+            optional = true
+
+            [dependencies.bam]
+            path = "bam"
+            optional = true
+
+            [dependencies.bap]
+            path = "bap"
+            optional = true
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate bar;
+            #[allow(unused_extern_crates)]
+            extern crate baz;
+            #[allow(unused_extern_crates)]
+            extern crate bam;
+            #[allow(unused_extern_crates)]
+            extern crate bap;
+            fn main() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .file("bam/Cargo.toml", &basic_manifest("bam", "0.0.1"))
+        .file("bam/src/lib.rs", "pub fn bam() {}")
+        .file("bap/Cargo.toml", &basic_manifest("bap", "0.0.1"))
+        .file("bap/src/lib.rs", "pub fn bap() {}")
+        .build();
+
+    p.cargo("build --features")
+        .arg("bar,baz bam bap")
+        .with_stderr(
+            "\
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn combining_features_and_package() {
+    Package::new("dep", "1.0.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+
+            [dependencies]
+            dep = "1"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+            [features]
+            main = []
+        "#,
+        ).file(
+            "bar/src/main.rs",
+            r#"
+            #[cfg(feature = "main")]
+            fn main() {}
+        "#,
+        ).build();
+
+    p.cargo("build -Z package-features --all --features main")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+             [ERROR] cannot specify features for more than one package",
+        ).run();
+
+    p.cargo("build -Z package-features --package dep --features main")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+             [ERROR] cannot specify features for packages outside of workspace",
+        ).run();
+    p.cargo("build -Z package-features --package dep --all-features")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+             [ERROR] cannot specify features for packages outside of workspace",
+        ).run();
+    p.cargo("build -Z package-features --package dep --no-default-features")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+             [ERROR] cannot specify features for packages outside of workspace",
+        ).run();
+
+    p.cargo("build -Z package-features --all --all-features")
+        .masquerade_as_nightly_cargo()
+        .run();
+    p.cargo("run -Z package-features --package bar --features main")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
+
+#[test]
+fn namespaced_invalid_feature() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            bar = ["baz"]
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `bar` includes `baz` which is not defined as a feature
+",
+        ).run();
+}
+
+#[test]
+fn namespaced_invalid_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            bar = ["crate:baz"]
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `bar` includes `crate:baz` which is not a known dependency
+",
+        ).run();
+}
+
+#[test]
+fn namespaced_non_optional_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            bar = ["crate:baz"]
+
+            [dependencies]
+            baz = "0.1"
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `bar` includes `crate:baz` which is not an optional dependency.
+Consider adding `optional = true` to the dependency
+",
+        ).run();
+}
+
+#[test]
+fn namespaced_implicit_feature() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            bar = ["baz"]
+
+            [dependencies]
+            baz = { version = "0.1", optional = true }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().run();
+}
+
+#[test]
+fn namespaced_shadowed_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            baz = []
+
+            [dependencies]
+            baz = { version = "0.1", optional = true }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().with_status(101).with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `baz` includes the optional dependency of the same name, but this is left implicit in the features included by this feature.
+Consider adding `crate:baz` to this feature's requirements.
+",
+        )
+        .run();
+}
+
+#[test]
+fn namespaced_shadowed_non_optional() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            baz = []
+
+            [dependencies]
+            baz = "0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().with_status(101).with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `baz` includes the dependency of the same name, but this is left implicit in the features included by this feature.
+Additionally, the dependency must be marked as optional to be included in the feature definition.
+Consider adding `crate:baz` to this feature's requirements and marking the dependency as `optional = true`
+",
+        )
+        .run();
+}
+
+#[test]
+fn namespaced_implicit_non_optional() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            bar = ["baz"]
+
+            [dependencies]
+            baz = "0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().with_status(101).with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `bar` includes `baz` which is not defined as a feature.
+A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition
+",
+        ).run(
+    );
+}
+
+#[test]
+fn namespaced_same_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["namespaced-features"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            namespaced-features = true
+
+            [features]
+            baz = ["crate:baz"]
+
+            [dependencies]
+            baz = { version = "0.1", optional = true }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().run();
+}
+
+#[test]
+fn only_dep_is_optional() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [features]
+                foo = ['bar']
+
+                [dependencies]
+                bar = { version = "0.1", optional = true }
+
+                [dev-dependencies]
+                bar = "0.1"
+            "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn all_features_all_crates() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [workspace]
+                members = ['bar']
+            "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+                [project]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [features]
+                foo = []
+            "#,
+        ).file("bar/src/main.rs", "#[cfg(feature = \"foo\")] fn main() {}")
+        .build();
+
+    p.cargo("build --all-features --all").run();
+}
+
+#[test]
+fn feature_off_dylib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            crate-type = ["dylib"]
+
+            [features]
+            f1 = []
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn hello() -> &'static str {
+                if cfg!(feature = "f1") {
+                    "f1"
+                } else {
+                    "no f1"
+                }
+            }
+        "#,
+        )
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+
+            [dependencies]
+            foo = { path = ".." }
+        "#,
+        )
+        .file(
+            "bar/src/main.rs",
+            r#"
+            extern crate foo;
+
+            fn main() {
+                assert_eq!(foo::hello(), "no f1");
+            }
+        "#,
+        )
+        .build();
+
+    // Build the dylib with `f1` feature.
+    p.cargo("build --features f1").run();
+    // Check that building without `f1` uses a dylib without `f1`.
+    p.cargo("run -p bar").run();
+}
diff --git a/tests/testsuite/fetch.rs b/tests/testsuite/fetch.rs
new file mode 100644 (file)
index 0000000..a76a4c5
--- /dev/null
@@ -0,0 +1,112 @@
+use support::registry::Package;
+use support::rustc_host;
+use support::{basic_manifest, cross_compile, project};
+
+#[test]
+fn no_deps() {
+    let p = project()
+        .file("src/main.rs", "mod a; fn main() {}")
+        .file("src/a.rs", "")
+        .build();
+
+    p.cargo("fetch").with_stdout("").run();
+}
+
+#[test]
+fn fetch_all_platform_dependencies_when_no_target_is_given() {
+    if cross_compile::disabled() {
+        return;
+    }
+
+    Package::new("d1", "1.2.3")
+        .file("Cargo.toml", &basic_manifest("d1", "1.2.3"))
+        .file("src/lib.rs", "")
+        .publish();
+
+    Package::new("d2", "0.1.2")
+        .file("Cargo.toml", &basic_manifest("d2", "0.1.2"))
+        .file("src/lib.rs", "")
+        .publish();
+
+    let target = cross_compile::alternate();
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.{host}.dependencies]
+            d1 = "1.2.3"
+
+            [target.{target}.dependencies]
+            d2 = "0.1.2"
+        "#,
+                host = host,
+                target = target
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("fetch")
+        .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
+        .with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]")
+        .run();
+}
+
+#[test]
+fn fetch_platform_specific_dependencies() {
+    if cross_compile::disabled() {
+        return;
+    }
+
+    Package::new("d1", "1.2.3")
+        .file("Cargo.toml", &basic_manifest("d1", "1.2.3"))
+        .file("src/lib.rs", "")
+        .publish();
+
+    Package::new("d2", "0.1.2")
+        .file("Cargo.toml", &basic_manifest("d2", "0.1.2"))
+        .file("src/lib.rs", "")
+        .publish();
+
+    let target = cross_compile::alternate();
+    let host = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.{host}.dependencies]
+            d1 = "1.2.3"
+
+            [target.{target}.dependencies]
+            d2 = "0.1.2"
+        "#,
+                host = host,
+                target = target
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("fetch --target")
+        .arg(&host)
+        .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
+        .with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]")
+        .run();
+
+    p.cargo("fetch --target")
+        .arg(&target)
+        .with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]")
+        .with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]")
+        .run();
+}
diff --git a/tests/testsuite/fix.rs b/tests/testsuite/fix.rs
new file mode 100644 (file)
index 0000000..0666e2c
--- /dev/null
@@ -0,0 +1,1189 @@
+use std::fs::File;
+
+use git2;
+
+use support::git;
+use support::is_nightly;
+use support::{basic_manifest, project};
+
+use std::io::Write;
+
+#[test]
+fn do_not_fix_broken_builds() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                pub fn foo() {
+                    let mut x = 3;
+                    drop(x);
+                }
+
+                pub fn foo2() {
+                    let _x: u32 = "a";
+                }
+            "#,
+        ).build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_status(101)
+        .run();
+    assert!(p.read_file("src/lib.rs").contains("let mut x = 3;"));
+}
+
+#[test]
+fn fix_broken_if_requested() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                fn foo(a: &u32) -> u32 { a + 1 }
+                pub fn bar() {
+                    foo(1);
+                }
+            "#,
+        ).build();
+
+    p.cargo("fix --allow-no-vcs --broken-code")
+        .env("__CARGO_FIX_YOLO", "1")
+        .run();
+}
+
+#[test]
+fn broken_fixes_backed_out() {
+    let p = project()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+                [workspace]
+            "#,
+        ).file(
+            "foo/src/main.rs",
+            r##"
+                use std::env;
+                use std::fs;
+                use std::io::Write;
+                use std::path::{Path, PathBuf};
+                use std::process::{self, Command};
+
+                fn main() {
+                    let is_lib_rs = env::args_os()
+                        .map(PathBuf::from)
+                        .any(|l| l == Path::new("src/lib.rs"));
+                    if is_lib_rs {
+                        let path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+                        let path = path.join("foo");
+                        if path.exists() {
+                            fs::File::create("src/lib.rs")
+                                .unwrap()
+                                .write_all(b"not rust code")
+                                .unwrap();
+                        } else {
+                            fs::File::create(&path).unwrap();
+                        }
+                    }
+
+                    let status = Command::new("rustc")
+                        .args(env::args().skip(1))
+                        .status()
+                        .expect("failed to run rustc");
+                    process::exit(status.code().unwrap_or(2));
+                }
+            "##,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+                [package]
+                name = 'bar'
+                version = '0.1.0'
+                [workspace]
+            "#,
+        ).file("bar/build.rs", "fn main() {}")
+        .file(
+            "bar/src/lib.rs",
+            r#"
+                pub fn foo() {
+                    let mut x = 3;
+                    drop(x);
+                }
+            "#,
+        ).build();
+
+    // Build our rustc shim
+    p.cargo("build").cwd(p.root().join("foo")).run();
+
+    // Attempt to fix code, but our shim will always fail the second compile
+    p.cargo("fix --allow-no-vcs")
+        .cwd(p.root().join("bar"))
+        .env("__CARGO_FIX_YOLO", "1")
+        .env("RUSTC", p.root().join("foo/target/debug/foo"))
+        .with_status(101)
+        .with_stderr_contains("[..]not rust code[..]")
+        .with_stderr_contains(
+            "\
+             warning: failed to automatically apply fixes suggested by rustc \
+             to crate `bar`\n\
+             \n\
+             after fixes were automatically applied the compiler reported \
+             errors within these files:\n\
+             \n  \
+             * src/lib.rs\n\
+             \n\
+             This likely indicates a bug in either rustc or cargo itself,\n\
+             and we would appreciate a bug report! You're likely to see \n\
+             a number of compiler warnings after this message which cargo\n\
+             attempted to fix but failed. If you could open an issue at\n\
+             https://github.com/rust-lang/cargo/issues\n\
+             quoting the full output of this command we'd be very appreciative!\
+             ",
+        ).with_stderr_does_not_contain("[..][FIXING][..]")
+        .run();
+}
+
+#[test]
+fn fix_path_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                bar = { path = 'bar' }
+
+                [workspace]
+            "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+                extern crate bar;
+
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+            "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file(
+            "bar/src/lib.rs",
+            r#"
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+            "#,
+        ).build();
+
+    p.cargo("fix --allow-no-vcs -p foo -p bar")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stdout("")
+        .with_stderr(
+            "\
+[CHECKING] bar v0.1.0 ([..])
+[FIXING] bar/src/lib.rs (1 fix)
+[CHECKING] foo v0.1.0 ([..])
+[FIXING] src/lib.rs (1 fix)
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn do_not_fix_non_relevant_deps() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                bar = { path = '../bar' }
+
+                [workspace]
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file(
+            "bar/src/lib.rs",
+            r#"
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+            "#,
+        ).build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .cwd(p.root().join("foo"))
+        .run();
+
+    assert!(p.read_file("bar/src/lib.rs").contains("mut"));
+}
+
+#[test]
+fn prepare_for_2018() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                #![allow(unused)]
+                #![feature(rust_2018_preview)]
+
+                mod foo {
+                    pub const FOO: &str = "fooo";
+                }
+
+                mod bar {
+                    use ::foo::FOO;
+                }
+
+                fn main() {
+                    let x = ::foo::FOO;
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXING] src/lib.rs (2 fixes)
+[FINISHED] [..]
+";
+    p.cargo("fix --edition --allow-no-vcs")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+
+    println!("{}", p.read_file("src/lib.rs"));
+    assert!(p.read_file("src/lib.rs").contains("use crate::foo::FOO;"));
+    assert!(
+        p.read_file("src/lib.rs")
+            .contains("let x = crate::foo::FOO;")
+    );
+}
+
+#[test]
+fn local_paths() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                #![feature(rust_2018_preview)]
+
+                use test::foo;
+
+                mod test {
+                    pub fn foo() {}
+                }
+
+                pub fn f() {
+                    foo();
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXING] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+
+    p.cargo("fix --edition --allow-no-vcs")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+
+    println!("{}", p.read_file("src/lib.rs"));
+    assert!(p.read_file("src/lib.rs").contains("use crate::test::foo;"));
+}
+
+#[test]
+fn upgrade_extern_crate() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+                edition = '2018'
+
+                [workspace]
+
+                [dependencies]
+                bar = { path = 'bar' }
+            "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+                #![warn(rust_2018_idioms)]
+                extern crate bar;
+
+                use bar::bar;
+
+                pub fn foo() {
+                    ::bar::bar();
+                    bar();
+                }
+            "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let stderr = "\
+[CHECKING] bar v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[FIXING] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+    println!("{}", p.read_file("src/lib.rs"));
+    assert!(!p.read_file("src/lib.rs").contains("extern crate"));
+}
+
+#[test]
+fn specify_rustflags() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                #![allow(unused)]
+                #![feature(rust_2018_preview)]
+
+                mod foo {
+                    pub const FOO: &str = "fooo";
+                }
+
+                fn main() {
+                    let x = ::foo::FOO;
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXING] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+    p.cargo("fix --edition --allow-no-vcs")
+        .env("RUSTFLAGS", "-C target-cpu=native")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn no_changes_necessary() {
+    let p = project().file("src/lib.rs", "").build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] [..]
+";
+    p.cargo("fix --allow-no-vcs")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn fixes_extra_mut() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXING] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn fixes_two_missing_ampersands() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    let mut y = 3;
+                    x + y
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXING] src/lib.rs (2 fixes)
+[FINISHED] [..]
+";
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn tricky() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                pub fn foo() -> u32 {
+                    let mut x = 3; let mut y = 3;
+                    x + y
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo v0.0.1 ([..])
+[FIXING] src/lib.rs (2 fixes)
+[FINISHED] [..]
+";
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stderr(stderr)
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn preserve_line_endings() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "\
+             fn add(a: &u32) -> u32 { a + 1 }\r\n\
+             pub fn foo() -> u32 { let mut x = 3; add(&x) }\r\n\
+             ",
+        ).build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .run();
+    assert!(p.read_file("src/lib.rs").contains("\r\n"));
+}
+
+#[test]
+fn fix_deny_warnings() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "\
+                #![deny(warnings)]
+                pub fn foo() { let mut x = 3; drop(x); }
+            ",
+        ).build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .run();
+}
+
+#[test]
+fn fix_deny_warnings_but_not_others() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "
+                #![deny(warnings)]
+
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+
+                fn bar() {}
+            ",
+        ).build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .run();
+    assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;"));
+    assert!(p.read_file("src/lib.rs").contains("fn bar() {}"));
+}
+
+#[test]
+fn fix_two_files() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "
+                pub mod bar;
+
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+            ",
+        ).file(
+            "src/bar.rs",
+            "
+                pub fn foo() -> u32 {
+                    let mut x = 3;
+                    x
+                }
+
+            ",
+        ).build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stderr_contains("[FIXING] src/bar.rs (1 fix)")
+        .with_stderr_contains("[FIXING] src/lib.rs (1 fix)")
+        .run();
+    assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;"));
+    assert!(!p.read_file("src/bar.rs").contains("let mut x = 3;"));
+}
+
+#[test]
+fn fixes_missing_ampersand() {
+    let p = project()
+        .file("src/main.rs", "fn main() { let mut x = 3; drop(x); }")
+        .file(
+            "src/lib.rs",
+            r#"
+                pub fn foo() { let mut x = 3; drop(x); }
+
+                #[test]
+                pub fn foo2() { let mut x = 3; drop(x); }
+            "#,
+        ).file(
+            "tests/a.rs",
+            r#"
+                #[test]
+                pub fn foo() { let mut x = 3; drop(x); }
+            "#,
+        ).file("examples/foo.rs", "fn main() { let mut x = 3; drop(x); }")
+        .file("build.rs", "fn main() { let mut x = 3; drop(x); }")
+        .build();
+
+    p.cargo("fix --all-targets --allow-no-vcs")
+            .env("__CARGO_FIX_YOLO", "1")
+            .with_stdout("")
+            .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])")
+            .with_stderr_contains("[FIXING] build.rs (1 fix)")
+            // Don't assert number of fixes for this one, as we don't know if we're
+            // fixing it once or twice! We run this all concurrently, and if we
+            // compile (and fix) in `--test` mode first, we get two fixes. Otherwise
+            // we'll fix one non-test thing, and then fix another one later in
+            // test mode.
+            .with_stderr_contains("[FIXING] src/lib.rs[..]")
+            .with_stderr_contains("[FIXING] src/main.rs (1 fix)")
+            .with_stderr_contains("[FIXING] examples/foo.rs (1 fix)")
+            .with_stderr_contains("[FIXING] tests/a.rs (1 fix)")
+            .with_stderr_contains("[FINISHED] [..]").run();
+    p.cargo("build").run();
+    p.cargo("test").run();
+}
+
+#[test] // `cargo fix` run without and with `--features bar`; the crate must still build each way.
+fn fix_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [features]
+                bar = []
+
+                [workspace]
+            "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[cfg(feature = "bar")]
+            pub fn foo() -> u32 { let mut x = 3; x }
+        "#,
+        ).build();
+
+    p.cargo("fix --allow-no-vcs").run();
+    p.cargo("build").run();
+    p.cargo("fix --features bar --allow-no-vcs").run(); // the #[cfg(feature = "bar")] fn is only compiled once the feature is on
+    p.cargo("build --features bar").run();
+}
+
+#[test] // `cargo fix` still surfaces compiler warnings (here: an unused import) on stderr.
+fn shows_warnings() {
+    let p = project()
+        .file("src/lib.rs", "use std::default::Default; pub fn foo() {}")
+        .build();
+
+    p.cargo("fix --allow-no-vcs")
+        .with_stderr_contains("[..]warning: unused import[..]")
+        .run();
+}
+
+#[test] // with no VCS present, `cargo fix` errors out (exit 101) unless --allow-no-vcs is given.
+fn warns_if_no_vcs_detected() {
+    let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+    p.cargo("fix")
+        .with_status(101)
+        .with_stderr(
+            "\
+             error: no VCS found for this package and `cargo fix` can potentially perform \
+             destructive changes; if you'd like to suppress this error pass `--allow-no-vcs`\
+             ",
+        ).run();
+    p.cargo("fix --allow-no-vcs").run(); // the opt-out flag makes the same invocation succeed
+}
+
+#[test] // uncommitted (dirty) changes abort `cargo fix` with exit 101; --allow-dirty overrides.
+fn warns_about_dirty_working_directory() {
+    let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+    let repo = git2::Repository::init(&p.root()).unwrap();
+    let mut cfg = t!(repo.config()); // identity config is required for git::commit below
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    git::add(&repo);
+    git::commit(&repo);
+    File::create(p.root().join("src/lib.rs")).unwrap(); // truncates lib.rs -> working tree is now dirty
+
+    p.cargo("fix")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: the working directory of this package has uncommitted changes, \
+and `cargo fix` can potentially perform destructive changes; if you'd \
+like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \
+commit the changes to these files:
+
+  * src/lib.rs (dirty)
+
+
+",
+        ).run();
+    p.cargo("fix --allow-dirty").run();
+}
+
+#[test] // staged-but-uncommitted changes also abort `cargo fix`; --allow-staged overrides.
+fn warns_about_staged_working_directory() {
+    let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+    let repo = git2::Repository::init(&p.root()).unwrap();
+    let mut cfg = t!(repo.config());
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    git::add(&repo);
+    git::commit(&repo);
+    File::create(&p.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all("pub fn bar() {}".to_string().as_bytes())
+        .unwrap();
+    git::add(&repo); // stage the edit without committing -> file is "(staged)"
+
+    p.cargo("fix")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: the working directory of this package has uncommitted changes, \
+and `cargo fix` can potentially perform destructive changes; if you'd \
+like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \
+commit the changes to these files:
+
+  * src/lib.rs (staged)
+
+
+",
+        ).run();
+    p.cargo("fix --allow-staged").run();
+}
+
+#[test] // a fully committed (clean) working tree needs no --allow-* escape hatches.
+fn does_not_warn_about_clean_working_directory() {
+    let p = project().file("src/lib.rs", "pub fn foo() {}").build();
+
+    let repo = git2::Repository::init(&p.root()).unwrap();
+    let mut cfg = t!(repo.config());
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    git::add(&repo);
+    git::commit(&repo);
+
+    p.cargo("fix").run();
+}
+
+#[test] // untracked files matched by .gitignore must not count as a dirty working tree.
+fn does_not_warn_about_dirty_ignored_files() {
+    let p = project()
+        .file("src/lib.rs", "pub fn foo() {}")
+        .file(".gitignore", "bar\n")
+        .build();
+
+    let repo = git2::Repository::init(&p.root()).unwrap();
+    let mut cfg = t!(repo.config());
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    git::add(&repo);
+    git::commit(&repo);
+    File::create(p.root().join("bar")).unwrap(); // new file, but matched by .gitignore
+
+    p.cargo("fix").run();
+}
+
+#[test] // with no target-selection flags, fix covers both the lib and the test targets.
+fn fix_all_targets_by_default() {
+    let p = project()
+        .file("src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }")
+        .file("tests/foo.rs", "pub fn foo() { let mut x = 3; drop(x); }")
+        .build();
+    p.cargo("fix --allow-no-vcs")
+        .env("__CARGO_FIX_YOLO", "1")
+        .run();
+    assert!(!p.read_file("src/lib.rs").contains("let mut x")); // spurious `mut` removed in both files
+    assert!(!p.read_file("tests/foo.rs").contains("let mut x"));
+}
+
+#[test] // `fix --edition` must fail when Cargo.toml already declares edition = '2018'.
+fn prepare_for_and_enable() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+                edition = '2018'
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let stderr = "\
+error: cannot prepare for the 2018 edition when it is enabled, so cargo cannot
+automatically fix errors in `src/lib.rs`
+
+To prepare for the 2018 edition you should first remove `edition = '2018'` from
+your `Cargo.toml` and then rerun this command. Once all warnings have been fixed
+then you can re-enable the `edition` key in `Cargo.toml`. For some more
+information about transitioning to the 2018 edition see:
+
+  https://[..]
+
+";
+    p.cargo("fix --edition --allow-no-vcs")
+        .with_stderr_contains(stderr)
+        .with_status(101)
+        .run();
+}
+
+#[test] // two path rewrites inside one expression must both be applied (requires nightly).
+fn fix_overlapping() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                #![feature(rust_2018_preview)]
+
+                pub fn foo<T>() {}
+                pub struct A;
+
+                pub mod bar {
+                    pub fn baz() {
+                        ::foo::<::A>();
+                    }
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo [..]
+[FIXING] src/lib.rs (2 fixes)
+[FINISHED] dev [..]
+";
+
+    p.cargo("fix --allow-no-vcs --prepare-for 2018 --lib")
+        .with_stderr(stderr)
+        .run();
+
+    let contents = p.read_file("src/lib.rs");
+    println!("{}", contents); // echoed for debugging on failure
+    assert!(contents.contains("crate::foo::<crate::A>()")); // both `::` paths rewritten to `crate::`
+}
+
+#[test] // --edition-idioms rewrites a bare trait object to `dyn Trait` (requires nightly).
+fn fix_idioms() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+                edition = '2018'
+            "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+                use std::any::Any;
+                pub fn foo() {
+                    let _x: Box<Any> = Box::new(3);
+                }
+            "#,
+        ).build();
+
+    let stderr = "\
+[CHECKING] foo [..]
+[FIXING] src/lib.rs (1 fix)
+[FINISHED] [..]
+";
+    p.cargo("fix --edition-idioms --allow-no-vcs")
+        .with_stderr(stderr)
+        .with_status(0)
+        .run();
+
+    assert!(p.read_file("src/lib.rs").contains("Box<dyn Any>"));
+}
+
+#[test] // --edition-idioms on a plain 2015-edition crate must succeed as a no-op.
+fn idioms_2015_ok() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("fix --edition-idioms --allow-no-vcs")
+        .masquerade_as_nightly_cargo()
+        .with_status(0)
+        .run();
+}
+
+#[test] // --edition conflicts with --prepare-for; the CLI parser rejects the pair with exit 1.
+fn both_edition_migrate_flags() {
+    let p = project().file("src/lib.rs", "").build();
+
+    let stderr = "\
+error: The argument '--edition' cannot be used with '--prepare-for <prepare-for>'
+
+USAGE:
+    cargo[..] fix --edition --message-format <FMT>
+
+For more information try --help
+";
+
+    p.cargo("fix --prepare-for 2018 --edition")
+        .with_status(1)
+        .with_stderr(stderr)
+        .run();
+}
+
+#[test] // warnings fix cannot resolve are reported again on a second, change-free run (not cached away).
+fn shows_warnings_on_second_run_without_changes() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                use std::default::Default;
+
+                pub fn foo() {
+                }
+            "#,
+        )
+        .build();
+
+    p.cargo("fix --allow-no-vcs")
+        .with_stderr_contains("[..]warning: unused import[..]")
+        .run();
+
+    p.cargo("fix --allow-no-vcs")
+        .with_stderr_contains("[..]warning: unused import[..]")
+        .run();
+}
+
+#[test] // unfixable warnings reappear on a second run for every target kind: lib, bin, tests, example.
+fn shows_warnings_on_second_run_without_changes_on_multiple_targets() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+                use std::default::Default;
+
+                pub fn a() -> u32 { 3 }
+            "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+                use std::default::Default;
+                fn main() { println!("3"); }
+            "#,
+        )
+        .file(
+            "tests/foo.rs",
+            r#"
+                use std::default::Default;
+                #[test]
+                fn foo_test() {
+                    println!("3");
+                }
+            "#,
+        )
+        .file(
+            "tests/bar.rs",
+            r#"
+                use std::default::Default;
+
+                #[test]
+                fn foo_test() {
+                    println!("3");
+                }
+            "#,
+        )
+        .file(
+            "examples/fooxample.rs",
+            r#"
+                use std::default::Default;
+
+                fn main() {
+                    println!("3");
+                }
+            "#,
+        )
+        .build();
+
+    p.cargo("fix --allow-no-vcs --all-targets")
+        .with_stderr_contains(" --> examples/fooxample.rs:2:21")
+        .with_stderr_contains(" --> src/lib.rs:2:21")
+        .with_stderr_contains(" --> src/main.rs:2:21")
+        .with_stderr_contains(" --> tests/bar.rs:2:21")
+        .with_stderr_contains(" --> tests/foo.rs:2:21")
+        .run();
+
+    p.cargo("fix --allow-no-vcs --all-targets") // identical expectations the second time around
+        .with_stderr_contains(" --> examples/fooxample.rs:2:21")
+        .with_stderr_contains(" --> src/lib.rs:2:21")
+        .with_stderr_contains(" --> src/main.rs:2:21")
+        .with_stderr_contains(" --> tests/bar.rs:2:21")
+        .with_stderr_contains(" --> tests/foo.rs:2:21")
+        .run();
+}
+
+#[test] // `fix -p foo` checks dep `bar` once; the second invocation re-checks only `foo`.
+fn doesnt_rebuild_dependencies() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                bar = { path = 'bar' }
+
+                [workspace]
+            "#,
+        ).file("src/lib.rs", "extern crate bar;")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("fix --allow-no-vcs -p foo")
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stdout("")
+        .with_stderr("\
+[CHECKING] bar v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+")
+        .run();
+
+    p.cargo("fix --allow-no-vcs -p foo") // second run: `bar` absent from stderr -> not re-checked
+        .env("__CARGO_FIX_YOLO", "1")
+        .with_stdout("")
+        .with_stderr("\
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+")
+        .run();
+}
+
+#[test] // fix must work when RUSTC_WRAPPER is set (/usr/bin/env acts as a pass-through wrapper).
+fn does_not_crash_with_rustc_wrapper() {
+    // We don't have /usr/bin/env on Windows.
+    if cfg!(windows) {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("RUSTC_WRAPPER", "/usr/bin/env")
+        .run();
+}
+
+#[test] // --edition output must contain no warnings for path dep `a` (exact stderr asserted below).
+fn only_warn_for_relevant_crates() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                a = { path = 'a' }
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+                [package]
+                name = "a"
+                version = "0.1.0"
+            "#,
+        )
+        .file(
+            "a/src/lib.rs",
+            "
+                pub fn foo() {}
+                pub mod bar {
+                    use foo;
+                    pub fn baz() { foo() }
+                }
+            ",
+        )
+        .build();
+
+    p.cargo("fix --allow-no-vcs --edition")
+        .with_stderr("\
+[CHECKING] a v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+")
+        .run();
+}
+
+#[test] // --broken-code keeps the fixed source on disk even though re-compilation fails (exit 101).
+fn fix_to_broken_code() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+                [workspace]
+            "#,
+        ).file(
+            "foo/src/main.rs", // shim rustc: panics on the *second* compile of src/lib.rs (OUT_DIR marker file), otherwise defers to the real rustc
+            r##"
+                use std::env;
+                use std::fs;
+                use std::io::Write;
+                use std::path::{Path, PathBuf};
+                use std::process::{self, Command};
+
+                fn main() {
+                    let is_lib_rs = env::args_os()
+                        .map(PathBuf::from)
+                        .any(|l| l == Path::new("src/lib.rs"));
+                    if is_lib_rs {
+                        let path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+                        let path = path.join("foo");
+                        if path.exists() {
+                            panic!()
+                        } else {
+                            fs::File::create(&path).unwrap();
+                        }
+                    }
+
+                    let status = Command::new("rustc")
+                        .args(env::args().skip(1))
+                        .status()
+                        .expect("failed to run rustc");
+                    process::exit(status.code().unwrap_or(2));
+                }
+            "##,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+                [package]
+                name = 'bar'
+                version = '0.1.0'
+                [workspace]
+            "#,
+        ).file("bar/build.rs", "fn main() {}")
+        .file(
+            "bar/src/lib.rs",
+            "pub fn foo() { let mut x = 3; drop(x); }",
+        ).build();
+
+    // Build our rustc shim
+    p.cargo("build").cwd(p.root().join("foo")).run();
+
+    // Attempt to fix code, but our shim will always fail the second compile
+    p.cargo("fix --allow-no-vcs --broken-code")
+        .cwd(p.root().join("bar"))
+        .env("RUSTC", p.root().join("foo/target/debug/foo"))
+        .with_status(101)
+        .run();
+
+    assert_eq!(p.read_file("bar/src/lib.rs"), "pub fn foo() { let x = 3; drop(x); }"); // `mut` was still removed
+}
diff --git a/tests/testsuite/freshness.rs b/tests/testsuite/freshness.rs
new file mode 100644 (file)
index 0000000..00c50e6
--- /dev/null
@@ -0,0 +1,1179 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use support::paths::CargoPathExt;
+use support::registry::Package;
+use support::sleep_ms;
+use support::{basic_manifest, project};
+
+#[test] // editing a module triggers a rebuild; renaming it away makes the build fail (missing `mod a`).
+fn modifying_and_moving() {
+    let p = project()
+        .file("src/main.rs", "mod a; fn main() {}")
+        .file("src/a.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build").with_stdout("").run(); // second build: fresh, no output
+    p.root().move_into_the_past();
+    p.root().join("target").move_into_the_past();
+
+    File::create(&p.root().join("src/a.rs"))
+        .unwrap()
+        .write_all(b"#[allow(unused)]fn main() {}")
+        .unwrap();
+    p.cargo("build") // content change is detected despite mtimes being pushed into the past
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap();
+    p.cargo("build").with_status(101).run(); // `mod a;` now has no file -> compile error
+}
+
+#[test] // only the changed bin is rebuilt; the lib (mtime pushed back) is skipped even though its source is now invalid.
+fn modify_only_some_files() {
+    let p = project()
+        .file("src/lib.rs", "mod a;")
+        .file("src/a.rs", "")
+        .file("src/main.rs", "mod b; fn main() {}")
+        .file("src/b.rs", "")
+        .file("tests/test.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("test").run();
+    sleep_ms(1000); // ensure later writes get a strictly newer mtime
+
+    assert!(p.bin("foo").is_file());
+
+    let lib = p.root().join("src/lib.rs");
+    let bin = p.root().join("src/b.rs");
+
+    File::create(&lib)
+        .unwrap()
+        .write_all(b"invalid rust code") // would fail to compile if the lib were rebuilt
+        .unwrap();
+    File::create(&bin)
+        .unwrap()
+        .write_all(b"#[allow(unused)]fn foo() {}")
+        .unwrap();
+    lib.move_into_the_past();
+
+    // Make sure the binary is rebuilt, not the lib
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.bin("foo").is_file());
+}
+
+#[test] // rebuild leaf dep `b` via -p, then the top-level package; both succeed. NOTE(review): "while" in the name looks like a typo for "whole".
+fn rebuild_sub_package_then_while_package() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+
+            [dependencies.a]
+            path = "a"
+            [dependencies.b]
+            path = "b"
+        "#,
+        ).file("src/lib.rs", "extern crate a; extern crate b;")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            authors = []
+            version = "0.0.1"
+            [dependencies.b]
+            path = "../b"
+        "#,
+        ).file("a/src/lib.rs", "extern crate b;")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    File::create(&p.root().join("b/src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub fn b() {}"#)
+        .unwrap();
+
+    p.cargo("build -pb").run(); // rebuild only the modified leaf dependency
+
+    File::create(&p.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"extern crate a; extern crate b; pub fn toplevel() {}"#)
+        .unwrap();
+
+    p.cargo("build").run(); // full build after partial -p build must still work
+}
+
+#[test] // alternating feature sets reuses cached artifacts instead of recompiling each switch.
+fn changing_lib_features_caches_targets() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+
+            [features]
+            foo = []
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build --features foo")
+        .with_stderr(
+            "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    /* Targets should be cached from the first build */
+
+    p.cargo("build")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+
+    p.cargo("build").with_stdout("").run();
+
+    p.cargo("build --features foo")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+}
+
+#[test] // dev (panic=abort) and test profiles keep separate cached artifacts; re-running either is fresh.
+fn changing_profiles_caches_targets() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+
+            [profile.dev]
+            panic = "abort"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[..]Compiling foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE]
+[DOCTEST] foo
+",
+        ).run();
+
+    /* Targets should be cached from the first build */
+
+    p.cargo("build")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+
+    p.cargo("test foo")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE]
+[DOCTEST] foo
+",
+        ).run();
+}
+
+#[test] // with a shared target dir, dep_crate builds once per feature set and is reused thereafter.
+fn changing_bin_paths_common_target_features_caches_targets() {
+    // Make sure dep_cache crate is built once per feature
+    let p = project()
+        .no_manifest()
+        .file(
+            ".cargo/config", // both `a` and `b` funnel builds into one shared ./target
+            r#"
+            [build]
+            target-dir = "./target"
+        "#,
+        ).file(
+            "dep_crate/Cargo.toml",
+            r#"
+            [package]
+            name    = "dep_crate"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            ftest  = []
+        "#,
+        ).file(
+            "dep_crate/src/lib.rs",
+            r#"
+            #[cfg(feature = "ftest")]
+            pub fn yo() {
+                println!("ftest on")
+            }
+            #[cfg(not(feature = "ftest"))]
+            pub fn yo() {
+                println!("ftest off")
+            }
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name    = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            dep_crate = {path = "../dep_crate", features = []}
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "a/src/main.rs",
+            r#"
+            extern crate dep_crate;
+            use dep_crate::yo;
+            fn main() {
+                yo();
+            }
+        "#,
+        ).file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name    = "b"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            dep_crate = {path = "../dep_crate", features = ["ftest"]}
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file(
+            "b/src/main.rs",
+            r#"
+            extern crate dep_crate;
+            use dep_crate::yo;
+            fn main() {
+                yo();
+            }
+        "#,
+        ).build();
+
+    /* Build and rebuild a/. Ensure dep_crate only builds once */
+    p.cargo("run")
+        .cwd(p.root().join("a"))
+        .with_stdout("ftest off")
+        .with_stderr(
+            "\
+[..]Compiling dep_crate v0.0.1 ([..])
+[..]Compiling a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/a[EXE]`
+",
+        ).run();
+    p.cargo("clean -p a").cwd(p.root().join("a")).run(); // clean only `a`, keep dep_crate's artifacts
+    p.cargo("run")
+        .cwd(p.root().join("a"))
+        .with_stdout("ftest off")
+        .with_stderr(
+            "\
+[..]Compiling a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/a[EXE]`
+",
+        ).run();
+
+    /* Build and rebuild b/. Ensure dep_crate only builds once */
+    p.cargo("run")
+        .cwd(p.root().join("b"))
+        .with_stdout("ftest on")
+        .with_stderr(
+            "\
+[..]Compiling dep_crate v0.0.1 ([..])
+[..]Compiling b v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/b[EXE]`
+",
+        ).run();
+    p.cargo("clean -p b").cwd(p.root().join("b")).run();
+    p.cargo("run")
+        .cwd(p.root().join("b"))
+        .with_stdout("ftest on")
+        .with_stderr(
+            "\
+[..]Compiling b v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/b[EXE]`
+",
+        ).run();
+
+    /* Build a/ package again. If we cache different feature dep builds correctly,
+     * this should not cause a rebuild of dep_crate */
+    p.cargo("clean -p a").cwd(p.root().join("a")).run();
+    p.cargo("run")
+        .cwd(p.root().join("a"))
+        .with_stdout("ftest off")
+        .with_stderr(
+            "\
+[..]Compiling a v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/a[EXE]`
+",
+        ).run();
+
+    /* Build b/ package again. If we cache different feature dep builds correctly,
+     * this should not cause a rebuild */
+    p.cargo("clean -p b").cwd(p.root().join("b")).run();
+    p.cargo("run")
+        .cwd(p.root().join("b"))
+        .with_stdout("ftest on")
+        .with_stderr(
+            "\
+[..]Compiling b v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/b[EXE]`
+",
+        ).run();
+}
+
+#[test] // toggling a bin feature swaps between two cached binaries; stdout flips between on/off.
+fn changing_bin_features_caches_targets() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+
+            [features]
+            foo = []
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" };
+                println!("{}", msg);
+            }
+        "#,
+        ).build();
+
+    // Windows has a problem with replacing a binary that was just executed.
+    // Unlinking it will succeed, but then attempting to immediately replace
+    // it will sometimes fail with "Already Exists".
+    // See https://github.com/rust-lang/cargo/issues/5481
+    let foo_proc = |name: &str| {
+        let src = p.bin("foo");
+        let dst = p.bin(name); // run a renamed copy instead of the original path
+        fs::rename(&src, &dst).expect("Failed to rename foo");
+        p.process(dst)
+    };
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    foo_proc("off1").with_stdout("feature off").run();
+
+    p.cargo("build --features foo")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    foo_proc("on1").with_stdout("feature on").run();
+
+    /* Targets should be cached from the first build */
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    foo_proc("off2").with_stdout("feature off").run();
+
+    p.cargo("build --features foo")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    foo_proc("on2").with_stdout("feature on").run();
+}
+
+#[test] // truncating src/lib.rs forces a test rebuild, which then fails: foo::foo no longer exists.
+fn rebuild_tests_if_lib_changes() {
+    let p = project()
+        .file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "tests/foo.rs",
+            r#"
+            extern crate foo;
+            #[test]
+            fn test() { foo::foo(); }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+    p.cargo("test").run();
+
+    sleep_ms(1000); // give the truncation a strictly newer mtime
+    File::create(&p.root().join("src/lib.rs")).unwrap(); // truncate: lib is now empty
+
+    p.cargo("build -v").run();
+    p.cargo("test -v").with_status(101).run();
+}
+
+#[test] // the [target.foo] dep of `a` must not force extra rebuilds; `c` compiles once (for dev-dep `b`) when building tests.
+fn no_rebuild_transitive_target_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+            [dev-dependencies]
+            b = { path = "b" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("tests/foo.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target.foo.dependencies]
+            c = { path = "../c" }
+        "#,
+        ).file("a/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            c = { path = "../c" }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file("c/Cargo.toml", &basic_manifest("c", "0.0.1"))
+        .file("c/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("test --no-run") // `a` is absent from stderr: it is not recompiled for the test build
+        .with_stderr(
+            "\
+[COMPILING] c v0.0.1 ([..])
+[COMPILING] b v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test] // a dep's `rerun-if-changed=build.rs` must not cause perpetual rebuilds; the second build is silent.
+fn rerun_if_changed_in_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "a/build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rerun-if-changed=build.rs");
+            }
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test] // a1 and a2 share ./target; building a2 after a1 reuses the entire b -> c -> d chain.
+fn same_build_dir_cached_packages() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "a1/Cargo.toml",
+            r#"
+            [package]
+            name = "a1"
+            version = "0.0.1"
+            authors = []
+            [dependencies]
+            b = { path = "../b" }
+        "#,
+        ).file("a1/src/lib.rs", "")
+        .file(
+            "a2/Cargo.toml",
+            r#"
+            [package]
+            name = "a2"
+            version = "0.0.1"
+            authors = []
+            [dependencies]
+            b = { path = "../b" }
+        "#,
+        ).file("a2/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.0.1"
+            authors = []
+            [dependencies]
+            c = { path = "../c" }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file(
+            "c/Cargo.toml",
+            r#"
+            [package]
+            name = "c"
+            version = "0.0.1"
+            authors = []
+            [dependencies]
+            d = { path = "../d" }
+        "#,
+        ).file("c/src/lib.rs", "")
+        .file("d/Cargo.toml", &basic_manifest("d", "0.0.1"))
+        .file("d/src/lib.rs", "")
+        .file(
+            ".cargo/config", // the shared target dir is what makes the cross-package caching possible
+            r#"
+            [build]
+            target-dir = "./target"
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .cwd(p.root().join("a1"))
+        .with_stderr(&format!(
+            "\
+[COMPILING] d v0.0.1 ({dir}/d)
+[COMPILING] c v0.0.1 ({dir}/c)
+[COMPILING] b v0.0.1 ({dir}/b)
+[COMPILING] a1 v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            dir = p.url().to_file_path().unwrap().to_str().unwrap()
+        )).run();
+    p.cargo("build") // only a2 itself compiles; b/c/d come from the shared cache
+        .cwd(p.root().join("a2"))
+        .with_stderr(
+            "\
+[COMPILING] a2 v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test] // mtimes shifted into the past must not dirty the build; the second build is a no-op.
+fn no_rebuild_if_build_artifacts_move_backwards_in_time() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    p.root().move_into_the_past();
+
+    p.cargo("build")
+        .with_stdout("")
+        .with_stderr("[FINISHED] [..]") // nothing recompiled
+        .run();
+}
+
+#[test]
+fn rebuild_if_build_artifacts_move_forward_in_time() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    p.root().move_into_the_future();
+
+    p.cargo("build")
+        .env("RUST_LOG", "")
+        .with_stdout("")
+        .with_stderr(
+            "\
+[COMPILING] a v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn rebuild_if_environment_changes() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            description = "old desc"
+            version = "0.0.1"
+            authors = []
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                println!("{}", env!("CARGO_PKG_DESCRIPTION"));
+            }
+        "#,
+        ).build();
+
+    p.cargo("run")
+        .with_stdout("old desc")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+        ).run();
+
+    File::create(&p.root().join("Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+        [package]
+        name = "foo"
+        description = "new desc"
+        version = "0.0.1"
+        authors = []
+    "#,
+        ).unwrap();
+
+    p.cargo("run")
+        .with_stdout("new desc")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`
+",
+        ).run();
+}
+
+#[test]
+fn no_rebuild_when_rename_dir() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = { path = "foo" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.0.1"))
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    let mut new = p.root();
+    new.pop();
+    new.push("bar");
+    fs::rename(p.root(), &new).unwrap();
+
+    p.cargo("build")
+        .cwd(&new)
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+}
+
+#[test]
+fn unused_optional_dep() {
+    Package::new("registry1", "0.1.0").publish();
+    Package::new("registry2", "0.1.0").publish();
+    Package::new("registry3", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "p"
+                authors = []
+                version = "0.1.0"
+
+                [dependencies]
+                bar = { path = "bar" }
+                baz = { path = "baz" }
+                registry1 = "*"
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.1.1"
+                authors = []
+
+                [dev-dependencies]
+                registry2 = "*"
+            "#,
+        ).file("bar/src/lib.rs", "")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+                [package]
+                name = "baz"
+                version = "0.1.1"
+                authors = []
+
+                [dependencies]
+                registry3 = { version = "*", optional = true }
+            "#,
+        ).file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn path_dev_dep_registry_updates() {
+    Package::new("registry1", "0.1.0").publish();
+    Package::new("registry2", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "p"
+                authors = []
+                version = "0.1.0"
+
+                [dependencies]
+                bar = { path = "bar" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.1.1"
+                authors = []
+
+                [dependencies]
+                registry1 = "*"
+
+                [dev-dependencies]
+                baz = { path = "../baz"}
+            "#,
+        ).file("bar/src/lib.rs", "")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+                [package]
+                name = "baz"
+                version = "0.1.1"
+                authors = []
+
+                [dependencies]
+                registry2 = "*"
+            "#,
+        ).file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn change_panic_mode() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [workspace]
+                members = ['bar', 'baz']
+                [profile.dev]
+                panic = 'abort'
+            "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+        .file("bar/src/lib.rs", "")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+                [package]
+                name = "baz"
+                version = "0.1.1"
+                authors = []
+
+                [lib]
+                proc-macro = true
+
+                [dependencies]
+                bar = { path = '../bar' }
+            "#,
+        ).file("baz/src/lib.rs", "extern crate bar;")
+        .build();
+
+    p.cargo("build -p bar").run();
+    p.cargo("build -p baz").run();
+}
+
+#[test]
+fn dont_rebuild_based_on_plugins() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.1.1"
+
+                [workspace]
+                members = ['baz']
+
+                [dependencies]
+                proc-macro-thing = { path = 'proc-macro-thing' }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "proc-macro-thing/Cargo.toml",
+            r#"
+                [package]
+                name = "proc-macro-thing"
+                version = "0.1.1"
+
+                [lib]
+                proc-macro = true
+
+                [dependencies]
+                qux = { path = '../qux' }
+            "#,
+        ).file("proc-macro-thing/src/lib.rs", "")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+                [package]
+                name = "baz"
+                version = "0.1.1"
+
+                [dependencies]
+                qux = { path = '../qux' }
+            "#,
+        ).file("baz/src/main.rs", "fn main() {}")
+        .file("qux/Cargo.toml", &basic_manifest("qux", "0.1.1"))
+        .file("qux/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build -p baz").run();
+    p.cargo("build").with_stderr("[FINISHED] [..]\n").run();
+    p.cargo("build -p bar")
+        .with_stderr("[FINISHED] [..]\n")
+        .run();
+}
+
+#[test]
+fn reuse_workspace_lib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.1.1"
+
+                [workspace]
+
+                [dependencies]
+                baz = { path = 'baz' }
+            "#,
+        ).file("src/lib.rs", "")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1"))
+        .file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("test -p baz -v --no-run")
+        .with_stderr(
+            "\
+[COMPILING] baz v0.1.1 ([..])
+[RUNNING] `rustc[..] --test [..]`
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn reuse_shared_build_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            shared = {path = "shared"}
+
+            [workspace]
+            members = ["shared", "bar"]
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file("shared/Cargo.toml", &basic_manifest("shared", "0.0.1"))
+        .file("shared/src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+
+            [build-dependencies]
+            shared = { path = "../shared" }
+        "#,
+        )
+        .file("bar/src/lib.rs", "")
+        .file("bar/build.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --all").run();
+    // This should not recompile!
+    p.cargo("build -p foo -v")
+        .with_stderr(
+            "\
+[FRESH] shared [..]
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+        )
+        .run();
+}
+
+#[test]
+fn reuse_panic_build_dep_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [build-dependencies]
+            bar = { path = "bar" }
+
+            [dev-dependencies]
+            bar = { path = "bar" }
+
+            [profile.dev]
+            panic = "abort"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    // Check that `bar` is not built twice. It is only needed once (without `panic`).
+    p.cargo("test --lib --no-run -v")
+        .with_stderr(
+            "\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build [..]
+[RUNNING] [..]build-script-build`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--test[..]
+[FINISHED] [..]
+",
+        )
+        .run();
+}
+
+#[test]
+fn reuse_panic_pm() {
+    // foo(panic) -> bar(panic)
+    // somepm(nopanic) -> bar(nopanic)
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { path = "bar" }
+            somepm = { path = "somepm" }
+
+            [profile.dev]
+            panic = "abort"
+        "#,
+        )
+        .file("src/lib.rs", "extern crate bar;")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .file(
+            "somepm/Cargo.toml",
+            r#"
+            [package]
+            name = "somepm"
+            version = "0.0.1"
+
+            [lib]
+            proc-macro = true
+
+            [dependencies]
+            bar = { path = "../bar" }
+        "#,
+        )
+        .file("somepm/src/lib.rs", "extern crate bar;")
+        .build();
+
+    // bar is built once without panic (for proc-macro) and once with (for the
+    // normal dependency).
+    p.cargo("build -v")
+        .with_stderr_unordered(
+            "\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C debuginfo=2 [..]
+[COMPILING] somepm [..]
+[RUNNING] `rustc --crate-name somepm [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C panic=abort[..]
+[FINISHED] [..]
+",
+        )
+        .run();
+}
diff --git a/tests/testsuite/generate_lockfile.rs b/tests/testsuite/generate_lockfile.rs
new file mode 100644 (file)
index 0000000..5d00d47
--- /dev/null
@@ -0,0 +1,231 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use support::registry::Package;
+use support::{basic_manifest, paths, project, ProjectBuilder};
+
+#[test]
+fn adding_and_removing_packages() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("generate-lockfile").run();
+
+    let toml = p.root().join("Cargo.toml");
+    let lock1 = p.read_lockfile();
+
+    // add a dep
+    File::create(&toml)
+        .unwrap()
+        .write_all(
+            br#"
+        [package]
+        name = "foo"
+        authors = []
+        version = "0.0.1"
+
+        [dependencies.bar]
+        path = "bar"
+    "#,
+        ).unwrap();
+    p.cargo("generate-lockfile").run();
+    let lock2 = p.read_lockfile();
+    assert_ne!(lock1, lock2);
+
+    // change the dep
+    File::create(&p.root().join("bar/Cargo.toml"))
+        .unwrap()
+        .write_all(basic_manifest("bar", "0.0.2").as_bytes())
+        .unwrap();
+    p.cargo("generate-lockfile").run();
+    let lock3 = p.read_lockfile();
+    assert_ne!(lock1, lock3);
+    assert_ne!(lock2, lock3);
+
+    // remove the dep
+    println!("lock4");
+    File::create(&toml)
+        .unwrap()
+        .write_all(
+            br#"
+        [package]
+        name = "foo"
+        authors = []
+        version = "0.0.1"
+    "#,
+        ).unwrap();
+    p.cargo("generate-lockfile").run();
+    let lock4 = p.read_lockfile();
+    assert_eq!(lock1, lock4);
+}
+
+#[test]
+fn no_index_update() {
+    Package::new("serde", "1.0.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+
+            [dependencies]
+            serde = "1.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("generate-lockfile")
+        .with_stderr("[UPDATING] `[..]` index")
+        .run();
+
+    p.cargo("generate-lockfile -Zno-index-update")
+        .masquerade_as_nightly_cargo()
+        .with_stdout("")
+        .with_stderr("")
+        .run();
+}
+
+#[test]
+fn preserve_metadata() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("generate-lockfile").run();
+
+    let metadata = r#"
+[metadata]
+bar = "baz"
+foo = "bar"
+"#;
+    let lockfile = p.root().join("Cargo.lock");
+    let lock = p.read_lockfile();
+    let data = lock + metadata;
+    File::create(&lockfile)
+        .unwrap()
+        .write_all(data.as_bytes())
+        .unwrap();
+
+    // Build and make sure the metadata is still there
+    p.cargo("build").run();
+    let lock = p.read_lockfile();
+    assert!(lock.contains(metadata.trim()), "{}", lock);
+
+    // Update and make sure the metadata is still there
+    p.cargo("update").run();
+    let lock = p.read_lockfile();
+    assert!(lock.contains(metadata.trim()), "{}", lock);
+}
+
+#[test]
+fn preserve_line_endings_issue_2076() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    let lockfile = p.root().join("Cargo.lock");
+    p.cargo("generate-lockfile").run();
+    assert!(lockfile.is_file());
+    p.cargo("generate-lockfile").run();
+
+    let lock0 = p.read_lockfile();
+
+    assert!(lock0.starts_with("[[package]]\n"));
+
+    let lock1 = lock0.replace("\n", "\r\n");
+    {
+        File::create(&lockfile)
+            .unwrap()
+            .write_all(lock1.as_bytes())
+            .unwrap();
+    }
+
+    p.cargo("generate-lockfile").run();
+
+    let lock2 = p.read_lockfile();
+
+    assert!(lock2.starts_with("[[package]]\r\n"));
+    assert_eq!(lock1, lock2);
+}
+
+#[test]
+fn cargo_update_generate_lockfile() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    let lockfile = p.root().join("Cargo.lock");
+    assert!(!lockfile.is_file());
+    p.cargo("update").with_stdout("").run();
+    assert!(lockfile.is_file());
+
+    fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+
+    assert!(!lockfile.is_file());
+    p.cargo("update").with_stdout("").run();
+    assert!(lockfile.is_file());
+}
+
+#[test]
+fn duplicate_entries_in_lockfile() {
+    let _a = ProjectBuilder::new(paths::root().join("a"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            authors = []
+            version = "0.0.1"
+
+            [dependencies]
+            common = {path="common"}
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let common_toml = &basic_manifest("common", "0.0.1");
+
+    let _common_in_a = ProjectBuilder::new(paths::root().join("a/common"))
+        .file("Cargo.toml", common_toml)
+        .file("src/lib.rs", "")
+        .build();
+
+    let b = ProjectBuilder::new(paths::root().join("b"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            authors = []
+            version = "0.0.1"
+
+            [dependencies]
+            common = {path="common"}
+            a = {path="../a"}
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let _common_in_b = ProjectBuilder::new(paths::root().join("b/common"))
+        .file("Cargo.toml", common_toml)
+        .file("src/lib.rs", "")
+        .build();
+
+    // should fail due to a duplicate package `common` in the lockfile
+    b.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "[..]package collision in the lockfile: packages common [..] and \
+             common [..] are different, but only one can be written to \
+             lockfile unambiguously",
+        ).run();
+}
diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs
new file mode 100644 (file)
index 0000000..2e4d463
--- /dev/null
@@ -0,0 +1,2627 @@
+use git2;
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::net::{TcpListener, TcpStream};
+use std::path::Path;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Arc;
+use std::thread;
+
+use support::paths::{self, CargoPathExt};
+use support::sleep_ms;
+use support::{basic_lib_manifest, basic_manifest, git, main_file, path2url, project};
+
+#[test]
+fn cargo_compile_simple_git_dep() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("dep1"))
+            .file(
+                "src/dep1.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "hello world"
+                }
+            "#,
+            )
+    }).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/main.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    let git_root = git_project.root();
+
+    project
+        .cargo("build")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [COMPILING] dep1 v0.5.0 ({}#[..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+            path2url(&git_root),
+            path2url(&git_root),
+        )).run();
+
+    assert!(project.bin("foo").is_file());
+
+    project
+        .process(&project.bin("foo"))
+        .with_stdout("hello world\n")
+        .run();
+}
+
+#[test]
+fn cargo_compile_forbid_git_httpsrepo_offline() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["chabapok@example.com"]
+
+            [dependencies.dep1]
+            git = 'https://github.com/some_user/dep1.git'
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build -Zoffline").masquerade_as_nightly_cargo().with_status(101).
+                    with_stderr("\
+error: failed to load source for a dependency on `dep1`
+
+Caused by:
+  Unable to update https://github.com/some_user/dep1.git
+
+Caused by:
+  can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (-Z offline)").run();
+}
+
+#[test]
+fn cargo_compile_offline_with_cached_git_dep() {
+    let git_project = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("dep1"))
+            .file(
+                "src/lib.rs",
+                r#"
+                pub static COOL_STR:&str = "cached git repo rev1";
+            "#,
+            )
+    }).unwrap();
+
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+    // Commit the changes and make sure we trigger a recompile
+    File::create(&git_project.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub static COOL_STR:&str = "cached git repo rev2";"#)
+        .unwrap();
+    git::add(&repo);
+    let rev2 = git::commit(&repo);
+
+    {
+        // Cache both rev1 and rev2 of the git dependency in cargo's git database
+        let prj = project()
+            .at("cache_git_dep")
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+            [project]
+            name = "cache_git_dep"
+            version = "0.5.0"
+
+            [dependencies.dep1]
+            git = '{}'
+            rev = "{}"
+            "#,
+                    git_project.url(),
+                    rev1
+                ),
+            ).file("src/main.rs", "fn main(){}")
+            .build();
+        prj.cargo("build").run();
+
+        File::create(&prj.root().join("Cargo.toml"))
+            .unwrap()
+            .write_all(
+                &format!(
+                    r#"
+            [project]
+            name = "cache_git_dep"
+            version = "0.5.0"
+
+            [dependencies.dep1]
+            git = '{}'
+            rev = "{}"
+            "#,
+                    git_project.url(),
+                    rev2
+                ).as_bytes(),
+            ).unwrap();
+        prj.cargo("build").run();
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+
+            [dependencies.dep1]
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/main.rs",
+            &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]),
+        ).build();
+
+    let git_root = git_project.root();
+
+    p.cargo("build -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(format!(
+            "\
+[COMPILING] dep1 v0.5.0 ({}#[..])
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+            path2url(git_root),
+        )).run();
+
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo"))
+        .with_stdout("hello from cached git repo rev2\n")
+        .run();
+
+    File::create(&p.root().join("Cargo.toml"))
+        .unwrap()
+        .write_all(
+            &format!(
+                r#"
+        [project]
+        name = "foo"
+        version = "0.5.0"
+
+        [dependencies.dep1]
+        git = '{}'
+        rev = "{}"
+    "#,
+                git_project.url(),
+                rev1
+            ).as_bytes(),
+        ).unwrap();
+
+    p.cargo("build -Zoffline").masquerade_as_nightly_cargo().run();
+    p.process(&p.bin("foo"))
+        .with_stdout("hello from cached git repo rev1\n")
+        .run();
+}
+
+#[test]
+fn cargo_compile_git_dep_branch() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("dep1"))
+            .file(
+                "src/dep1.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "hello world"
+                }
+            "#,
+            )
+    }).unwrap();
+
+    // Make a new branch based on the current HEAD commit
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let head = repo.head().unwrap().target().unwrap();
+    let head = repo.find_commit(head).unwrap();
+    repo.branch("branchy", &head, true).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            git = '{}'
+            branch = "branchy"
+
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/main.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    let git_root = git_project.root();
+
+    project
+        .cargo("build")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+            path2url(&git_root),
+            path2url(&git_root),
+        )).run();
+
+    assert!(project.bin("foo").is_file());
+
+    project
+        .process(&project.bin("foo"))
+        .with_stdout("hello world\n")
+        .run();
+}
+
+#[test]
+fn cargo_compile_git_dep_tag() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("dep1"))
+            .file(
+                "src/dep1.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "hello world"
+                }
+            "#,
+            )
+    }).unwrap();
+
+    // Make a tag corresponding to the current HEAD
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let head = repo.head().unwrap().target().unwrap();
+    repo.tag(
+        "v0.1.0",
+        &repo.find_object(head, None).unwrap(),
+        &repo.signature().unwrap(),
+        "make a new tag",
+        false,
+    ).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            git = '{}'
+            tag = "v0.1.0"
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/main.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    let git_root = git_project.root();
+
+    project
+        .cargo("build")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+            path2url(&git_root),
+            path2url(&git_root),
+        )).run();
+
+    assert!(project.bin("foo").is_file());
+
+    project
+        .process(&project.bin("foo"))
+        .with_stdout("hello world\n")
+        .run();
+
+    project.cargo("build").run();
+}
+
+#[test]
+fn cargo_compile_with_nested_paths() {
+    let git_project = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+                [project]
+
+                name = "dep1"
+                version = "0.5.0"
+                authors = ["carlhuda@example.com"]
+
+                [dependencies.dep2]
+
+                version = "0.5.0"
+                path = "vendor/dep2"
+
+                [lib]
+
+                name = "dep1"
+            "#,
+            ).file(
+                "src/dep1.rs",
+                r#"
+                extern crate dep2;
+
+                pub fn hello() -> &'static str {
+                    dep2::hello()
+                }
+            "#,
+            ).file("vendor/dep2/Cargo.toml", &basic_lib_manifest("dep2"))
+            .file(
+                "vendor/dep2/src/dep2.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "hello world"
+                }
+            "#,
+            )
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            version = "0.5.0"
+            git = '{}'
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/foo.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("hello world\n").run();
+}
+
+// A malformed manifest buried in a nested path of a git dependency
+// (`vendor/dep2/Cargo.toml` containing "!INVALID!") must not prevent the
+// dependency itself (`dep1`) from building and linking.
+#[test]
+fn cargo_compile_with_malformed_nested_paths() {
+    let git_project = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("dep1"))
+            .file(
+                "src/dep1.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "hello world"
+                }
+            "#,
+            ).file("vendor/dep2/Cargo.toml", "!INVALID!")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            version = "0.5.0"
+            git = '{}'
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/foo.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+
+    // The binary runs and uses dep1 — the invalid nested manifest was ignored.
+    p.process(&p.bin("foo")).with_stdout("hello world\n").run();
+}
+
+// Two distinct packages (`dep1`, `dep2`) living in subdirectories of the SAME
+// git repository can both be depended on via the same git URL; cargo locates
+// each by name within the repo.
+#[test]
+fn cargo_compile_with_meta_package() {
+    let git_project = git::new("meta-dep", |project| {
+        project
+            .file("dep1/Cargo.toml", &basic_lib_manifest("dep1"))
+            .file(
+                "dep1/src/dep1.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "this is dep1"
+                }
+            "#,
+            ).file("dep2/Cargo.toml", &basic_lib_manifest("dep2"))
+            .file(
+                "dep2/src/dep2.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "this is dep2"
+                }
+            "#,
+            )
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            version = "0.5.0"
+            git = '{}'
+
+            [dependencies.dep2]
+
+            version = "0.5.0"
+            git = '{}'
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+                git_project.url(),
+                git_project.url()
+            ),
+        ).file(
+            "src/foo.rs",
+            &main_file(
+                r#""{} {}", dep1::hello(), dep2::hello()"#,
+                &["dep1", "dep2"],
+            ),
+        ).build();
+
+    p.cargo("build").run();
+
+    assert!(p.bin("foo").is_file());
+
+    // Both sub-packages from the one repo must be linked in.
+    p.process(&p.bin("foo"))
+        .with_stdout("this is dep1 this is dep2\n")
+        .run();
+}
+
+// A scp-style short SSH URL (`git@github.com:a/dep`) is not a valid absolute
+// URL; cargo should reject the manifest with a clear parse error rather than
+// attempting a network fetch.
+#[test]
+fn cargo_compile_with_short_ssh_git() {
+    let url = "git@github.com:a/dep";
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep]
+
+            git = "{}"
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+                url
+            ),
+        ).file(
+            "src/foo.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    // Exit status 101 with the URL echoed back in the error chain.
+    p.cargo("build")
+        .with_status(101)
+        .with_stdout("")
+        .with_stderr(&format!(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  invalid url `{}`: relative URL without a base
+",
+            url
+        )).run();
+}
+
+// Two crates in one build graph may pin the SAME git dependency at two
+// DIFFERENT revisions: `foo` locks `bar` at rev1 (bar() == 1) while the path
+// dependency `baz` locks it at rev2 (bar() == 2). Both copies must coexist.
+#[test]
+fn two_revs_same_deps() {
+    let bar = git::new("meta-dep", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.0.0"))
+            .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+    }).unwrap();
+
+    let repo = git2::Repository::open(&bar.root()).unwrap();
+    let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+    // Commit the changes and make sure we trigger a recompile
+    File::create(&bar.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub fn bar() -> i32 { 2 }"#)
+        .unwrap();
+    git::add(&repo);
+    let rev2 = git::commit(&repo);
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            git = '{}'
+            rev = "{}"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+                bar.url(),
+                rev1
+            ),
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate bar;
+            extern crate baz;
+
+            fn main() {
+                assert_eq!(bar::bar(), 1);
+                assert_eq!(baz::baz(), 2);
+            }
+        "#,
+        ).build();
+
+    let _baz = project()
+        .at("baz")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "baz"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            git = '{}'
+            rev = "{}"
+        "#,
+                bar.url(),
+                rev2
+            ),
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            pub fn baz() -> i32 { bar::bar() }
+        "#,
+        ).build();
+
+    // Running the binary exercises the asserts in main(), proving each crate
+    // saw its own pinned revision of `bar`.
+    foo.cargo("build -v").run();
+    assert!(foo.bin("foo").is_file());
+    foo.process(&foo.bin("foo")).run();
+}
+
+// End-to-end freshness behaviour for a git dependency: compile once, stay
+// fresh across uncommitted edits and no-op updates, only recompile after
+// `cargo update` actually moves the locked revision, and confirm that
+// `cargo clean -p` only cleans the named package.
+#[test]
+fn recompilation() {
+    let git_project = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("bar"))
+            .file("src/bar.rs", "pub fn bar() {}")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            version = "0.5.0"
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"]))
+        .build();
+
+    // First time around we should compile both foo and bar
+    p.cargo("build")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [COMPILING] bar v0.5.0 ({}#[..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+            git_project.url(),
+            git_project.url(),
+        )).run();
+
+    // Don't recompile the second time
+    p.cargo("build").with_stdout("").run();
+
+    // Modify a file manually, shouldn't trigger a recompile
+    File::create(&git_project.root().join("src/bar.rs"))
+        .unwrap()
+        .write_all(br#"pub fn bar() { println!("hello!"); }"#)
+        .unwrap();
+
+    p.cargo("build").with_stdout("").run();
+
+    // `update` with nothing committed yet fetches but changes nothing.
+    p.cargo("update")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`",
+            git_project.url()
+        )).run();
+
+    p.cargo("build").with_stdout("").run();
+
+    // Commit the changes and make sure we don't trigger a recompile because the
+    // lockfile says not to change
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    println!("compile after commit");
+    p.cargo("build").with_stdout("").run();
+    p.root().move_into_the_past();
+
+    // Update the dependency and carry on!
+    p.cargo("update")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
+             ",
+            git_project.url()
+        )).run();
+    println!("going for the last compile");
+    p.cargo("build")
+        .with_stderr(&format!(
+            "[COMPILING] bar v0.5.0 ({}#[..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+            git_project.url(),
+        )).run();
+
+    // Make sure clean only cleans one dep
+    p.cargo("clean -p foo").with_stdout("").run();
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]"
+        ).run();
+}
+
+// When two path dependencies (`dep1`, `dep2`) share a single git dependency
+// (`bar`), updates must behave sanely: non-aggressive `update -p dep1` does
+// not touch transitive deps, a bogus `--precise` rev fails cleanly,
+// `--precise <old-rev>` is a no-op when the rev is already in the db,
+// `--aggressive` actually moves `bar`, and only ONE copy of `bar` is ever
+// compiled.
+#[test]
+fn update_with_shared_deps() {
+    let git_project = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_lib_manifest("bar"))
+            .file("src/bar.rs", "pub fn bar() {}")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+            path = "dep1"
+            [dependencies.dep2]
+            path = "dep2"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate dep1;
+            #[allow(unused_extern_crates)]
+            extern crate dep2;
+            fn main() {}
+        "#,
+        ).file(
+            "dep1/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "dep1"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            version = "0.5.0"
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file("dep1/src/lib.rs", "")
+        .file(
+            "dep2/Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "dep2"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            version = "0.5.0"
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file("dep2/src/lib.rs", "")
+        .build();
+
+    // First time around we should compile both foo and bar
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{git}`
+[COMPILING] bar v0.5.0 ({git}#[..])
+[COMPILING] [..] v0.5.0 ([..])
+[COMPILING] [..] v0.5.0 ([..])
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+            git = git_project.url(),
+        )).run();
+
+    // Modify a file manually, and commit it
+    File::create(&git_project.root().join("src/bar.rs"))
+        .unwrap()
+        .write_all(br#"pub fn bar() { println!("hello!"); }"#)
+        .unwrap();
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let old_head = repo.head().unwrap().target().unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    // Ensure mtimes differ across the commit for freshness tracking.
+    sleep_ms(1000);
+
+    // By default, not transitive updates
+    println!("dep1 update");
+    p.cargo("update -p dep1").with_stdout("").run();
+
+    // Don't do anything bad on a weird --precise argument
+    println!("bar bad precise update");
+    p.cargo("update -p bar --precise 0.1.2")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] git repository [..]
+[ERROR] Unable to update [..]
+
+Caused by:
+  revspec '0.1.2' not found; [..]
+",
+        ).run();
+
+    // Specifying a precise rev to the old rev shouldn't actually update
+    // anything because we already have the rev in the db.
+    println!("bar precise update");
+    p.cargo("update -p bar --precise")
+        .arg(&old_head.to_string())
+        .with_stdout("")
+        .run();
+
+    // Updating aggressively should, however, update the repo.
+    println!("dep1 aggressive update");
+    p.cargo("update -p dep1 --aggressive")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\
+             ",
+            git_project.url()
+        )).run();
+
+    // Make sure we still only compile one version of the git repo
+    println!("build");
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[COMPILING] bar v0.5.0 ({git}#[..])
+[COMPILING] [..] v0.5.0 ([CWD][..]dep[..])
+[COMPILING] [..] v0.5.0 ([CWD][..]dep[..])
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+            git = git_project.url(),
+        )).run();
+
+    // We should be able to update transitive deps
+    p.cargo("update -p bar")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`",
+            git_project.url()
+        )).run();
+}
+
+// A git dependency whose `src` directory is a git submodule (pointing at a
+// second repo) must be checked out — including the submodule — and build.
+#[test]
+fn dep_with_submodule() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+    }).unwrap();
+    let git_project2 =
+        git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap();
+
+    // Mount dep2's repo as the `src` submodule of dep1.
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let url = path2url(git_project2.root()).to_string();
+    git::add_submodule(&repo, &url, Path::new("src"));
+    git::commit(&repo);
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "extern crate dep1; pub fn foo() { dep1::dep() }",
+        ).build();
+
+    project
+        .cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository [..]
+[COMPILING] dep1 [..]
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+}
+
+// If a git dependency's submodule points at a commit that no longer exists
+// in the submodule's remote (here created by amending the remote's only
+// commit), the build must fail with a full "failed to update submodule"
+// cause chain rather than a confusing generic error.
+#[test]
+fn dep_with_bad_submodule() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+    }).unwrap();
+    let git_project2 =
+        git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap();
+
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let url = path2url(git_project2.root()).to_string();
+    git::add_submodule(&repo, &url, Path::new("src"));
+    git::commit(&repo);
+
+    // now amend the first commit on git_project2 to make submodule ref point to not-found
+    // commit
+    let repo = git2::Repository::open(&git_project2.root()).unwrap();
+    let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap();
+    let commit = repo.find_commit(original_submodule_ref).unwrap();
+    commit
+        .amend(
+            Some("refs/heads/master"),
+            None,
+            None,
+            None,
+            Some("something something"),
+            None,
+        ).unwrap();
+
+    let p = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "extern crate dep1; pub fn foo() { dep1::dep() }",
+        ).build();
+
+    let expected = format!(
+        "\
+[UPDATING] git repository [..]
+[ERROR] failed to load source for a dependency on `dep1`
+
+Caused by:
+  Unable to update {}
+
+Caused by:
+  failed to update submodule `src`
+
+Caused by:
+  object not found - no match for id [..]
+",
+        path2url(git_project.root())
+    );
+
+    p.cargo("build")
+        .with_stderr(expected)
+        .with_status(101)
+        .run();
+}
+
+// `cargo update -p dep1` must fetch and update ONLY dep1's git repository,
+// leaving the other git dependency (dep2) untouched.
+#[test]
+fn two_deps_only_update_one() {
+    let project = project();
+    let git1 = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+    let git2 = git::new("dep2", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep2", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+            git = '{}'
+            [dependencies.dep2]
+            git = '{}'
+        "#,
+                git1.url(),
+                git2.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "[UPDATING] git repository `[..]`\n\
+             [UPDATING] git repository `[..]`\n\
+             [COMPILING] [..] v0.5.0 ([..])\n\
+             [COMPILING] [..] v0.5.0 ([..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+        ).run();
+
+    // Advance dep1 with a new commit so there is something to update to.
+    File::create(&git1.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub fn foo() {}"#)
+        .unwrap();
+    let repo = git2::Repository::open(&git1.root()).unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    // Exactly one [UPDATING] git repository line — dep2 is not fetched.
+    p.cargo("update -p dep1")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
+             ",
+            git1.url()
+        )).run();
+}
+
+// When an externally-written Cargo.lock pins a git dependency to a revision
+// that is NOT yet in cargo's local git database (simulating pulling down a
+// teammate's lockfile), cargo must re-fetch the repository and build at the
+// locked revision instead of using the stale cached copy.
+#[test]
+fn stale_cached_version() {
+    let bar = git::new("meta-dep", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.0.0"))
+            .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+    }).unwrap();
+
+    // Update the git database in the cache with the current state of the git
+    // repo
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                bar.url()
+            ),
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate bar;
+
+            fn main() { assert_eq!(bar::bar(), 1) }
+        "#,
+        ).build();
+
+    foo.cargo("build").run();
+    foo.process(&foo.bin("foo")).run();
+
+    // Update the repo, and simulate someone else updating the lockfile and then
+    // us pulling it down.
+    File::create(&bar.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub fn bar() -> i32 { 1 + 0 }"#)
+        .unwrap();
+    let repo = git2::Repository::open(&bar.root()).unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    sleep_ms(1000);
+
+    let rev = repo.revparse_single("HEAD").unwrap().id();
+
+    // Hand-write a lockfile pinning `bar` to the new rev that the local git
+    // database has never fetched.
+    File::create(&foo.root().join("Cargo.lock"))
+        .unwrap()
+        .write_all(
+            format!(
+                r#"
+        [[package]]
+        name = "foo"
+        version = "0.0.0"
+        dependencies = [
+         'bar 0.0.0 (git+{url}#{hash})'
+        ]
+
+        [[package]]
+        name = "bar"
+        version = "0.0.0"
+        source = 'git+{url}#{hash}'
+    "#,
+                url = bar.url(),
+                hash = rev
+            ).as_bytes(),
+        ).unwrap();
+
+    // Now build!
+    foo.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{bar}`
+[COMPILING] bar v0.0.0 ({bar}#[..])
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            bar = bar.url(),
+        )).run();
+    foo.process(&foo.bin("foo")).run();
+}
+
+// When a git dependency's submodule is repointed to a different remote repo
+// (dep2 -> dep3) and committed, `cargo update` must pick up the new submodule
+// contents: the binary's output switches from "project2" to "project3".
+#[test]
+fn dep_with_changed_submodule() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+    }).unwrap();
+
+    let git_project2 = git::new("dep2", |project| {
+        project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }")
+    }).unwrap();
+
+    let git_project3 = git::new("dep3", |project| {
+        project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }")
+    }).unwrap();
+
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src"));
+    git::commit(&repo);
+
+    let p = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            [dependencies.dep1]
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/main.rs",
+            "
+            extern crate dep1;
+            pub fn main() { println!(\"{}\", dep1::dep()) }
+        ",
+        ).build();
+
+    println!("first run");
+    p.cargo("run")
+        .with_stderr(
+            "[UPDATING] git repository `[..]`\n\
+             [COMPILING] dep1 v0.5.0 ([..])\n\
+             [COMPILING] foo v0.5.0 ([..])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+             [..]\n\
+             [RUNNING] `target/debug/foo[EXE]`\n",
+        ).with_stdout("project2\n")
+        .run();
+
+    // Repoint the submodule's URL at dep3 via .gitmodules.
+    File::create(&git_project.root().join(".gitmodules"))
+        .unwrap()
+        .write_all(
+            format!(
+                "[submodule \"src\"]\n\tpath = src\n\turl={}",
+                git_project3.url()
+            ).as_bytes(),
+        ).unwrap();
+
+    // Sync the submodule and reset it to the new remote.
+    sub.sync().unwrap();
+    {
+        let subrepo = sub.open().unwrap();
+        subrepo
+            .remote_add_fetch("origin", "refs/heads/*:refs/heads/*")
+            .unwrap();
+        subrepo
+            .remote_set_url("origin", &git_project3.url().to_string())
+            .unwrap();
+        let mut origin = subrepo.find_remote("origin").unwrap();
+        origin.fetch(&[], None, None).unwrap();
+        let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap();
+        let obj = subrepo.find_object(id, None).unwrap();
+        subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap();
+    }
+    sub.add_to_index(true).unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    sleep_ms(1000);
+    // Update the dependency and carry on!
+    println!("update");
+    // NOTE(review): the first `.with_stderr("")` is immediately overridden by
+    // the second `.with_stderr(...)` below — it appears to be redundant.
+    p.cargo("update -v")
+        .with_stderr("")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\
+             ",
+            git_project.url()
+        )).run();
+
+    println!("last run");
+    p.cargo("run")
+        .with_stderr(
+            "[COMPILING] dep1 v0.5.0 ([..])\n\
+             [COMPILING] foo v0.5.0 ([..])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+             [..]\n\
+             [RUNNING] `target/debug/foo[EXE]`\n",
+        ).with_stdout("project3\n")
+        .run();
+}
+
+// A git dev-dependency is resolved (repo fetched) during `cargo build` even
+// though it is not compiled then; the subsequent `cargo test` must reuse that
+// resolution — no second [UPDATING] — and compile + run the tests against it.
+#[test]
+fn dev_deps_with_testing() {
+    let p2 = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file(
+                "src/lib.rs",
+                r#"
+            pub fn gimme() -> &'static str { "zoidberg" }
+        "#,
+            )
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dev-dependencies.bar]
+            version = "0.5.0"
+            git = '{}'
+        "#,
+                p2.url()
+            ),
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {}
+
+            #[cfg(test)]
+            mod tests {
+                extern crate bar;
+                #[test] fn foo() { bar::gimme(); }
+            }
+        "#,
+        ).build();
+
+    // Generate a lockfile which did not use `bar` to compile, but had to update
+    // `bar` to generate the lockfile
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{bar}`
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            bar = p2.url()
+        )).run();
+
+    // Make sure we use the previous resolution of `bar` instead of updating it
+    // a second time.
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] [..] v0.5.0 ([..])
+[COMPILING] [..] v0.5.0 ([..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test tests::foo ... ok")
+        .run();
+}
+
+// Freshness for a package that lives IN a git repo and has a build script:
+// after the first build, rebuilding is a no-op, and touching a file listed in
+// .gitignore (`src/bar.rs`) must NOT trigger a rebuild.
+#[test]
+fn git_build_cmd_freshness() {
+    let foo = git::new("foo", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+            build = "build.rs"
+        "#,
+            ).file("build.rs", "fn main() {}")
+            .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+            .file(".gitignore", "src/bar.rs")
+    }).unwrap();
+    foo.root().move_into_the_past();
+
+    sleep_ms(1000);
+
+    foo.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Smoke test to make sure it doesn't compile again
+    println!("first pass");
+    foo.cargo("build").with_stdout("").run();
+
+    // Modify an ignored file and make sure we don't rebuild
+    println!("second pass");
+    File::create(&foo.root().join("src/bar.rs")).unwrap();
+    foo.cargo("build").with_stdout("").run();
+}
+
+// Fetching a git dev-dependency must not require a configured git identity:
+// with user.name/user.email removed from the dep repo's config, `cargo build`
+// (which only resolves, never commits) still succeeds.
+#[test]
+fn git_name_not_always_needed() {
+    let p2 = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file(
+                "src/lib.rs",
+                r#"
+            pub fn gimme() -> &'static str { "zoidberg" }
+        "#,
+            )
+    }).unwrap();
+
+    // Strip the git identity; removal may fail if unset, hence the `let _`.
+    let repo = git2::Repository::open(&p2.root()).unwrap();
+    let mut cfg = repo.config().unwrap();
+    let _ = cfg.remove("user.name");
+    let _ = cfg.remove("user.email");
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dev-dependencies.bar]
+            git = '{}'
+        "#,
+                p2.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // Generate a lockfile which did not use `bar` to compile, but had to update
+    // `bar` to generate the lockfile
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{bar}`
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            bar = p2.url()
+        )).run();
+}
+
+// Two projects lock the same git dependency at different revisions; after p2
+// builds against the newer rev, p1 (locked to the older rev) must remain
+// fresh — new commits in the shared repo don't invalidate p1's lock.
+#[test]
+fn git_repo_changing_no_rebuild() {
+    let bar = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+    }).unwrap();
+
+    // Lock p1 to the first rev in the git repo
+    let p1 = project()
+        .at("p1")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "p1"
+            version = "0.5.0"
+            authors = []
+            build = 'build.rs'
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                bar.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .file("build.rs", "fn main() {}")
+        .build();
+    p1.root().move_into_the_past();
+    p1.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{bar}`
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            bar = bar.url()
+        )).run();
+
+    // Make a commit to lock p2 to a different rev
+    File::create(&bar.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub fn bar() -> i32 { 2 }"#)
+        .unwrap();
+    let repo = git2::Repository::open(&bar.root()).unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    // Lock p2 to the second rev
+    let p2 = project()
+        .at("p2")
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "p2"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                bar.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p2.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{bar}`
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            bar = bar.url()
+        )).run();
+
+    // And now for the real test! Make sure that p1 doesn't get rebuilt
+    // even though the git repo has changed.
+    p1.cargo("build").with_stdout("").run();
+}
+
+// A path dependency inside a git-checked-out project whose build script
+// generates its own source (`build.rs` copies bar.rs.in -> bar.rs) must
+// rerun the build script when the generator input changes.
+#[test]
+fn git_dep_build_cmd() {
+    let p = git::new("foo", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            version = "0.5.0"
+            path = "bar"
+
+            [[bin]]
+
+            name = "foo"
+        "#,
+            ).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+            .file(
+                "bar/Cargo.toml",
+                r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+
+            [lib]
+            name = "bar"
+            path = "src/bar.rs"
+        "#,
+            ).file(
+                "bar/src/bar.rs.in",
+                r#"
+            pub fn gimme() -> i32 { 0 }
+        "#,
+            ).file(
+                "bar/build.rs",
+                r#"
+            use std::fs;
+            fn main() {
+                fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
+            }
+        "#,
+            )
+    }).unwrap();
+
+    p.root().join("bar").move_into_the_past();
+
+    p.cargo("build").run();
+
+    p.process(&p.bin("foo")).with_stdout("0\n").run();
+
+    // Touching bar.rs.in should cause the `build` command to run again.
+    fs::File::create(&p.root().join("bar/src/bar.rs.in"))
+        .unwrap()
+        .write_all(b"pub fn gimme() -> i32 { 1 }")
+        .unwrap();
+
+    p.cargo("build").run();
+
+    // Regenerated source is picked up: output flips from 0 to 1.
+    p.process(&p.bin("foo")).with_stdout("1\n").run();
+}
+
+// `cargo fetch` downloads the git dependency (one [UPDATING] line); a second
+// `fetch` is a no-op because everything is already cached.
+#[test]
+fn fetch_downloads() {
+    let bar = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                bar.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("fetch")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{url}`",
+            url = bar.url()
+        )).run();
+
+    p.cargo("fetch").with_stdout("").run();
+}
+
+// A git dependency containing code that would produce rustc warnings (an
+// unused private fn) still compiles; the expected stderr contains only the
+// normal UPDATING/COMPILING/FINISHED lines, i.e. dependency warnings are not
+// surfaced in the output being matched here.
+#[test]
+fn warnings_in_git_dep() {
+    let bar = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "fn unused() {}")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                bar.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             [COMPILING] bar v0.5.0 ({}#[..])\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
+            bar.url(),
+            bar.url(),
+        )).run();
+}
+
+// Two distinct git sources both provide a package named `bar` (0.5.0 directly,
+// 0.6.0 via `baz`). `cargo update -p bar` must fail with an ambiguity error
+// listing both candidate specs rather than picking one arbitrarily.
+#[test]
+fn update_ambiguous() {
+    let bar1 = git::new("bar1", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+    let bar2 = git::new("bar2", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.6.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+    // `baz` pulls in the second `bar`, creating the name collision.
+    let baz = git::new("baz", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+            [package]
+            name = "baz"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            git = '{}'
+        "#,
+                    bar2.url()
+                ),
+            ).file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+            [dependencies.baz]
+            git = '{}'
+        "#,
+                bar1.url(),
+                baz.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("generate-lockfile").run();
+    // Exit code 101 = cargo error; both `bar:0.x.0` specs must be suggested.
+    p.cargo("update -p bar")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] There are multiple `bar` packages in your project, and the specification `bar` \
+is ambiguous.
+Please re-run this command with `-p <spec>` where `<spec>` is one of the \
+following:
+  bar:0.[..].0
+  bar:0.[..].0
+",
+        ).run();
+}
+
+// One git repository hosts two packages (`bar` and nested `a`), and the
+// project depends on both from the same URL. Updating just `bar` should
+// succeed and fetch the shared repository exactly once.
+#[test]
+fn update_one_dep_in_repo_with_many_deps() {
+    let bar = git::new("bar", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+            .file("src/lib.rs", "")
+            .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+            .file("a/src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+            [dependencies.a]
+            git = '{}'
+        "#,
+                bar.url(),
+                bar.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("generate-lockfile").run();
+    // A single [UPDATING] line: the one shared repo is fetched once.
+    p.cargo("update -p bar")
+        .with_stderr(&format!("[UPDATING] git repository `{}`", bar.url()))
+        .run();
+}
+
+// Two git repos (`dep1`, `dep2`) contain the same package `dep`, both
+// depending on the same `transitive` repo. Switching the manifest from dep1
+// to dep2 must re-fetch only the new repo: the second build's expected stderr
+// deliberately has no "[UPDATING] ... transitive" or "[COMPILING] transitive"
+// line, proving the unchanged transitive dependency is not updated/rebuilt.
+#[test]
+fn switch_deps_does_not_update_transitive() {
+    let transitive = git::new("transitive", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("transitive", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+    let dep1 = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+            [package]
+            name = "dep"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.transitive]
+            git = '{}'
+        "#,
+                    transitive.url()
+                ),
+            ).file("src/lib.rs", "")
+    }).unwrap();
+    let dep2 = git::new("dep2", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+            [package]
+            name = "dep"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.transitive]
+            git = '{}'
+        "#,
+                    transitive.url()
+                ),
+            ).file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.dep]
+            git = '{}'
+        "#,
+                dep1.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // Initial build: both dep1 and transitive are fetched and compiled.
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{}`
+[UPDATING] git repository `{}`
+[COMPILING] transitive [..]
+[COMPILING] dep [..]
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            dep1.url(),
+            transitive.url()
+        )).run();
+
+    // Update the dependency to point to the second repository, but this
+    // shouldn't update the transitive dependency which is the same.
+    File::create(&p.root().join("Cargo.toml"))
+        .unwrap()
+        .write_all(
+            format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.dep]
+            git = '{}'
+    "#,
+                dep2.url()
+            ).as_bytes(),
+        ).unwrap();
+
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] git repository `{}`
+[COMPILING] dep [..]
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            dep2.url()
+        )).run();
+}
+
+// After committing a new revision to the dependency's repo, `cargo update -p
+// dep` must advance the lockfile for the whole git source: the old revision
+// hash must no longer appear anywhere in Cargo.lock (so the path-dependency
+// `a` inside the same repo moves forward too).
+#[test]
+fn update_one_source_updates_all_packages_in_that_git_source() {
+    let dep = git::new("dep", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+            [package]
+            name = "dep"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+        "#,
+            ).file("src/lib.rs", "")
+            .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+            .file("a/src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.dep]
+            git = '{}'
+        "#,
+                dep.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+
+    // Record the revision currently pinned in the lockfile.
+    let repo = git2::Repository::open(&dep.root()).unwrap();
+    let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+    // Just be sure to change a file
+    File::create(&dep.root().join("src/lib.rs"))
+        .unwrap()
+        .write_all(br#"pub fn bar() -> i32 { 2 }"#)
+        .unwrap();
+    git::add(&repo);
+    git::commit(&repo);
+
+    p.cargo("update -p dep").run();
+    let mut lockfile = String::new();
+    File::open(&p.root().join("Cargo.lock"))
+        .unwrap()
+        .read_to_string(&mut lockfile)
+        .unwrap();
+    // The stale revision must be gone from the lockfile entirely.
+    assert!(
+        !lockfile.contains(&rev1.to_string()),
+        "{} in {}",
+        rev1,
+        lockfile
+    );
+}
+
+// A workspace member `b` depends on git package `a` from repo a1 (v0.5.0);
+// rewriting b's manifest to point at repo a2 (v0.5.1) must fetch the new
+// source and rebuild with the new version on the next build.
+#[test]
+fn switch_sources() {
+    let a1 = git::new("a1", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("a", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+    let a2 = git::new("a2", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("a", "0.5.1"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies.b]
+            path = "b"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "b/Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            [dependencies.a]
+            git = '{}'
+        "#,
+                a1.url()
+            ),
+        ).file("b/src/lib.rs", "pub fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `file://[..]a1`
+[COMPILING] a v0.5.0 ([..]a1#[..]
+[COMPILING] b v0.5.0 ([..])
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Point `b` at the second repository (which carries a v0.5.1).
+    File::create(&p.root().join("b/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            format!(
+                r#"
+        [project]
+        name = "b"
+        version = "0.5.0"
+        authors = []
+        [dependencies.a]
+        git = '{}'
+    "#,
+                a2.url()
+            ).as_bytes(),
+        ).unwrap();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `file://[..]a2`
+[COMPILING] a v0.5.1 ([..]a2#[..]
+[COMPILING] b v0.5.0 ([..])
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Building from a clone that did not check out the repo's submodule
+// (`a/submodule`) should still succeed — cargo must not require submodules
+// that are irrelevant to the build to be present.
+#[test]
+fn dont_require_submodules_are_checked_out() {
+    let p = project().build();
+    let git1 = git::new("dep1", |p| {
+        p.file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .file("a/foo", "")
+    }).unwrap();
+    // An empty second repo used purely as the submodule target.
+    let git2 = git::new("dep2", |p| p).unwrap();
+
+    let repo = git2::Repository::open(&git1.root()).unwrap();
+    let url = path2url(git2.root()).to_string();
+    git::add_submodule(&repo, &url, Path::new("a/submodule"));
+    git::commit(&repo);
+
+    git2::Repository::init(&p.root()).unwrap();
+    let url = path2url(git1.root()).to_string();
+    let dst = paths::home().join("foo");
+    // `Repository::clone` does not initialize submodules, which is the point.
+    git2::Repository::clone(&url, &dst).unwrap();
+
+    git1.cargo("build -v").cwd(&dst).run();
+}
+
+// Two git dependencies in a chain both expose a crate named `a` (a1 depends
+// on a2, foo depends on a1). `cargo test -v` — which includes doctests —
+// must handle the name collision without error.
+#[test]
+fn doctest_same_name() {
+    let a2 = git::new("a2", |p| {
+        p.file("Cargo.toml", &basic_manifest("a", "0.5.0"))
+            .file("src/lib.rs", "pub fn a2() {}")
+    }).unwrap();
+
+    let a1 = git::new("a1", |p| {
+        p.file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            a = {{ git = '{}' }}
+        "#,
+                a2.url()
+            ),
+        ).file("src/lib.rs", "extern crate a; pub fn a1() {}")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = {{ git = '{}' }}
+        "#,
+                a1.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[macro_use]
+            extern crate a;
+        "#,
+        ).build();
+
+    p.cargo("test -v").run();
+}
+
+// A git dependency with a lint-triggering `use std::option;` must build with
+// a clean stderr: the exact expected output below contains no warning lines,
+// so lints from remote dependencies are suppressed.
+#[test]
+fn lints_are_suppressed() {
+    let a = git::new("a", |p| {
+        p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file(
+            "src/lib.rs",
+            "
+            use std::option;
+        ",
+        )
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = {{ git = '{}' }}
+        "#,
+                a.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `[..]`
+[COMPILING] a v0.5.0 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Same as `lints_are_suppressed`, but the dependency escalates warnings with
+// `#![deny(warnings)]`. The build must still succeed — denied lints in
+// remote dependencies are downgraded rather than failing the build.
+#[test]
+fn denied_lints_are_allowed() {
+    let a = git::new("a", |p| {
+        p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file(
+            "src/lib.rs",
+            "
+            #![deny(warnings)]
+            use std::option;
+        ",
+        )
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = {{ git = '{}' }}
+        "#,
+                a.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `[..]`
+[COMPILING] a v0.5.0 ([..])
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// After a successful build, adding the already-locked git dependency to a
+// path dependency's manifest (`a`) must not break the next build — the
+// existing lock entry for `git` should be reused.
+#[test]
+fn add_a_git_dep() {
+    let git = git::new("git", |p| {
+        p.file("Cargo.toml", &basic_manifest("git", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = {{ path = 'a' }}
+            git = {{ git = '{}' }}
+        "#,
+                git.url()
+            ),
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    // Rewrite a's manifest to also depend on the same git package.
+    File::create(p.root().join("a/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            format!(
+                r#"
+        [package]
+        name = "a"
+        version = "0.0.1"
+        authors = []
+
+        [dependencies]
+        git = {{ git = '{}' }}
+    "#,
+                git.url()
+            ).as_bytes(),
+        ).unwrap();
+
+    p.cargo("build").run();
+}
+
+// Two packages from the same repository are both pinned with `rev = 'v0.1.0'`
+// where v0.1.0 is a tag name, not a commit hash. Resolving a tag through the
+// `rev` key must work for both generate-lockfile and build.
+#[test]
+fn two_at_rev_instead_of_tag() {
+    let git = git::new("git", |p| {
+        p.file("Cargo.toml", &basic_manifest("git1", "0.5.0"))
+            .file("src/lib.rs", "")
+            .file("a/Cargo.toml", &basic_manifest("git2", "0.5.0"))
+            .file("a/src/lib.rs", "")
+    }).unwrap();
+
+    // Make a tag corresponding to the current HEAD
+    let repo = git2::Repository::open(&git.root()).unwrap();
+    let head = repo.head().unwrap().target().unwrap();
+    repo.tag(
+        "v0.1.0",
+        &repo.find_object(head, None).unwrap(),
+        &repo.signature().unwrap(),
+        "make a new tag",
+        false,
+    ).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            git1 = {{ git = '{0}', rev = 'v0.1.0' }}
+            git2 = {{ git = '{0}', rev = 'v0.1.0' }}
+        "#,
+                git.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("generate-lockfile").run();
+    p.cargo("build -v").run();
+}
+
+// Regression test for rust-lang/cargo#4135: a file listed in the manifest's
+// `include` must participate in freshness tracking even when `.gitignore`
+// matches it. The build script syncs generated-file mtimes to src/mod.md so
+// that builds 2-4 isolate exactly which touch triggers a rerun.
+#[test]
+#[ignore] // accesses crates.io
+fn include_overrides_gitignore() {
+    let p = git::new("reduction", |repo| {
+        repo.file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "reduction"
+            version = "0.5.0"
+            authors = ["pnkfelix"]
+            build = "tango-build.rs"
+            include = ["src/lib.rs", "src/incl.rs", "src/mod.md", "tango-build.rs", "Cargo.toml"]
+
+            [build-dependencies]
+            filetime = "0.1"
+        "#,
+        ).file(
+            ".gitignore",
+            r#"
+            target
+            Cargo.lock
+            # Below files represent generated code, thus not managed by `git`
+            src/incl.rs
+            src/not_incl.rs
+        "#,
+        ).file(
+            "tango-build.rs",
+            r#"
+            extern crate filetime;
+            use filetime::FileTime;
+            use std::fs::{self, File};
+
+            fn main() {
+                // generate files, or bring their timestamps into sync.
+                let source = "src/mod.md";
+
+                let metadata = fs::metadata(source).unwrap();
+                let mtime = FileTime::from_last_modification_time(&metadata);
+                let atime = FileTime::from_last_access_time(&metadata);
+
+                // sync time stamps for generated files with time stamp of source file.
+
+                let files = ["src/not_incl.rs", "src/incl.rs"];
+                for file in files.iter() {
+                    File::create(file).unwrap();
+                    filetime::set_file_times(file, atime, mtime).unwrap();
+                }
+            }
+        "#,
+        ).file("src/lib.rs", "mod not_incl; mod incl;")
+        .file(
+            "src/mod.md",
+            r#"
+            (The content of this file does not matter since we are not doing real codegen.)
+        "#,
+        )
+    }).unwrap();
+
+    println!("build 1: all is new");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADED] filetime [..]
+[DOWNLOADED] libc [..]
+[COMPILING] libc [..]
+[RUNNING] `rustc --crate-name libc [..]`
+[COMPILING] filetime [..]
+[RUNNING] `rustc --crate-name filetime [..]`
+[COMPILING] reduction [..]
+[RUNNING] `rustc --crate-name build_script_tango_build tango-build.rs --crate-type bin [..]`
+[RUNNING] `[..]/build-script-tango-build`
+[RUNNING] `rustc --crate-name reduction src/lib.rs --crate-type lib [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    println!("build 2: nothing changed; file timestamps reset by build script");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] libc [..]
+[FRESH] filetime [..]
+[FRESH] reduction [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    println!("build 3: touch `src/not_incl.rs`; expect build script *not* re-run");
+    // sleep_ms ensures the new mtime differs from the synced timestamps.
+    sleep_ms(1000);
+    File::create(p.root().join("src").join("not_incl.rs")).unwrap();
+
+    // not_incl.rs is NOT in `include`, so no build-script rerun is expected
+    // (stderr has no `build-script-tango-build` line).
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] libc [..]
+[FRESH] filetime [..]
+[COMPILING] reduction [..]
+[RUNNING] `rustc --crate-name reduction src/lib.rs --crate-type lib [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // This final case models the bug from rust-lang/cargo#4135: an
+    // explicitly included file should cause a build-script re-run,
+    // even if that same file is matched by `.gitignore`.
+    println!("build 4: touch `src/incl.rs`; expect build script re-run");
+    sleep_ms(1000);
+    File::create(p.root().join("src").join("incl.rs")).unwrap();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] libc [..]
+[FRESH] filetime [..]
+[COMPILING] reduction [..]
+[RUNNING] `[..]/build-script-tango-build`
+[RUNNING] `rustc --crate-name reduction src/lib.rs --crate-type lib [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// A git dependency whose manifest is invalid TOML (the `categories` key is
+// intentionally duplicated) must fail the build with the full chain of
+// "Caused by:" messages, ending at the TOML duplicate-key error.
+#[test]
+fn invalid_git_dependency_manifest() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+                [project]
+
+                name = "dep1"
+                version = "0.5.0"
+                authors = ["carlhuda@example.com"]
+                categories = ["algorithms"]
+                categories = ["algorithms"]
+
+                [lib]
+
+                name = "dep1"
+            "#,
+            ).file(
+                "src/dep1.rs",
+                r#"
+                pub fn hello() -> &'static str {
+                    "hello world"
+                }
+            "#,
+            )
+    }).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.dep1]
+
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/main.rs",
+            &main_file(r#""{}", dep1::hello()"#, &["dep1"]),
+        ).build();
+
+    let git_root = git_project.root();
+
+    project
+        .cargo("build")
+        .with_status(101)
+        .with_stderr(&format!(
+            "[UPDATING] git repository `{}`\n\
+             error: failed to load source for a dependency on `dep1`\n\
+             \n\
+             Caused by:\n  \
+             Unable to update {}\n\
+             \n\
+             Caused by:\n  \
+             failed to parse manifest at `[..]`\n\
+             \n\
+             Caused by:\n  \
+             could not parse input as TOML\n\
+             \n\
+             Caused by:\n  \
+             duplicate key: `categories` for key `project`",
+            path2url(&git_root),
+            path2url(&git_root),
+        )).run();
+}
+
+// A nested submodule points at a local TCP server that speaks garbage
+// ("foo\r\n") instead of the git protocol, so submodule checkout fails.
+// Cargo must report which submodule failed (`src`, then nested `bar`) and
+// fail the same way on a retry rather than caching a broken state.
+#[test]
+fn failed_submodule_checkout() {
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+    }).unwrap();
+
+    let git_project2 = git::new("dep2", |project| project.file("lib.rs", "")).unwrap();
+
+    // Fake "git server": accepts connections and replies with junk bytes
+    // until `done` is flipped at the end of the test.
+    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = listener.local_addr().unwrap();
+    let done = Arc::new(AtomicBool::new(false));
+    let done2 = done.clone();
+
+    let t = thread::spawn(move || {
+        while !done2.load(Ordering::SeqCst) {
+            if let Ok((mut socket, _)) = listener.accept() {
+                drop(socket.write_all(b"foo\r\n"));
+            }
+        }
+    });
+
+    // dep2 gains a submodule `bar` whose URL is the junk server above.
+    let repo = git2::Repository::open(&git_project2.root()).unwrap();
+    let url = format!("http://{}:{}/", addr.ip(), addr.port());
+    {
+        let mut s = repo.submodule(&url, Path::new("bar"), false).unwrap();
+        let subrepo = s.open().unwrap();
+        let mut cfg = subrepo.config().unwrap();
+        cfg.set_str("user.email", "foo@bar.com").unwrap();
+        cfg.set_str("user.name", "Foo Bar").unwrap();
+        git::commit(&subrepo);
+        s.add_finalize().unwrap();
+    }
+    git::commit(&repo);
+    drop((repo, url));
+
+    // dep1 gains dep2 as submodule `src`, nesting the broken one inside.
+    let repo = git2::Repository::open(&git_project.root()).unwrap();
+    let url = path2url(git_project2.root()).to_string();
+    git::add_submodule(&repo, &url, Path::new("src"));
+    git::commit(&repo);
+    drop(repo);
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            dep1 = {{ git = '{}' }}
+        "#,
+                git_project.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    project
+        .cargo("build")
+        .with_status(101)
+        .with_stderr_contains("  failed to update submodule `src`")
+        .with_stderr_contains("  failed to update submodule `bar`")
+        .run();
+    // A second attempt must fail the same way (no corrupted cached state).
+    project
+        .cargo("build")
+        .with_status(101)
+        .with_stderr_contains("  failed to update submodule `src`")
+        .with_stderr_contains("  failed to update submodule `bar`")
+        .run();
+
+    // Unblock and join the server thread so the test exits cleanly.
+    done.store(true, Ordering::SeqCst);
+    drop(TcpStream::connect(&addr));
+    t.join().unwrap();
+}
+
+// With `net.git-fetch-with-cli = true` in .cargo/config, cargo must shell out
+// to the `git` binary for fetching (visible as a "[RUNNING] `git fetch ...`"
+// line in verbose output) instead of using libgit2.
+#[test]
+fn use_the_cli() {
+    if env::var("CARGO_TEST_DISABLE_GIT_CLI") == Ok("1".to_string()) {
+        // mingw git on Windows does not support Windows-style file URIs.
+        // Appveyor in the rust repo has that git up front in the PATH instead
+        // of Git-for-Windows, which causes this to fail.
+        return;
+    }
+    let project = project();
+    let git_project = git::new("dep1", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep1", "0.5.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let project = project
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+                    [project]
+                    name = "foo"
+                    version = "0.5.0"
+                    authors = []
+
+                    [dependencies]
+                    dep1 = {{ git = '{}' }}
+                "#,
+                git_project.url()
+            ),
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            "
+                [net]
+                git-fetch-with-cli = true
+            ",
+        ).build();
+
+    let stderr = "\
+[UPDATING] git repository `[..]`
+[RUNNING] `git fetch [..]`
+[COMPILING] dep1 [..]
+[RUNNING] `rustc [..]`
+[COMPILING] foo [..]
+[RUNNING] `rustc [..]`
+[FINISHED] [..]
+";
+
+    project.cargo("build -v").with_stderr(stderr).run();
+}
diff --git a/tests/testsuite/init.rs b/tests/testsuite/init.rs
new file mode 100644 (file)
index 0000000..3dcf655
--- /dev/null
@@ -0,0 +1,517 @@
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use support;
+
+use support::{paths, Execs};
+
+// Wraps the shared `support::cargo_process`, pinning the working directory to
+// the sandbox root and HOME to the sandbox home so `cargo init` in these
+// tests never touches the real user environment.
+fn cargo_process(s: &str) -> Execs {
+    let mut execs = support::cargo_process(s);
+    execs.cwd(&paths::root()).env("HOME", &paths::home());
+    execs
+}
+
+// `cargo init --lib --vcs none` creates Cargo.toml and src/lib.rs, prints the
+// "[CREATED] library package" message, skips .gitignore (vcs none), and the
+// resulting package must build.
+#[test]
+fn simple_lib() {
+    cargo_process("init --lib --vcs none --edition 2015")
+        .env("USER", "foo")
+        .with_stderr("[CREATED] library package")
+        .run();
+
+    assert!(paths::root().join("Cargo.toml").is_file());
+    assert!(paths::root().join("src/lib.rs").is_file());
+    assert!(!paths::root().join(".gitignore").is_file());
+
+    cargo_process("build").run();
+}
+
+// `cargo init --bin` inside a directory named `foo` creates a binary package
+// named after the directory; building it must produce target/debug/foo.
+#[test]
+fn simple_bin() {
+    let path = paths::root().join("foo");
+    fs::create_dir(&path).unwrap();
+    cargo_process("init --bin --vcs none --edition 2015")
+        .env("USER", "foo")
+        .cwd(&path)
+        .with_stderr("[CREATED] binary (application) package")
+        .run();
+
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+    assert!(paths::root().join("foo/src/main.rs").is_file());
+
+    cargo_process("build").cwd(&path).run();
+    // EXE_SUFFIX keeps the assertion portable (".exe" on Windows, "" elsewhere).
+    assert!(
+        paths::root()
+            .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX))
+            .is_file()
+    );
+}
+
+// Passing both --lib and --bin to `cargo init` is rejected with an error.
+#[test]
+fn both_lib_and_bin() {
+    cargo_process("init --lib --bin")
+        .env("USER", "foo")
+        .with_status(101)
+        .with_stderr("[ERROR] can't specify both lib and binary outputs")
+        .run();
+}
+
+// Shared body for the bin_already_exists_* tests: pre-create a binary source
+// file at `rellocation`, run `cargo init` (with or without an explicit
+// `--bin`), and verify init adopts the existing file as the binary target —
+// it creates no lib.rs and leaves the file's content untouched.
+fn bin_already_exists(explicit: bool, rellocation: &str) {
+    let path = paths::root().join("foo");
+    fs::create_dir_all(&path.join("src")).unwrap();
+
+    let sourcefile_path = path.join(rellocation);
+
+    let content = br#"
+        fn main() {
+            println!("Hello, world 2!");
+        }
+    "#;
+
+    File::create(&sourcefile_path)
+        .unwrap()
+        .write_all(content)
+        .unwrap();
+
+    if explicit {
+        cargo_process("init --bin --vcs none")
+            .env("USER", "foo")
+            .cwd(&path)
+            .run();
+    } else {
+        // Without --bin, init must auto-detect the binary from the file.
+        cargo_process("init --vcs none")
+            .env("USER", "foo")
+            .cwd(&path)
+            .run();
+    }
+
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+    assert!(!paths::root().join("foo/src/lib.rs").is_file());
+
+    // Check that our file is not overwritten
+    let mut new_content = Vec::new();
+    File::open(&sourcefile_path)
+        .unwrap()
+        .read_to_end(&mut new_content)
+        .unwrap();
+    assert_eq!(Vec::from(content as &[u8]), new_content);
+}
+
+// The six variants below exercise bin_already_exists across every supported
+// location of a pre-existing binary source (src/main.rs, main.rs, foo.rs,
+// src/foo.rs) with and without an explicit --bin flag.
+#[test]
+fn bin_already_exists_explicit() {
+    bin_already_exists(true, "src/main.rs")
+}
+
+#[test]
+fn bin_already_exists_implicit() {
+    bin_already_exists(false, "src/main.rs")
+}
+
+#[test]
+fn bin_already_exists_explicit_nosrc() {
+    bin_already_exists(true, "main.rs")
+}
+
+#[test]
+fn bin_already_exists_implicit_nosrc() {
+    bin_already_exists(false, "main.rs")
+}
+
+#[test]
+fn bin_already_exists_implicit_namenosrc() {
+    bin_already_exists(false, "foo.rs")
+}
+
+#[test]
+fn bin_already_exists_implicit_namesrc() {
+    bin_already_exists(false, "src/foo.rs")
+}
+
+// With library sources at both src/lib.rs and lib.rs, `cargo init` cannot
+// choose a lib target: it must error out and write no Cargo.toml.
+#[test]
+fn confused_by_multiple_lib_files() {
+    let path = paths::root().join("foo");
+    fs::create_dir_all(&path.join("src")).unwrap();
+
+    let sourcefile_path1 = path.join("src/lib.rs");
+
+    File::create(&sourcefile_path1)
+        .unwrap()
+        .write_all(br#"fn qqq () { println!("Hello, world 2!"); }"#)
+        .unwrap();
+
+    let sourcefile_path2 = path.join("lib.rs");
+
+    File::create(&sourcefile_path2)
+        .unwrap()
+        .write_all(br#" fn qqq () { println!("Hello, world 3!"); }"#)
+        .unwrap();
+
+    cargo_process("init --vcs none").env("USER", "foo").cwd(&path).with_status(101).with_stderr(
+            "[ERROR] cannot have a package with multiple libraries, found both `src/lib.rs` and `lib.rs`",
+        )
+        .run();
+
+    // Failure must be clean: no partially-written manifest.
+    assert!(!paths::root().join("foo/Cargo.toml").is_file());
+}
+
+// Two candidate binary sources (foo.rs matching the dir name, and main.rs)
+// make the main target ambiguous; `cargo init` must list both candidates,
+// fail, and leave no Cargo.toml behind.
+#[test]
+fn multibin_project_name_clash() {
+    let path = paths::root().join("foo");
+    fs::create_dir(&path).unwrap();
+
+    let sourcefile_path1 = path.join("foo.rs");
+
+    File::create(&sourcefile_path1)
+        .unwrap()
+        .write_all(br#"fn main () { println!("Hello, world 2!"); }"#)
+        .unwrap();
+
+    let sourcefile_path2 = path.join("main.rs");
+
+    File::create(&sourcefile_path2)
+        .unwrap()
+        .write_all(br#"fn main () { println!("Hello, world 3!"); }"#)
+        .unwrap();
+
+    cargo_process("init --lib --vcs none")
+        .env("USER", "foo")
+        .cwd(&path)
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] multiple possible binary sources found:
+  main.rs
+  foo.rs
+cannot automatically generate Cargo.toml as the main target would be ambiguous
+",
+        ).run();
+
+    assert!(!paths::root().join("foo/Cargo.toml").is_file());
+}
+
+// Shared body for the lib_already_exists_* tests: pre-create a library source
+// at `rellocation`, run `cargo init`, and verify init adopts it as the lib
+// target — no src/main.rs is generated and the file content is preserved.
+fn lib_already_exists(rellocation: &str) {
+    let path = paths::root().join("foo");
+    fs::create_dir_all(&path.join("src")).unwrap();
+
+    let sourcefile_path = path.join(rellocation);
+
+    let content = br#"
+        pub fn qqq() {}
+    "#;
+
+    File::create(&sourcefile_path)
+        .unwrap()
+        .write_all(content)
+        .unwrap();
+
+    cargo_process("init --vcs none")
+        .env("USER", "foo")
+        .cwd(&path)
+        .run();
+
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+    assert!(!paths::root().join("foo/src/main.rs").is_file());
+
+    // Check that our file is not overwritten
+    let mut new_content = Vec::new();
+    File::open(&sourcefile_path)
+        .unwrap()
+        .read_to_end(&mut new_content)
+        .unwrap();
+    assert_eq!(Vec::from(content as &[u8]), new_content);
+}
+
+#[test]
+fn lib_already_exists_src() {
+    lib_already_exists("src/lib.rs");
+}
+
+#[test]
+fn lib_already_exists_nosrc() {
+    lib_already_exists("lib.rs");
+}
+
+#[test]
+fn simple_git() {
+    cargo_process("init --lib --vcs git")
+        .env("USER", "foo")
+        .run();
+
+    assert!(paths::root().join("Cargo.toml").is_file());
+    assert!(paths::root().join("src/lib.rs").is_file());
+    assert!(paths::root().join(".git").is_dir());
+    assert!(paths::root().join(".gitignore").is_file());
+}
+
+#[test]
+fn auto_git() {
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join("Cargo.toml").is_file());
+    assert!(paths::root().join("src/lib.rs").is_file());
+    assert!(paths::root().join(".git").is_dir());
+    assert!(paths::root().join(".gitignore").is_file());
+}
+
+#[test]
+fn invalid_dir_name() {
+    let foo = &paths::root().join("foo.bar");
+    fs::create_dir_all(&foo).unwrap();
+    cargo_process("init")
+        .cwd(foo.clone())
+        .env("USER", "foo")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] Invalid character `.` in crate name: `foo.bar`
+use --name to override crate name
+",
+        ).run();
+
+    assert!(!foo.join("Cargo.toml").is_file());
+}
+
+#[test]
+fn reserved_name() {
+    let test = &paths::root().join("test");
+    fs::create_dir_all(&test).unwrap();
+    cargo_process("init")
+        .cwd(test.clone())
+        .env("USER", "foo")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] The name `test` cannot be used as a crate name\n\
+use --name to override crate name
+",
+        ).run();
+
+    assert!(!test.join("Cargo.toml").is_file());
+}
+
+#[test]
+fn git_autodetect() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join("Cargo.toml").is_file());
+    assert!(paths::root().join("src/lib.rs").is_file());
+    assert!(paths::root().join(".git").is_dir());
+    assert!(paths::root().join(".gitignore").is_file());
+}
+
+#[test]
+fn mercurial_autodetect() {
+    fs::create_dir(&paths::root().join(".hg")).unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join("Cargo.toml").is_file());
+    assert!(paths::root().join("src/lib.rs").is_file());
+    assert!(!paths::root().join(".git").is_dir());
+    assert!(paths::root().join(".hgignore").is_file());
+}
+
+#[test]
+fn gitignore_appended_not_replaced() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    File::create(&paths::root().join(".gitignore"))
+        .unwrap()
+        .write_all(b"qqqqqq\n")
+        .unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join("Cargo.toml").is_file());
+    assert!(paths::root().join("src/lib.rs").is_file());
+    assert!(paths::root().join(".git").is_dir());
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"qqqqqq"#));
+}
+
+#[test]
+fn gitignore_added_newline_in_existing() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    File::create(&paths::root().join(".gitignore"))
+        .unwrap()
+        .write_all(b"first")
+        .unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.starts_with("first\n"));
+}
+
+#[test]
+fn gitignore_no_newline_in_new() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(!contents.starts_with('\n'));
+}
+
+#[test]
+fn mercurial_added_newline_in_existing() {
+    fs::create_dir(&paths::root().join(".hg")).unwrap();
+
+    File::create(&paths::root().join(".hgignore"))
+        .unwrap()
+        .write_all(b"first")
+        .unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join(".hgignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".hgignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.starts_with("first\n"));
+}
+
+#[test]
+fn mercurial_no_newline_in_new() {
+    fs::create_dir(&paths::root().join(".hg")).unwrap();
+
+    cargo_process("init --lib").env("USER", "foo").run();
+
+    assert!(paths::root().join(".hgignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".hgignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(!contents.starts_with('\n'));
+}
+
+#[test]
+fn cargo_lock_gitignored_if_lib1() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    cargo_process("init --lib --vcs git")
+        .env("USER", "foo")
+        .run();
+
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"Cargo.lock"#));
+}
+
+#[test]
+fn cargo_lock_gitignored_if_lib2() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    File::create(&paths::root().join("lib.rs"))
+        .unwrap()
+        .write_all(br#""#)
+        .unwrap();
+
+    cargo_process("init --vcs git").env("USER", "foo").run();
+
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"Cargo.lock"#));
+}
+
+#[test]
+fn cargo_lock_not_gitignored_if_bin1() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    cargo_process("init --vcs git --bin")
+        .env("USER", "foo")
+        .run();
+
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(!contents.contains(r#"Cargo.lock"#));
+}
+
+#[test]
+fn cargo_lock_not_gitignored_if_bin2() {
+    fs::create_dir(&paths::root().join(".git")).unwrap();
+
+    File::create(&paths::root().join("main.rs"))
+        .unwrap()
+        .write_all(br#""#)
+        .unwrap();
+
+    cargo_process("init --vcs git").env("USER", "foo").run();
+
+    assert!(paths::root().join(".gitignore").is_file());
+
+    let mut contents = String::new();
+    File::open(&paths::root().join(".gitignore"))
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(!contents.contains(r#"Cargo.lock"#));
+}
+
+#[test]
+fn with_argument() {
+    cargo_process("init foo --vcs none")
+        .env("USER", "foo")
+        .run();
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+}
+
+#[test]
+fn unknown_flags() {
+    cargo_process("init foo --flag")
+        .with_status(1)
+        .with_stderr_contains(
+            "error: Found argument '--flag' which wasn't expected, or isn't valid in this context",
+        ).run();
+}
+
+#[cfg(not(windows))]
+#[test]
+fn no_filename() {
+    cargo_process("init /")
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] cannot auto-detect package name from path \"/\" ; use --name to override"
+                .to_string(),
+        ).run();
+}
diff --git a/tests/testsuite/install.rs b/tests/testsuite/install.rs
new file mode 100644 (file)
index 0000000..c11adbf
--- /dev/null
@@ -0,0 +1,1294 @@
+use std::fs::{self, File, OpenOptions};
+use std::io::prelude::*;
+use support;
+
+use git2;
+use support::cross_compile;
+use support::git;
+use support::install::{assert_has_installed_exe, assert_has_not_installed_exe, cargo_home};
+use support::paths;
+use support::registry::Package;
+use support::{basic_manifest, cargo_process, project};
+
+fn pkg(name: &str, vers: &str) {
+    Package::new(name, vers)
+        .file("src/lib.rs", "")
+        .file(
+            "src/main.rs",
+            &format!("extern crate {}; fn main() {{}}", name),
+        ).publish();
+}
+
+#[test]
+fn simple() {
+    pkg("foo", "0.0.1");
+
+    cargo_process("install foo")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry [..])
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+
+    cargo_process("uninstall foo")
+        .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
+        .run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn multiple_pkgs() {
+    pkg("foo", "0.0.1");
+    pkg("bar", "0.0.2");
+
+    cargo_process("install foo bar baz")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.0.1 (registry `[CWD]/registry`)
+[INSTALLING] foo v0.0.1
+[COMPILING] foo v0.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.2 (registry `[CWD]/registry`)
+[INSTALLING] bar v0.0.2
+[COMPILING] bar v0.0.2
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
+error: could not find `baz` in registry `[..]`
+[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+error: some crates failed to install
+",
+        ).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_installed_exe(cargo_home(), "bar");
+
+    cargo_process("uninstall foo bar")
+        .with_stderr(
+            "\
+[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
+[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
+[SUMMARY] Successfully uninstalled foo, bar!
+",
+        ).run();
+
+    assert_has_not_installed_exe(cargo_home(), "foo");
+    assert_has_not_installed_exe(cargo_home(), "bar");
+}
+
+#[test]
+fn pick_max_version() {
+    pkg("foo", "0.1.0");
+    pkg("foo", "0.2.0");
+    pkg("foo", "0.2.1");
+    pkg("foo", "0.2.1-pre.1");
+    pkg("foo", "0.3.0-pre.2");
+
+    cargo_process("install foo")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.2.1 (registry [..])
+[INSTALLING] foo v0.2.1
+[COMPILING] foo v0.2.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn installs_beta_version_by_explicit_name_from_git() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.3.0-beta.1"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .arg("foo")
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn missing() {
+    pkg("foo", "0.0.1");
+    cargo_process("install bar")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[ERROR] could not find `bar` in registry `[..]`
+",
+        ).run();
+}
+
+#[test]
+fn bad_version() {
+    pkg("foo", "0.0.1");
+    cargo_process("install foo --vers=0.2.0")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0`
+",
+        ).run();
+}
+
+#[test]
+fn no_crate() {
+    cargo_process("install")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] `[..]` is not a crate root; specify a crate to install [..]
+
+Caused by:
+  failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..] (os error [..])
+",
+        ).run();
+}
+
+#[test]
+fn install_location_precedence() {
+    pkg("foo", "0.0.1");
+
+    let root = paths::root();
+    let t1 = root.join("t1");
+    let t2 = root.join("t2");
+    let t3 = root.join("t3");
+    let t4 = cargo_home();
+
+    fs::create_dir(root.join(".cargo")).unwrap();
+    File::create(root.join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            format!(
+                "\
+        [install]
+        root = '{}'
+    ",
+                t3.display()
+            ).as_bytes(),
+        ).unwrap();
+
+    println!("install --root");
+
+    cargo_process("install foo --root")
+        .arg(&t1)
+        .env("CARGO_INSTALL_ROOT", &t2)
+        .run();
+    assert_has_installed_exe(&t1, "foo");
+    assert_has_not_installed_exe(&t2, "foo");
+
+    println!("install CARGO_INSTALL_ROOT");
+
+    cargo_process("install foo")
+        .env("CARGO_INSTALL_ROOT", &t2)
+        .run();
+    assert_has_installed_exe(&t2, "foo");
+    assert_has_not_installed_exe(&t3, "foo");
+
+    println!("install install.root");
+
+    cargo_process("install foo").run();
+    assert_has_installed_exe(&t3, "foo");
+    assert_has_not_installed_exe(&t4, "foo");
+
+    fs::remove_file(root.join(".cargo/config")).unwrap();
+
+    println!("install cargo home");
+
+    cargo_process("install foo").run();
+    assert_has_installed_exe(&t4, "foo");
+}
+
+#[test]
+fn install_path() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    cargo_process("install --path").arg(p.root()).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    p.cargo("install --path .")
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.0.1 [..]
+[ERROR] binary `foo[..]` already exists in destination as part of `foo v0.0.1 [..]`
+Add --force to overwrite
+",
+        ).run();
+}
+
+#[test]
+fn multiple_crates_error() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/main.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] git repository [..]
+[ERROR] multiple packages with binaries found: bar, foo
+",
+        ).run();
+}
+
+#[test]
+fn multiple_crates_select() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/main.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .arg("foo")
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_not_installed_exe(cargo_home(), "bar");
+
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .arg("bar")
+        .run();
+    assert_has_installed_exe(cargo_home(), "bar");
+}
+
+#[test]
+fn multiple_crates_git_all() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", r#"\
+[workspace]
+members = ["bin1", "bin2"]
+"#)
+        .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0"))
+        .file("bin2/Cargo.toml", &basic_manifest("bin2", "0.1.0"))
+        .file("bin1/src/main.rs", r#"fn main() { println!("Hello, world!"); }"#)
+        .file("bin2/src/main.rs", r#"fn main() { println!("Hello, world!"); }"#)
+        .build();
+
+    cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run();
+}
+
+#[test]
+fn multiple_crates_auto_binaries() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "a" }
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    cargo_process("install --path").arg(p.root()).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn multiple_crates_auto_examples() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "extern crate bar;")
+        .file(
+            "examples/foo.rs",
+            "
+            extern crate bar;
+            extern crate foo;
+            fn main() {}
+        ",
+        ).file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    cargo_process("install --path")
+        .arg(p.root())
+        .arg("--example=foo")
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn no_binaries_or_examples() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    cargo_process("install --path")
+        .arg(p.root())
+        .with_status(101)
+        .with_stderr("[ERROR] no packages found with binaries or examples")
+        .run();
+}
+
+#[test]
+fn no_binaries() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("examples/foo.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --path")
+        .arg(p.root())
+        .arg("foo")
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo [..]
+[ERROR] specified package has no binaries
+",
+        ).run();
+}
+
+#[test]
+fn examples() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("examples/foo.rs", "extern crate foo; fn main() {}")
+        .build();
+
+    cargo_process("install --path")
+        .arg(p.root())
+        .arg("--example=foo")
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn install_twice() {
+    let p = project()
+        .file("src/bin/foo-bin1.rs", "fn main() {}")
+        .file("src/bin/foo-bin2.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --path").arg(p.root()).run();
+    cargo_process("install --path")
+        .arg(p.root())
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.0.1 [..]
+[ERROR] binary `foo-bin1[..]` already exists in destination as part of `foo v0.0.1 ([..])`
+binary `foo-bin2[..]` already exists in destination as part of `foo v0.0.1 ([..])`
+Add --force to overwrite
+",
+        ).run();
+}
+
+#[test]
+fn install_force() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    cargo_process("install --path").arg(p.root()).run();
+
+    let p = project()
+        .at("foo2")
+        .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --force --path")
+        .arg(p.root())
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.2.0 ([..])
+[COMPILING] foo v0.2.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+
+    cargo_process("install --list")
+        .with_stdout(
+            "\
+foo v0.2.0 ([..]):
+    foo[..]
+",
+        ).run();
+}
+
+#[test]
+fn install_force_partial_overlap() {
+    let p = project()
+        .file("src/bin/foo-bin1.rs", "fn main() {}")
+        .file("src/bin/foo-bin2.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --path").arg(p.root()).run();
+
+    let p = project()
+        .at("foo2")
+        .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
+        .file("src/bin/foo-bin2.rs", "fn main() {}")
+        .file("src/bin/foo-bin3.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --force --path")
+        .arg(p.root())
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.2.0 ([..])
+[COMPILING] foo v0.2.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo-bin3[EXE]
+[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+
+    cargo_process("install --list")
+        .with_stdout(
+            "\
+foo v0.0.1 ([..]):
+    foo-bin1[..]
+foo v0.2.0 ([..]):
+    foo-bin2[..]
+    foo-bin3[..]
+",
+        ).run();
+}
+
+#[test]
+fn install_force_bin() {
+    let p = project()
+        .file("src/bin/foo-bin1.rs", "fn main() {}")
+        .file("src/bin/foo-bin2.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --path").arg(p.root()).run();
+
+    let p = project()
+        .at("foo2")
+        .file("Cargo.toml", &basic_manifest("foo", "0.2.0"))
+        .file("src/bin/foo-bin1.rs", "fn main() {}")
+        .file("src/bin/foo-bin2.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --force --bin foo-bin2 --path")
+        .arg(p.root())
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.2.0 ([..])
+[COMPILING] foo v0.2.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+
+    cargo_process("install --list")
+        .with_stdout(
+            "\
+foo v0.0.1 ([..]):
+    foo-bin1[..]
+foo v0.2.0 ([..]):
+    foo-bin2[..]
+",
+        ).run();
+}
+
+#[test]
+fn compile_failure() {
+    let p = project().file("src/main.rs", "").build();
+
+    cargo_process("install --path")
+        .arg(p.root())
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be \
+    found at `[..]target`
+
+Caused by:
+  Could not compile `foo`.
+
+To learn more, run the command again with --verbose.
+",
+        ).run();
+}
+
+#[test]
+fn git_repo() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    // use `--locked` to test that we don't even try to write a lockfile
+    cargo_process("install --locked --git")
+        .arg(p.url().to_string())
+        .with_stderr(
+            "\
+[UPDATING] git repository `[..]`
+[INSTALLING] foo v0.1.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn list() {
+    pkg("foo", "0.0.1");
+    pkg("bar", "0.2.1");
+    pkg("bar", "0.2.2");
+
+    cargo_process("install --list").with_stdout("").run();
+
+    cargo_process("install bar --vers =0.2.1").run();
+    cargo_process("install foo").run();
+    cargo_process("install --list")
+        .with_stdout(
+            "\
+bar v0.2.1:
+    bar[..]
+foo v0.0.1:
+    foo[..]
+",
+        ).run();
+}
+
+#[test]
+fn list_error() {
+    pkg("foo", "0.0.1");
+    cargo_process("install foo").run();
+    cargo_process("install --list")
+        .with_stdout(
+            "\
+foo v0.0.1:
+    foo[..]
+",
+        ).run();
+    let mut worldfile_path = cargo_home();
+    worldfile_path.push(".crates.toml");
+    let mut worldfile = OpenOptions::new()
+        .write(true)
+        .open(worldfile_path)
+        .expect(".crates.toml should be there");
+    worldfile.write_all(b"\x00").unwrap();
+    drop(worldfile);
+    cargo_process("install --list --verbose")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse crate metadata at `[..]`
+
+Caused by:
+  invalid TOML found for metadata
+
+Caused by:
+  unexpected character[..]
+",
+        ).run();
+}
+
+#[test]
+fn uninstall_pkg_does_not_exist() {
+    cargo_process("uninstall foo")
+        .with_status(101)
+        .with_stderr("[ERROR] package id specification `foo` matched no packages")
+        .run();
+}
+
+#[test]
+fn uninstall_bin_does_not_exist() {
+    pkg("foo", "0.0.1");
+
+    cargo_process("install foo").run();
+    cargo_process("uninstall foo --bin=bar")
+        .with_status(101)
+        .with_stderr("[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`")
+        .run();
+}
+
+#[test]
+fn uninstall_piecemeal() {
+    let p = project()
+        .file("src/bin/foo.rs", "fn main() {}")
+        .file("src/bin/bar.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --path").arg(p.root()).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_installed_exe(cargo_home(), "bar");
+
+    cargo_process("uninstall foo --bin=bar")
+        .with_stderr("[REMOVING] [..]bar[..]")
+        .run();
+
+    assert_has_installed_exe(cargo_home(), "foo");
+    assert_has_not_installed_exe(cargo_home(), "bar");
+
+    cargo_process("uninstall foo --bin=foo")
+        .with_stderr("[REMOVING] [..]foo[..]")
+        .run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+
+    cargo_process("uninstall foo")
+        .with_status(101)
+        .with_stderr("[ERROR] package id specification `foo` matched no packages")
+        .run();
+}
+
+#[test]
+fn subcommand_works_out_of_the_box() {
+    Package::new("cargo-foo", "1.0.0")
+        .file("src/main.rs", r#"fn main() { println!("bar"); }"#)
+        .publish();
+    cargo_process("install cargo-foo").run();
+    cargo_process("foo").with_stdout("bar\n").run();
+    cargo_process("--list")
+        .with_stdout_contains("    foo\n")
+        .run();
+}
+
+#[test]
+fn installs_from_cwd_by_default() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    p.cargo("install")
+        .with_stderr_contains(
+            "warning: Using `cargo install` to install the binaries for the \
+             package in current working directory is deprecated, \
+             use `cargo install --path .` instead. \
+             Use `cargo build` if you want to simply build the package.",
+        ).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn installs_from_cwd_with_2018_warnings() {
+    if !support::is_nightly() {
+        // Stable rust won't have the edition option.  Remove this once it
+        // is stabilized.
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["edition"]
+
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            edition = "2018"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("install")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "error: Using `cargo install` to install the binaries for the \
+             package in current working directory is no longer supported, \
+             use `cargo install --path .` instead. \
+             Use `cargo build` if you want to simply build the package.",
+        ).run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn uninstall_cwd() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("install --path .")
+        .with_stderr(&format!(
+            "\
+[INSTALLING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] {home}/bin/foo[EXE]
+warning: be sure to add `{home}/bin` to your PATH to be able to run the installed binaries",
+            home = cargo_home().display(),
+        )).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+
+    p.cargo("uninstall")
+        .with_stdout("")
+        .with_stderr(&format!(
+            "\
+             [REMOVING] {home}/bin/foo[EXE]",
+            home = cargo_home().display()
+        )).run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn uninstall_cwd_not_installed() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("uninstall")
+        .with_status(101)
+        .with_stdout("")
+        .with_stderr(
+            "\
+             error: package `foo v0.0.1 ([CWD])` is not installed",
+        ).run();
+}
+
+#[test]
+fn uninstall_cwd_no_project() {
+    let err_msg = if cfg!(windows) {
+        "The system cannot find the file specified."
+    } else {
+        "No such file or directory"
+    };
+    cargo_process("uninstall")
+        .with_status(101)
+        .with_stdout("")
+        .with_stderr(format!(
+            "\
+[ERROR] failed to read `[CWD]/Cargo.toml`
+
+Caused by:
+  {err_msg} (os error 2)",
+            err_msg = err_msg,
+        )).run();
+}
+
+#[test]
+fn do_not_rebuilds_on_local_install() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    p.cargo("build --release").run();
+    cargo_process("install --path")
+        .arg(p.root())
+        .with_stderr(
+            "[INSTALLING] [..]
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+
+    assert!(p.build_dir().exists());
+    assert!(p.release_bin("foo").exists());
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn reports_unsuccessful_subcommand_result() {
+    Package::new("cargo-fail", "1.0.0")
+        .file("src/main.rs", "fn main() { panic!(); }")
+        .publish();
+    cargo_process("install cargo-fail").run();
+    cargo_process("--list")
+        .with_stdout_contains("    fail\n")
+        .run();
+    cargo_process("fail")
+        .with_status(101)
+        .with_stderr_contains("thread '[..]' panicked at 'explicit panic', [..]")
+        .run();
+}
+
+// `cargo install --git` must work when the repository ships a Cargo.lock
+// (with a path dependency listed in it).
+#[test]
+fn git_with_lockfile() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "fn main() {}")
+        .file(
+            "Cargo.lock",
+            r#"
+            [[package]]
+            name = "foo"
+            version = "0.1.0"
+            dependencies = [ "bar 0.1.0" ]
+
+            [[package]]
+            name = "bar"
+            version = "0.1.0"
+        "#,
+        ).build();
+
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .run();
+}
+
+// `cargo install -q` must produce completely empty stderr (no progress,
+// no PATH warning).
+#[test]
+fn q_silences_warnings() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+
+    cargo_process("install -q --path")
+        .arg(p.root())
+        .with_stderr("")
+        .run();
+}
+
+// `cargo install` run from a read-only CWD must still succeed — install
+// writes into CARGO_HOME, not the working directory.
+#[test]
+fn readonly_dir() {
+    pkg("foo", "0.0.1");
+
+    let root = paths::root();
+    let dir = &root.join("readonly");
+    fs::create_dir(root.join("readonly")).unwrap();
+    let mut perms = fs::metadata(dir).unwrap().permissions();
+    perms.set_readonly(true);
+    fs::set_permissions(dir, perms).unwrap();
+
+    cargo_process("install foo").cwd(dir).run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+// Installing from inside a workspace must not rewrite the workspace's
+// Cargo.lock: the lockfile is compared before and after `cargo install`.
+#[test]
+fn use_path_workspace() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["baz"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [package]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "1"
+        "#,
+        ).file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    let lock = p.read_lockfile();
+    p.cargo("install").run();
+    let lock2 = p.read_lockfile();
+    assert_eq!(lock, lock2, "different lockfiles");
+}
+
+// `cargo install` must ignore unresolvable dev-dependencies: plain `build`
+// fails (dev-dep `baz` does not exist) but `install` still succeeds.
+#[test]
+fn dev_dependencies_no_check() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dev-dependencies]
+            baz = "1.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").with_status(101).run();
+    p.cargo("install").run();
+}
+
+// Even with a path dev-dependency present, `cargo install` must leave the
+// existing Cargo.lock byte-identical.
+#[test]
+fn dev_dependencies_lock_file_untouched() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dev-dependencies]
+            bar = { path = "a" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    let lock = p.read_lockfile();
+    p.cargo("install").run();
+    let lock2 = p.read_lockfile();
+    assert!(lock == lock2, "different lockfiles");
+}
+
+// `cargo install --target <host triple>` behaves like a plain install.
+#[test]
+fn install_target_native() {
+    pkg("foo", "0.1.0");
+
+    cargo_process("install foo --target")
+        .arg(support::rustc_host())
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+// `cargo install --target <non-host triple>` works; skipped entirely when
+// the test environment has no cross-compilation support.
+#[test]
+fn install_target_foreign() {
+    if cross_compile::disabled() {
+        return;
+    }
+
+    pkg("foo", "0.1.0");
+
+    cargo_process("install foo --target")
+        .arg(cross_compile::alternate())
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+// `--vers 0.1.1` selects exactly 0.1.1 even though 0.1.2 is published.
+#[test]
+fn vers_precise() {
+    pkg("foo", "0.1.1");
+    pkg("foo", "0.1.2");
+
+    cargo_process("install foo --vers 0.1.1")
+        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+        .run();
+}
+
+// The long flag `--version` is an alias for `--vers` and selects the same
+// exact version.
+#[test]
+fn version_too() {
+    pkg("foo", "0.1.1");
+    pkg("foo", "0.1.2");
+
+    cargo_process("install foo --version 0.1.1")
+        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+        .run();
+}
+
+// Passing both `--version` and `--vers` is rejected by argument parsing
+// (exit status 1, clap's duplicate-argument error).
+#[test]
+fn not_both_vers_and_version() {
+    pkg("foo", "0.1.1");
+    pkg("foo", "0.1.2");
+
+    cargo_process("install foo --version 0.1.1 --vers 0.1.2")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: The argument '--version <VERSION>' was provided more than once, \
+but cannot be used multiple times
+",
+        ).run();
+}
+
+// A non-semver `--vers` value like `0.1` still works for backwards
+// compatibility, but emits the deprecation warning.
+#[test]
+fn legacy_version_requirement() {
+    pkg("foo", "0.1.1");
+
+    cargo_process("install foo --vers 0.1")
+        .with_stderr_contains(
+            "\
+warning: the `--vers` provided, `0.1`, is not a valid semver version
+
+historically Cargo treated this as a semver version requirement accidentally
+and will continue to do so, but this behavior will be removed eventually
+",
+        ).run();
+}
+
+// An scp-style URL (`host:path`) parses as a cannot-be-a-base URL and is
+// rejected by `install --git` with exit status 101.
+#[test]
+fn test_install_git_cannot_be_a_base_url() {
+    cargo_process("install --git github.com:rust-lang-nursery/rustfmt.git").with_status(101).with_stderr("error: invalid url `github.com:rust-lang-nursery/rustfmt.git`: cannot-be-a-base-URLs are not supported").run();
+}
+
+// `uninstall` with several package specs plus `--bin` is rejected: a binary
+// belongs to a single installed package.
+#[test]
+fn uninstall_multiple_and_specifying_bin() {
+    cargo_process("uninstall foo bar --bin baz").with_status(101).with_stderr("error: A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.").run();
+}
+
+// Uninstalling several packages where one spec matches nothing: the existing
+// one is removed, the missing one is reported, and the overall command fails.
+#[test]
+fn uninstall_multiple_and_some_pkg_does_not_exist() {
+    pkg("foo", "0.0.1");
+
+    cargo_process("install foo").run();
+
+    cargo_process("uninstall foo bar")
+        .with_status(101)
+        .with_stderr(
+            "\
+[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
+error: package id specification `bar` matched no packages
+[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above).
+error: some packages failed to uninstall
+",
+        ).run();
+
+    assert_has_not_installed_exe(cargo_home(), "foo");
+    assert_has_not_installed_exe(cargo_home(), "bar");
+}
+
+// A git-source install normally builds in a temp dir (no ./target/release
+// appears), but honors CARGO_TARGET_DIR when it is set.
+#[test]
+fn custom_target_dir_for_git_source() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .run();
+    assert!(!paths::root().join("target/release").is_dir());
+
+    cargo_process("install --force --git")
+        .arg(p.url().to_string())
+        .env("CARGO_TARGET_DIR", "target")
+        .run();
+    assert!(paths::root().join("target/release").is_dir());
+}
+
+// The package's bundled Cargo.lock pins bar 0.1.0; install must use that
+// rather than 0.1.1, whose src/lib.rs is deliberately invalid Rust and would
+// fail the build if selected.
+#[test]
+fn install_respects_lock_file() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.1.1")
+        .file("src/lib.rs", "not rust")
+        .publish();
+    Package::new("foo", "0.1.0")
+        .dep("bar", "0.1")
+        .file("src/lib.rs", "")
+        .file(
+            "src/main.rs",
+            "extern crate foo; extern crate bar; fn main() {}",
+        ).file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+"#,
+        ).publish();
+
+    cargo_process("install foo").run();
+}
+
+// A bundled Cargo.lock whose entries have no `source` field (path-dep style)
+// must not break `cargo install` of the registry package.
+#[test]
+fn lock_file_path_deps_ok() {
+    Package::new("bar", "0.1.0").publish();
+
+    Package::new("foo", "0.1.0")
+        .dep("bar", "0.1")
+        .file("src/lib.rs", "")
+        .file(
+            "src/main.rs",
+            "extern crate foo; extern crate bar; fn main() {}",
+        ).file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+
+[[package]]
+name = "foo"
+version = "0.1.0"
+dependencies = [
+ "bar 0.1.0",
+]
+"#,
+        ).publish();
+
+    cargo_process("install foo").run();
+}
+
+// Regression test for issue 5229: an empty-string crate argument is rejected
+// at argument parsing (exit 1) rather than crashing later.
+#[test]
+fn install_empty_argument() {
+    // Bug 5229
+    cargo_process("install")
+        .arg("")
+        .with_status(1)
+        .with_stderr_contains(
+            "[ERROR] The argument '<crate>...' requires a value but none was supplied",
+        ).run();
+}
+
+// `install --force --git` after the upstream repo gains a new commit must
+// update the recorded revision in .cargo/.crates.toml from the old HEAD to
+// the new one.
+#[test]
+fn git_repo_replace() {
+    let p = git::repo(&paths::root().join("foo"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+    let repo = git2::Repository::open(&p.root()).unwrap();
+    let old_rev = repo.revparse_single("HEAD").unwrap().id();
+    cargo_process("install --git")
+        .arg(p.url().to_string())
+        .run();
+    git::commit(&repo);
+    let new_rev = repo.revparse_single("HEAD").unwrap().id();
+    let mut path = paths::home();
+    path.push(".cargo/.crates.toml");
+
+    assert_ne!(old_rev, new_rev);
+    assert!(
+        fs::read_to_string(path.clone())
+            .unwrap()
+            .contains(&format!("{}", old_rev))
+    );
+    cargo_process("install --force --git")
+        .arg(p.url().to_string())
+        .run();
+    assert!(
+        fs::read_to_string(path)
+            .unwrap()
+            .contains(&format!("{}", new_rev))
+    );
+}
+
+// Installing a workspace member by path reuses the artifacts built into the
+// workspace-level target dir: expected stderr contains no [COMPILING] line.
+#[test]
+fn workspace_uses_workspace_target_dir() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+                authors = []
+
+                [workspace]
+
+                [dependencies]
+                bar = { path = 'bar' }
+            "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --release").cwd(p.root().join("bar")).run();
+    cargo_process("install --path")
+        .arg(p.root().join("bar"))
+        .with_stderr(
+            "[INSTALLING] [..]
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]
+warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
+",
+        ).run();
+}
+
+// A `.cargo/config` in the CWD pointing `build.target` at a bogus triple
+// must not affect `cargo install` — install ignores local config.
+#[test]
+fn install_ignores_local_cargo_config() {
+    pkg("bar", "0.0.1");
+
+    let p = project()
+        .file(
+            ".cargo/config",
+            r#"
+                [build]
+                target = "non-existing-target"
+            "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("install bar").run();
+    assert_has_installed_exe(cargo_home(), "bar");
+}
+
+// In contrast to local config, the CARGO_HOME config IS honored: a global
+// `build.target = 'nonexistent'` makes install fail with that target passed
+// to rustc.
+#[test]
+fn install_global_cargo_config() {
+    pkg("bar", "0.0.1");
+
+    let config = cargo_home().join("config");
+    // Append to any pre-existing config rather than clobbering it.
+    let mut toml = fs::read_to_string(&config).unwrap_or(String::new());
+
+    toml.push_str(r#"
+        [build]
+        target = 'nonexistent'
+    "#);
+    fs::write(&config, toml).unwrap();
+
+    cargo_process("install bar")
+        .with_status(101)
+        .with_stderr_contains("[..]--target nonexistent[..]")
+        .run();
+}
diff --git a/tests/testsuite/jobserver.rs b/tests/testsuite/jobserver.rs
new file mode 100644 (file)
index 0000000..d2d0333
--- /dev/null
@@ -0,0 +1,192 @@
+use std::net::TcpListener;
+use std::process::Command;
+use std::thread;
+
+use support::{cargo_exe, project};
+
+// Cargo must export a usable jobserver to build scripts via CARGO_MAKEFLAGS.
+// The build script parses the `--jobserver*` argument; on Unix it verifies the
+// two comma-separated fds are a live pipe pair (read a token, write it back).
+// On Windows validation is skipped.
+#[test]
+fn jobserver_exists() {
+    let p = project()
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+
+            fn main() {
+                let var = env::var("CARGO_MAKEFLAGS").unwrap();
+                let arg = var.split(' ')
+                             .find(|p| p.starts_with("--jobserver"))
+                             .unwrap();
+                let val = &arg[arg.find('=').unwrap() + 1..];
+                validate(val);
+            }
+
+            #[cfg(unix)]
+            fn validate(s: &str) {
+                use std::fs::File;
+                use std::io::*;
+                use std::os::unix::prelude::*;
+
+                let fds = s.split(',').collect::<Vec<_>>();
+                println!("{}", s);
+                assert_eq!(fds.len(), 2);
+                unsafe {
+                    let mut read = File::from_raw_fd(fds[0].parse().unwrap());
+                    let mut write = File::from_raw_fd(fds[1].parse().unwrap());
+
+                    let mut buf = [0];
+                    assert_eq!(read.read(&mut buf).unwrap(), 1);
+                    assert_eq!(write.write(&buf).unwrap(), 1);
+                }
+            }
+
+            #[cfg(windows)]
+            fn validate(_: &str) {
+                // a little too complicated for a test...
+            }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+}
+
+// When invoked from `make -j2`, Cargo must inherit make's jobserver and cap
+// parallelism at 2. Three identical build scripts each connect to a local TCP
+// listener and block until their connection is dropped; the checker thread
+// accepts two connections, verifies no third one arrives while both are held
+// (the -j2 cap), then releases one and accepts the third. Skipped when no
+// (mingw32-)make is available.
+#[test]
+fn makes_jobserver_used() {
+    let make = if cfg!(windows) {
+        "mingw32-make"
+    } else {
+        "make"
+    };
+    if Command::new(make).arg("--version").output().is_err() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            d1 = { path = "d1" }
+            d2 = { path = "d2" }
+            d3 = { path = "d3" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+            build = "../dbuild.rs"
+        "#,
+        ).file("d1/src/lib.rs", "")
+        .file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+            build = "../dbuild.rs"
+        "#,
+        ).file("d2/src/lib.rs", "")
+        .file(
+            "d3/Cargo.toml",
+            r#"
+            [package]
+            name = "d3"
+            version = "0.0.1"
+            authors = []
+            build = "../dbuild.rs"
+        "#,
+        ).file("d3/src/lib.rs", "")
+        .file(
+            "dbuild.rs",
+            r#"
+            use std::net::TcpStream;
+            use std::env;
+            use std::io::Read;
+
+            fn main() {
+                let addr = env::var("ADDR").unwrap();
+                let mut stream = TcpStream::connect(addr).unwrap();
+                let mut v = Vec::new();
+                stream.read_to_end(&mut v).unwrap();
+            }
+        "#,
+        ).file(
+            "Makefile",
+            "\
+all:
+\t+$(CARGO) build
+",
+        ).build();
+
+    let l = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = l.local_addr().unwrap();
+
+    let child = thread::spawn(move || {
+        let a1 = l.accept().unwrap();
+        let a2 = l.accept().unwrap();
+        l.set_nonblocking(true).unwrap();
+
+        // With two build scripts blocked, -j2 means no third connection may
+        // appear; poll for a while to be reasonably sure.
+        for _ in 0..1000 {
+            assert!(l.accept().is_err());
+            thread::yield_now();
+        }
+
+        // Releasing one slot lets the third build script run.
+        drop(a1);
+        l.set_nonblocking(false).unwrap();
+        let a3 = l.accept().unwrap();
+
+        drop((a2, a3));
+    });
+
+    p.process(make)
+        .env("CARGO", cargo_exe())
+        .env("ADDR", addr.to_string())
+        .arg("-j2")
+        .run();
+    child.join().unwrap();
+}
+
+// If make already provides a jobserver, an explicit `-j2` on the Cargo
+// command line is ignored with a warning. Skipped when no (mingw32-)make is
+// available.
+#[test]
+fn jobserver_and_j() {
+    let make = if cfg!(windows) {
+        "mingw32-make"
+    } else {
+        "make"
+    };
+    if Command::new(make).arg("--version").output().is_err() {
+        return;
+    }
+
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "Makefile",
+            "\
+all:
+\t+$(CARGO) build -j2
+",
+        ).build();
+
+    p.process(make)
+        .env("CARGO", cargo_exe())
+        .arg("-j2")
+        .with_stderr(
+            "\
+warning: a `-j` argument was passed to Cargo but Cargo is also configured \
+with an external jobserver in its environment, ignoring the `-j` parameter
+[COMPILING] [..]
+[FINISHED] [..]
+",
+        ).run();
+}
diff --git a/tests/testsuite/local_registry.rs b/tests/testsuite/local_registry.rs
new file mode 100644 (file)
index 0000000..08076b7
--- /dev/null
@@ -0,0 +1,422 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use support::paths::{self, CargoPathExt};
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+// Test fixture: write a .cargo/config that replaces crates.io with a local
+// registry rooted at ./registry. Called at the top of most tests in this
+// module.
+fn setup() {
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        br#"
+        [source.crates-io]
+        registry = 'https://wut'
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        local-registry = 'registry'
+    "#
+    ));
+}
+
+// Basic local-registry flow: a dep is [UNPACKING]'d (not downloaded), builds
+// succeed, a rebuild is a no-op, and tests run.
+#[test]
+fn simple() {
+    setup();
+    Package::new("bar", "0.0.1")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.0.1"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UNPACKING] bar v0.0.1 ([..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+    p.cargo("test").run();
+}
+
+// A `*` requirement resolves to the highest version in the local registry,
+// and `cargo update` picks up a version published afterwards.
+#[test]
+fn multiple_versions() {
+    setup();
+    Package::new("bar", "0.0.1").local(true).publish();
+    Package::new("bar", "0.1.0")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UNPACKING] bar v0.1.0 ([..])
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+
+    Package::new("bar", "0.2.0")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+
+    p.cargo("update -v")
+        .with_stderr("[UPDATING] bar v0.1.0 -> v0.2.0")
+        .run();
+}
+
+// Two distinct crates from the same local registry can both be depended on
+// and built in one project.
+#[test]
+fn multiple_names() {
+    setup();
+    Package::new("bar", "0.0.1")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+    Package::new("baz", "0.1.0")
+        .local(true)
+        .file("src/lib.rs", "pub fn baz() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+            baz = "*"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            extern crate baz;
+            pub fn foo() {
+                bar::bar();
+                baz::baz();
+            }
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UNPACKING] [..]
+[UNPACKING] [..]
+[COMPILING] [..]
+[COMPILING] [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// Local-registry crates may depend on each other: baz depends on bar, and
+// the build order bar -> baz -> foo is reflected in stderr.
+#[test]
+fn interdependent() {
+    setup();
+    Package::new("bar", "0.0.1")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+    Package::new("baz", "0.1.0")
+        .local(true)
+        .dep("bar", "*")
+        .file("src/lib.rs", "extern crate bar; pub fn baz() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+            baz = "*"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            extern crate baz;
+            pub fn foo() {
+                bar::bar();
+                baz::baz();
+            }
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UNPACKING] [..]
+[UNPACKING] [..]
+[COMPILING] bar v0.0.1
+[COMPILING] baz v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// A published package whose manifest declares `bar = { path = "bar",
+// version = "*" }` has the path rewritten on publish, so the consumer
+// resolves bar from the registry and the build succeeds.
+#[test]
+fn path_dep_rewritten() {
+    setup();
+    Package::new("bar", "0.0.1")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+    Package::new("baz", "0.1.0")
+        .local(true)
+        .dep("bar", "*")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "baz"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                bar = { path = "bar", version = "*" }
+            "#,
+        ).file("src/lib.rs", "extern crate bar; pub fn baz() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+            baz = "*"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            extern crate baz;
+            pub fn foo() {
+                bar::bar();
+                baz::baz();
+            }
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UNPACKING] [..]
+[UNPACKING] [..]
+[COMPILING] bar v0.0.1
+[COMPILING] baz v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// Pointing a source replacement's `local-registry` at a nonexistent path
+// fails with a clear cause chain (exit 101).
+#[test]
+fn invalid_dir_bad() {
+    setup();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [source.crates-io]
+            registry = 'https://wut'
+            replace-with = 'my-awesome-local-directory'
+
+            [source.my-awesome-local-directory]
+            local-registry = '/path/to/nowhere'
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update registry `https://[..]`
+
+Caused by:
+  failed to update replaced source registry `https://[..]`
+
+Caused by:
+  local registry path is not a directory: [..]path[..]to[..]nowhere
+",
+        ).run();
+}
+
+// Locking against real crates.io and then switching to a local-registry
+// replacement whose bar 0.0.1 has different contents must fail the checksum
+// comparison against the lockfile.
+#[test]
+fn different_directory_replacing_the_registry_is_bad() {
+    setup();
+
+    // Move our test's .cargo/config to a temporary location and publish a
+    // registry package we're going to use first.
+    let config = paths::root().join(".cargo");
+    let config_tmp = paths::root().join(".cargo-old");
+    t!(fs::rename(&config, &config_tmp));
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    // Generate a lock file against the crates.io registry
+    Package::new("bar", "0.0.1").publish();
+    p.cargo("build").run();
+
+    // Switch back to our directory source, and now that we're replacing
+    // crates.io make sure that this fails because we're replacing with a
+    // different checksum
+    config.rm_rf();
+    t!(fs::rename(&config_tmp, &config));
+    Package::new("bar", "0.0.1")
+        .file("src/lib.rs", "invalid")
+        .local(true)
+        .publish();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] checksum for `bar v0.0.1` changed between lock files
+
+this could be indicative of a few possible errors:
+
+    * the lock file is corrupt
+    * a replacement source in use (e.g. a mirror) returned a different checksum
+    * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.0.1` is the same as when the lockfile was generated
+
+",
+        ).run();
+}
+
+// Like `simple`, but the [source.crates-io] table omits the `registry` URL
+// entirely — only `replace-with` is given — and everything still works.
+#[test]
+fn crates_io_registry_url_is_optional() {
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        br#"
+        [source.crates-io]
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        local-registry = 'registry'
+    "#
+    ));
+
+    Package::new("bar", "0.0.1")
+        .local(true)
+        .file("src/lib.rs", "pub fn bar() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.0.1"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UNPACKING] bar v0.0.1 ([..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+    p.cargo("test").run();
+}
diff --git a/tests/testsuite/lockfile_compat.rs b/tests/testsuite/lockfile_compat.rs
new file mode 100644 (file)
index 0000000..a4adcdc
--- /dev/null
@@ -0,0 +1,486 @@
+use support::git;
+use support::registry::Package;
+use support::{basic_manifest, lines_match, project};
+
+// Driver: exercise the old `[root]`-style lockfile migration under both
+// `cargo build` and `cargo update`.
+#[test]
+fn oldest_lockfile_still_works() {
+    let cargo_commands = vec!["build", "update"];
+    for cargo_command in cargo_commands {
+        oldest_lockfile_still_works_with_command(cargo_command);
+    }
+}
+
+// Helper for `oldest_lockfile_still_works`: start with the oldest supported
+// lockfile format (a `[root]` section, no [metadata]); after running the
+// given command the lockfile must be rewritten into the modern format
+// (no [root], checksums in [metadata]). Lines are compared via
+// `lines_match` so the checksum value can stay a `[..]` wildcard.
+fn oldest_lockfile_still_works_with_command(cargo_command: &str) {
+    Package::new("bar", "0.1.0").publish();
+
+    let expected_lockfile = r#"[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "[..]"
+"#;
+
+    let old_lockfile = r#"[root]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+"#;
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("Cargo.lock", old_lockfile)
+        .build();
+
+    p.cargo(cargo_command).run();
+
+    let lock = p.read_lockfile();
+    for (l, r) in expected_lockfile.lines().zip(lock.lines()) {
+        assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r);
+    }
+
+    // zip() stops at the shorter side, so also require equal line counts.
+    assert_eq!(lock.lines().count(), expected_lockfile.lines().count());
+}
+
+// `cargo build --locked` must NOT migrate an old `[root]`-style lockfile:
+// the lockfile (built with the real published checksum) stays identical.
+#[test]
+fn frozen_flag_preserves_old_lockfile() {
+    let cksum = Package::new("bar", "0.1.0").publish();
+
+    let old_lockfile = format!(
+        r#"[root]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}"
+"#,
+        cksum,
+    );
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("Cargo.lock", &old_lockfile)
+        .build();
+
+    p.cargo("build --locked").run();
+
+    let lock = p.read_lockfile();
+    for (l, r) in old_lockfile.lines().zip(lock.lines()) {
+        assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r);
+    }
+
+    // zip() stops at the shorter side, so also require equal line counts.
+    assert_eq!(lock.lines().count(), old_lockfile.lines().count());
+}
+
+// Bogus/irrelevant [metadata] checksum entries (wrong crate, wrong version)
+// don't break the build, and the rewritten lockfile starts with the expected
+// canonical content.
+#[test]
+fn totally_wild_checksums_works() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"checksum bar 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+        );
+
+    let p = p.build();
+
+    p.cargo("build").run();
+
+    let lock = p.read_lockfile();
+    assert!(
+        lock.starts_with(
+            r#"
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[metadata]
+"#.trim()
+        )
+    );
+}
+
+// A [metadata] checksum that doesn't match the registry package must abort
+// the build with the "changed between lock files" diagnostic.
+#[test]
+fn wrong_checksum_is_an_error() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+        );
+
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+error: checksum for `bar v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+    * the lock file is corrupt
+    * a replacement source in use (e.g. a mirror) returned a different checksum
+    * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.1.0` is the same as when the lockfile was generated
+
+",
+        ).run();
+}
+
+// If the checksum is unlisted in the lockfile (e.g. <none>) yet we can
+// calculate it (e.g. it's a registry dep), then in theory we could just fill
+// it in; today, however, Cargo treats this as an error, and this test locks
+// down that behavior.
+#[test]
+fn unlisted_checksum_is_bad_if_we_calculate() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
+"#,
+        );
+    let p = p.build();
+
+    p.cargo("fetch")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+error: checksum for `bar v0.1.0` was not previously calculated, but a checksum \
+could now be calculated
+
+this could be indicative of a few possible situations:
+
+    * the source `[..]` did not previously support checksums,
+      but was replaced with one that does
+    * newer Cargo implementations know how to checksum this source, but this
+      older implementation does not
+    * the lock file is corrupt
+
+",
+        ).run();
+}
+
+// If the checksum is listed in the lockfile yet we cannot calculate it (e.g.
+// git dependencies as of today), then make sure we choke.
+#[test]
+fn listed_checksum_bad_if_we_cannot_compute() {
+    let git = git::new("bar", |p| {
+        p.file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+            .file("src/lib.rs", "")
+    }).unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = {{ git = '{}' }}
+        "#,
+                git.url()
+            ),
+        ).file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            &format!(
+                r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (git+{0})"
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "git+{0}"
+
+[metadata]
+"checksum bar 0.1.0 (git+{0})" = "checksum"
+"#,
+                git.url()
+            ),
+        );
+
+    let p = p.build();
+
+    p.cargo("fetch")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] git repository `[..]`
+error: checksum for `bar v0.1.0 ([..])` could not be calculated, but a \
+checksum is listed in the existing lock file[..]
+
+this could be indicative of a few possible situations:
+
+    * the source `[..]` supports checksums,
+      but was replaced with one that doesn't
+    * the lock file is corrupt
+
+unable to verify that `bar v0.1.0 ([..])` is the same as when the lockfile was generated
+
+",
+        ).run();
+}
+
+#[test]
+fn current_lockfile_format() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+
+    let actual = p.read_lockfile();
+
+    let expected = "\
+[[package]]
+name = \"bar\"
+version = \"0.1.0\"
+source = \"registry+https://github.com/rust-lang/crates.io-index\"
+
+[[package]]
+name = \"foo\"
+version = \"0.0.1\"
+dependencies = [
+ \"bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\",
+]
+
+[metadata]
+\"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\" = \"[..]\"";
+
+    for (l, r) in expected.lines().zip(actual.lines()) {
+        assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r);
+    }
+
+    assert_eq!(actual.lines().count(), expected.lines().count());
+}
+
+#[test]
+fn lockfile_without_root() {
+    Package::new("bar", "0.1.0").publish();
+
+    let lockfile = r#"[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+"#;
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("Cargo.lock", lockfile);
+
+    let p = p.build();
+
+    p.cargo("build").run();
+
+    let lock = p.read_lockfile();
+    assert!(lock.starts_with(lockfile.trim()));
+}
+
+#[test]
+fn locked_correct_error() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build --locked")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+error: the lock file [CWD]/Cargo.lock needs to be updated but --locked was passed to prevent this
+",
+        ).run();
+}
diff --git a/tests/testsuite/login.rs b/tests/testsuite/login.rs
new file mode 100644 (file)
index 0000000..6fc84d7
--- /dev/null
@@ -0,0 +1,167 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use cargo::core::Shell;
+use cargo::util::config::Config;
+use support::cargo_process;
+use support::install::cargo_home;
+use support::registry::registry;
+use toml;
+
+const TOKEN: &str = "test-token";
+const ORIGINAL_TOKEN: &str = "api-token";
+const CONFIG_FILE: &str = r#"
+    [registry]
+    token = "api-token"
+
+    [registries.test-reg]
+    index = "http://dummy_index/"
+"#;
+
+fn setup_old_credentials() {
+    let config = cargo_home().join("config");
+    t!(fs::create_dir_all(config.parent().unwrap()));
+    t!(t!(File::create(&config)).write_all(CONFIG_FILE.as_bytes()));
+}
+
+fn setup_new_credentials() {
+    let config = cargo_home().join("credentials");
+    t!(fs::create_dir_all(config.parent().unwrap()));
+    t!(t!(File::create(&config))
+        .write_all(format!(r#"token = "{token}""#, token = ORIGINAL_TOKEN).as_bytes()));
+}
+
+fn check_token(expected_token: &str, registry: Option<&str>) -> bool {
+    let credentials = cargo_home().join("credentials");
+    assert!(credentials.is_file());
+
+    let mut contents = String::new();
+    File::open(&credentials)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    let toml: toml::Value = contents.parse().unwrap();
+
+    let token = match (registry, toml) {
+        // A registry has been provided, so check that the token exists in a
+        // table for the registry.
+        (Some(registry), toml::Value::Table(table)) => table
+            .get("registries")
+            .and_then(|registries_table| registries_table.get(registry))
+            .and_then(|registry_table| match registry_table.get("token") {
+                Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()),
+                _ => None,
+            }),
+        // There is no registry provided, so check the global token instead.
+        (None, toml::Value::Table(table)) => table
+            .get("registry")
+            .and_then(|registry_table| registry_table.get("token"))
+            .and_then(|v| match v {
+                toml::Value::String(ref token) => Some(token.as_str().to_string()),
+                _ => None,
+            }),
+        _ => None,
+    };
+
+    if let Some(token_val) = token {
+        token_val == expected_token
+    } else {
+        false
+    }
+}
+
+#[test]
+fn login_with_old_credentials() {
+    setup_old_credentials();
+
+    cargo_process("login --host")
+        .arg(registry().to_string())
+        .arg(TOKEN)
+        .run();
+
+    let config = cargo_home().join("config");
+    assert!(config.is_file());
+
+    let mut contents = String::new();
+    File::open(&config)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert_eq!(CONFIG_FILE, contents);
+
+    // Ensure that we get the new token for the registry
+    assert!(check_token(TOKEN, None));
+}
+
+#[test]
+fn login_with_new_credentials() {
+    setup_new_credentials();
+
+    cargo_process("login --host")
+        .arg(registry().to_string())
+        .arg(TOKEN)
+        .run();
+
+    let config = cargo_home().join("config");
+    assert!(!config.is_file());
+
+    // Ensure that we get the new token for the registry
+    assert!(check_token(TOKEN, None));
+}
+
+#[test]
+fn login_with_old_and_new_credentials() {
+    setup_new_credentials();
+    login_with_old_credentials();
+}
+
+#[test]
+fn login_without_credentials() {
+    cargo_process("login --host")
+        .arg(registry().to_string())
+        .arg(TOKEN)
+        .run();
+
+    let config = cargo_home().join("config");
+    assert!(!config.is_file());
+
+    // Ensure that we get the new token for the registry
+    assert!(check_token(TOKEN, None));
+}
+
+#[test]
+fn new_credentials_is_used_instead_old() {
+    setup_old_credentials();
+    setup_new_credentials();
+
+    cargo_process("login --host")
+        .arg(registry().to_string())
+        .arg(TOKEN)
+        .run();
+
+    let config = Config::new(Shell::new(), cargo_home(), cargo_home());
+
+    let token = config.get_string("registry.token").unwrap().map(|p| p.val);
+    assert_eq!(token.unwrap(), TOKEN);
+}
+
+#[test]
+fn registry_credentials() {
+    setup_old_credentials();
+    setup_new_credentials();
+
+    let reg = "test-reg";
+
+    cargo_process("login --registry")
+        .arg(reg)
+        .arg(TOKEN)
+        .arg("-Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    // Ensure that we have not updated the default token
+    assert!(check_token(ORIGINAL_TOKEN, None));
+
+    // Also ensure that we get the new token for the registry
+    assert!(check_token(TOKEN, Some(reg)));
+}
diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs
new file mode 100644 (file)
index 0000000..bd1dc33
--- /dev/null
@@ -0,0 +1,112 @@
+#![deny(warnings)]
+#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
+#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
+
+extern crate bufstream;
+extern crate cargo;
+extern crate filetime;
+extern crate flate2;
+extern crate git2;
+extern crate glob;
+extern crate hex;
+#[macro_use]
+extern crate lazy_static;
+extern crate libc;
+#[macro_use]
+extern crate proptest;
+#[macro_use]
+extern crate serde_derive;
+#[macro_use]
+extern crate serde_json;
+extern crate tar;
+extern crate toml;
+extern crate url;
+#[cfg(windows)]
+extern crate winapi;
+
+#[macro_use]
+mod support;
+
+mod alt_registry;
+mod bad_config;
+mod bad_manifest_path;
+mod bench;
+mod build;
+mod build_auth;
+mod build_lib;
+mod build_plan;
+mod build_script;
+mod build_script_env;
+mod cargo_alias_config;
+mod cargo_command;
+mod cargo_features;
+mod cfg;
+mod check;
+mod clean;
+mod concurrent;
+mod config;
+mod corrupt_git;
+mod cross_compile;
+mod cross_publish;
+mod custom_target;
+mod death;
+mod dep_info;
+mod directory;
+mod doc;
+mod edition;
+mod features;
+mod fetch;
+mod fix;
+mod freshness;
+mod generate_lockfile;
+mod git;
+mod init;
+mod install;
+mod jobserver;
+mod local_registry;
+mod lockfile_compat;
+mod login;
+mod member_errors;
+mod metabuild;
+mod metadata;
+mod net_config;
+mod new;
+mod out_dir;
+mod overrides;
+mod package;
+mod patch;
+mod path;
+mod plugins;
+mod proc_macro;
+mod profile_config;
+mod profile_overrides;
+mod profile_targets;
+mod profiles;
+mod publish;
+mod read_manifest;
+mod registry;
+mod rename_deps;
+mod required_features;
+mod resolve;
+mod run;
+mod rustc;
+mod rustc_info_cache;
+mod rustdoc;
+mod rustdocflags;
+mod rustflags;
+mod search;
+mod shell_quoting;
+mod small_fd_limits;
+mod test;
+mod tool_paths;
+mod update;
+mod verify_project;
+mod version;
+mod warn_on_failure;
+mod workspaces;
+
+#[test]
+fn aaa_trigger_cross_compile_disabled_check() {
+    // This triggers the cross compile disabled check to run ASAP, see #5141
+    support::cross_compile::disabled();
+}
diff --git a/tests/testsuite/member_errors.rs b/tests/testsuite/member_errors.rs
new file mode 100644 (file)
index 0000000..a4f82cf
--- /dev/null
@@ -0,0 +1,154 @@
+use cargo::core::{compiler::CompileMode, Workspace};
+use cargo::ops::{self, CompileOptions};
+use cargo::util::{config::Config, errors::ManifestError};
+use cargo::core::resolver::ResolveError;
+
+use support::project;
+
+/// Tests inclusion of a `ManifestError` pointing to a member manifest
+/// when that manifest fails to deserialize.
+#[test]
+fn toml_deserialize_manifest_error() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foobar == "0.55"
+        "#,
+        )
+        .file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    let root_manifest_path = p.root().join("Cargo.toml");
+    let member_manifest_path = p.root().join("bar").join("Cargo.toml");
+
+    let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err();
+    eprintln!("{:?}", error);
+
+    let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError");
+    assert_eq!(manifest_err.manifest_path(), &root_manifest_path);
+
+    let causes: Vec<_> = manifest_err.manifest_causes().collect();
+    assert_eq!(causes.len(), 1, "{:?}", causes);
+    assert_eq!(causes[0].manifest_path(), &member_manifest_path);
+}
+
+/// Tests inclusion of a `ManifestError` pointing to a member manifest
+/// when that manifest has an invalid dependency path.
+#[test]
+fn member_manifest_path_io_error() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foobar = { path = "nosuch" }
+        "#,
+        )
+        .file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    let root_manifest_path = p.root().join("Cargo.toml");
+    let member_manifest_path = p.root().join("bar").join("Cargo.toml");
+    let missing_manifest_path = p.root().join("bar").join("nosuch").join("Cargo.toml");
+
+    let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err();
+    eprintln!("{:?}", error);
+
+    let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError");
+    assert_eq!(manifest_err.manifest_path(), &root_manifest_path);
+
+    let causes: Vec<_> = manifest_err.manifest_causes().collect();
+    assert_eq!(causes.len(), 2, "{:?}", causes);
+    assert_eq!(causes[0].manifest_path(), &member_manifest_path);
+    assert_eq!(causes[1].manifest_path(), &missing_manifest_path);
+}
+
+/// Test dependency version errors provide which package failed via a `ResolveError`.
+#[test]
+fn member_manifest_version_error() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            i-dont-exist = "0.55"
+        "#,
+        )
+        .file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    let config = Config::default().unwrap();
+    let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap();
+    let compile_options = CompileOptions::new(&config, CompileMode::Build).unwrap();
+    let member_bar = ws.members().find(|m| &*m.name() == "bar").unwrap();
+
+    let error = ops::compile(&ws, &compile_options).map(|_| ()).unwrap_err();
+    eprintln!("{:?}", error);
+
+    let resolve_err: &ResolveError = error.downcast_ref().expect("Not a ResolveError");
+    let package_path = resolve_err.package_path();
+    assert_eq!(package_path.len(), 1, "package_path: {:?}", package_path);
+    assert_eq!(&package_path[0], member_bar.package_id());
+}
diff --git a/tests/testsuite/metabuild.rs b/tests/testsuite/metabuild.rs
new file mode 100644 (file)
index 0000000..6420eda
--- /dev/null
@@ -0,0 +1,652 @@
+use glob::glob;
+use serde_json;
+use std::str;
+use support::{
+    basic_lib_manifest, basic_manifest, project, registry::Package, rustc_host, Project,
+};
+
+#[test]
+fn metabuild_gated() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = ["mb"]
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  feature `metabuild` is required
+
+consider adding `cargo-features = [\"metabuild\"]` to the manifest
+",
+        ).run();
+}
+
+fn basic_project() -> Project {
+    project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = ["mb", "mb-other"]
+
+            [build-dependencies]
+            mb = {path="mb"}
+            mb-other = {path="mb-other"}
+        "#,
+        ).file("src/lib.rs", "")
+        .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+        .file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb"); }"#,
+        ).file(
+            "mb-other/Cargo.toml",
+            r#"
+            [package]
+            name = "mb-other"
+            version = "0.0.1"
+        "#,
+        ).file(
+            "mb-other/src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb-other"); }"#,
+        ).build()
+}
+
+#[test]
+fn metabuild_basic() {
+    let p = basic_project();
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[foo 0.0.1] Hello mb")
+        .with_stdout_contains("[foo 0.0.1] Hello mb-other")
+        .run();
+}
+
+#[test]
+fn metabuild_error_both() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = "mb"
+
+            [build-dependencies]
+            mb = {path="mb"}
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", r#"fn main() {}"#)
+        .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+        .file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb"); }"#,
+        ).build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to parse manifest at [..]
+
+Caused by:
+  cannot specify both `metabuild` and `build`
+",
+        ).run();
+}
+
+#[test]
+fn metabuild_missing_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = "mb"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to parse manifest at [..]
+
+Caused by:
+  metabuild package `mb` must be specified in `build-dependencies`",
+        ).run();
+}
+
+#[test]
+fn metabuild_optional_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = "mb"
+
+            [build-dependencies]
+            mb = {path="mb", optional=true}
+        "#,
+        ).file("src/lib.rs", "")
+        .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+        .file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb"); }"#,
+        ).build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_does_not_contain("[foo 0.0.1] Hello mb")
+        .run();
+
+    p.cargo("build -vv --features mb")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[foo 0.0.1] Hello mb")
+        .run();
+}
+
+#[test]
+fn metabuild_lib_name() {
+    // Test when setting `name` on [lib].
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = "mb"
+
+            [build-dependencies]
+            mb = {path="mb"}
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "mb/Cargo.toml",
+            r#"
+            [package]
+            name = "mb"
+            version = "0.0.1"
+            [lib]
+            name = "other"
+        "#,
+        ).file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb"); }"#,
+        ).build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[foo 0.0.1] Hello mb")
+        .run();
+}
+
+#[test]
+fn metabuild_fresh() {
+    // Check that rebuild is fresh.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            metabuild = "mb"
+
+            [build-dependencies]
+            mb = {path="mb"}
+        "#,
+        ).file("src/lib.rs", "")
+        .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+        .file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb"); }"#,
+        ).build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[foo 0.0.1] Hello mb")
+        .run();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_does_not_contain("[foo 0.0.1] Hello mb")
+        .with_stderr(
+            "\
+[FRESH] mb [..]
+[FRESH] foo [..]
+[FINISHED] dev [..]
+",
+        ).run();
+}
+
+#[test]
+fn metabuild_links() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            links = "cat"
+            metabuild = "mb"
+
+            [build-dependencies]
+            mb = {path="mb"}
+        "#,
+        ).file("src/lib.rs", "")
+        .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+        .file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() {
+                assert_eq!(std::env::var("CARGO_MANIFEST_LINKS"),
+                    Ok("cat".to_string()));
+                println!("Hello mb");
+            }"#,
+        ).build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[foo 0.0.1] Hello mb")
+        .run();
+}
+
+#[test]
+fn metabuild_override() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            links = "cat"
+            metabuild = "mb"
+
+            [build-dependencies]
+            mb = {path="mb"}
+        "#,
+        ).file("src/lib.rs", "")
+        .file("mb/Cargo.toml", &basic_lib_manifest("mb"))
+        .file(
+            "mb/src/lib.rs",
+            r#"pub fn metabuild() { panic!("should not run"); }"#,
+        ).file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}.cat]
+            rustc-link-lib = ["a"]
+        "#,
+                rustc_host()
+            ),
+        ).build();
+
+    p.cargo("build -vv").masquerade_as_nightly_cargo().run();
+}
+
+#[test]
+fn metabuild_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["member1", "member2"]
+        "#,
+        ).file(
+            "member1/Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "member1"
+            version = "0.0.1"
+            metabuild = ["mb1", "mb2"]
+
+            [build-dependencies]
+            mb1 = {path="../../mb1"}
+            mb2 = {path="../../mb2"}
+        "#,
+        ).file("member1/src/lib.rs", "")
+        .file(
+            "member2/Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "member2"
+            version = "0.0.1"
+            metabuild = ["mb1"]
+
+            [build-dependencies]
+            mb1 = {path="../../mb1"}
+        "#,
+        ).file("member2/src/lib.rs", "")
+        .build();
+
+    project()
+        .at("mb1")
+        .file("Cargo.toml", &basic_lib_manifest("mb1"))
+        .file(
+            "src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+        )
+        .build();
+
+    project()
+        .at("mb2")
+        .file("Cargo.toml", &basic_lib_manifest("mb2"))
+        .file(
+            "src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+        )
+        .build();
+
+    p.cargo("build -vv --all")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1")
+        .with_stdout_contains("[member1 0.0.1] Hello mb2 [..]member1")
+        .with_stdout_contains("[member2 0.0.1] Hello mb1 [..]member2")
+        .with_stdout_does_not_contain("[member2 0.0.1] Hello mb2 [..]member2")
+        .run();
+}
+
+#[test]
+fn metabuild_metadata() {
+    // The metabuild Target is filtered out of the `metadata` results.
+    let p = basic_project();
+
+    let output = p
+        .cargo("metadata --format-version=1")
+        .masquerade_as_nightly_cargo()
+        .exec_with_output()
+        .expect("cargo metadata failed");
+    let stdout = str::from_utf8(&output.stdout).unwrap();
+    let meta: serde_json::Value = serde_json::from_str(stdout).expect("failed to parse json");
+    let mb_info: Vec<&str> = meta["packages"]
+        .as_array()
+        .unwrap()
+        .iter()
+        .filter(|p| p["name"].as_str().unwrap() == "foo")
+        .next()
+        .unwrap()["metabuild"]
+        .as_array()
+        .unwrap()
+        .iter()
+        .map(|s| s.as_str().unwrap())
+        .collect();
+    assert_eq!(mb_info, ["mb", "mb-other"]);
+}
+
+#[test]
+fn metabuild_build_plan() {
+    let p = basic_project();
+
+    p.cargo("build --build-plan -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+{
+    "invocations": [
+        {
+            "package_name": "mb",
+            "package_version": "0.5.0",
+            "target_kind": ["lib"],
+            "kind": "Host",
+            "deps": [],
+            "outputs": ["[..]/target/debug/deps/libmb-[..].rlib"],
+            "links": {},
+            "program": "rustc",
+            "args": "{...}",
+            "env": "{...}",
+            "cwd": "[..]"
+        },
+        {
+            "package_name": "mb-other",
+            "package_version": "0.0.1",
+            "target_kind": ["lib"],
+            "kind": "Host",
+            "deps": [],
+            "outputs": ["[..]/target/debug/deps/libmb_other-[..].rlib"],
+            "links": {},
+            "program": "rustc",
+            "args": "{...}",
+            "env": "{...}",
+            "cwd": "[..]"
+        },
+        {
+            "package_name": "foo",
+            "package_version": "0.0.1",
+            "target_kind": ["custom-build"],
+            "kind": "Host",
+            "deps": [0, 1],
+            "outputs": ["[..]/target/debug/build/foo-[..]/metabuild_foo-[..][EXE]"],
+            "links": "{...}",
+            "program": "rustc",
+            "args": "{...}",
+            "env": "{...}",
+            "cwd": "[..]"
+        },
+        {
+            "package_name": "foo",
+            "package_version": "0.0.1",
+            "target_kind": ["custom-build"],
+            "kind": "Host",
+            "deps": [2],
+            "outputs": [],
+            "links": {},
+            "program": "[..]/foo/target/debug/build/foo-[..]/metabuild-foo",
+            "args": [],
+            "env": "{...}",
+            "cwd": "[..]"
+        },
+        {
+            "package_name": "foo",
+            "package_version": "0.0.1",
+            "target_kind": ["lib"],
+            "kind": "Host",
+            "deps": [3],
+            "outputs": ["[..]/foo/target/debug/deps/libfoo-[..].rlib"],
+            "links": "{...}",
+            "program": "rustc",
+            "args": "{...}",
+            "env": "{...}",
+            "cwd": "[..]"
+        }
+    ],
+    "inputs": [
+        "[..]/foo/Cargo.toml",
+        "[..]/foo/mb/Cargo.toml",
+        "[..]/foo/mb-other/Cargo.toml"
+    ]
+}
+"#,
+        ).run();
+
+    assert_eq!(
+        glob(
+            &p.root()
+                .join("target/.metabuild/metabuild-foo-*.rs")
+                .to_str()
+                .unwrap()
+        ).unwrap()
+        .count(),
+        1
+    );
+}
+
+#[test]
+fn metabuild_two_versions() {
+    // Two versions of a metabuild dep with the same name.
+    let p = project()
+        .at("ws")
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["member1", "member2"]
+        "#,
+        ).file(
+            "member1/Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "member1"
+            version = "0.0.1"
+            metabuild = ["mb"]
+
+            [build-dependencies]
+            mb = {path="../../mb1"}
+        "#,
+        ).file("member1/src/lib.rs", "")
+        .file(
+            "member2/Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "member2"
+            version = "0.0.1"
+            metabuild = ["mb"]
+
+            [build-dependencies]
+            mb = {path="../../mb2"}
+        "#,
+        ).file("member2/src/lib.rs", "")
+        .build();
+
+    project().at("mb1")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "mb"
+            version = "0.0.1"
+        "#)
+        .file(
+            "src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+        )
+        .build();
+
+    project().at("mb2")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "mb"
+            version = "0.0.2"
+        "#)
+        .file(
+            "src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#,
+        )
+        .build();
+
+    p.cargo("build -vv --all")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1")
+        .with_stdout_contains("[member2 0.0.1] Hello mb2 [..]member2")
+        .run();
+
+    assert_eq!(
+        glob(
+            &p.root()
+                .join("target/.metabuild/metabuild-member?-*.rs")
+                .to_str()
+                .unwrap()
+        ).unwrap()
+        .count(),
+        2
+    );
+}
+
+#[test]
+fn metabuild_external_dependency() {
+    Package::new("mb", "1.0.0")
+        .file("Cargo.toml", &basic_manifest("mb", "1.0.0"))
+        .file(
+            "src/lib.rs",
+            r#"pub fn metabuild() { println!("Hello mb"); }"#,
+        ).publish();
+    Package::new("dep", "1.0.0")
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["metabuild"]
+            [package]
+            name = "dep"
+            version = "1.0.0"
+            metabuild = ["mb"]
+
+            [build-dependencies]
+            mb = "1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build_dep("mb", "1.0.0")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            [dependencies]
+            dep = "1.0"
+            "#,
+        ).file("src/lib.rs", "extern crate dep;")
+        .build();
+
+    p.cargo("build -vv")
+        .masquerade_as_nightly_cargo()
+        .with_stdout_contains("[dep 1.0.0] Hello mb")
+        .run();
+
+    assert_eq!(
+        glob(
+            &p.root()
+                .join("target/.metabuild/metabuild-dep-*.rs")
+                .to_str()
+                .unwrap()
+        ).unwrap()
+        .count(),
+        1
+    );
+}
+
+#[test]
+fn metabuild_json_artifact() {
+    let p = basic_project();
+    p.cargo("build --message-format=json")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
diff --git a/tests/testsuite/metadata.rs b/tests/testsuite/metadata.rs
new file mode 100644 (file)
index 0000000..7abddf7
--- /dev/null
@@ -0,0 +1,1462 @@
+use support::registry::Package;
+use support::{basic_bin_manifest, basic_lib_manifest, main_file, project};
+
+#[test]
+fn cargo_metadata_simple() {
+    let p = project()
+        .file("src/foo.rs", "")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .build();
+
+    p.cargo("metadata")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [
+                    "wycats@example.com"
+                ],
+                "categories": [],
+                "name": "foo",
+                "version": "0.5.0",
+                "id": "foo[..]",
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "edition": "2015",
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "readme": null,
+                "repository": null,
+                "targets": [
+                    {
+                        "kind": [
+                            "bin"
+                        ],
+                        "crate_types": [
+                            "bin"
+                        ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]/foo/src/foo.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+        "resolve": {
+            "nodes": [
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [],
+                    "id": "foo 0.5.0 (path+file:[..]foo)"
+                }
+            ],
+            "root": "foo 0.5.0 (path+file:[..]foo)"
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn cargo_metadata_warns_on_implicit_version() {
+    let p = project()
+        .file("src/foo.rs", "")
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .build();
+
+    p.cargo("metadata").with_stderr("[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems").run();
+
+    p.cargo("metadata --format-version 1").with_stderr("").run();
+}
+
+#[test]
+fn library_with_several_crate_types() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+[package]
+name = "foo"
+version = "0.5.0"
+
+[lib]
+crate-type = ["lib", "staticlib"]
+            "#,
+        ).build();
+
+    p.cargo("metadata")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [],
+                "categories": [],
+                "name": "foo",
+                "readme": null,
+                "repository": null,
+                "version": "0.5.0",
+                "id": "foo[..]",
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "edition": "2015",
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "targets": [
+                    {
+                        "kind": [
+                            "lib",
+                            "staticlib"
+                        ],
+                        "crate_types": [
+                            "lib",
+                            "staticlib"
+                        ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]/foo/src/lib.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+        "resolve": {
+            "nodes": [
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [],
+                    "id": "foo 0.5.0 (path+file:[..]foo)"
+                }
+            ],
+            "root": "foo 0.5.0 (path+file:[..]foo)"
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn library_with_features() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+[package]
+name = "foo"
+version = "0.5.0"
+
+[features]
+default = ["default_feat"]
+default_feat = []
+optional_feat = []
+            "#,
+        ).build();
+
+    p.cargo("metadata")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [],
+                "categories": [],
+                "name": "foo",
+                "readme": null,
+                "repository": null,
+                "version": "0.5.0",
+                "id": "foo[..]",
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "edition": "2015",
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "targets": [
+                    {
+                        "kind": [
+                            "lib"
+                        ],
+                        "crate_types": [
+                            "lib"
+                        ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]/foo/src/lib.rs"
+                    }
+                ],
+                "features": {
+                  "default": [
+                      "default_feat"
+                  ],
+                  "default_feat": [],
+                  "optional_feat": []
+                },
+                "manifest_path": "[..]Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+        "resolve": {
+            "nodes": [
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [
+                      "default",
+                      "default_feat"
+                    ],
+                    "id": "foo 0.5.0 (path+file:[..]foo)"
+                }
+            ],
+            "root": "foo 0.5.0 (path+file:[..]foo)"
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn cargo_metadata_with_deps_and_version() {
+    let p = project()
+        .file("src/foo.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            license = "MIT"
+            description = "foo"
+
+            [[bin]]
+            name = "foo"
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).build();
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish();
+
+    p.cargo("metadata -q --format-version 1")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [],
+                "categories": [],
+                "dependencies": [],
+                "description": null,
+                "features": {},
+                "id": "baz 0.0.1 (registry+[..])",
+                "keywords": [],
+                "manifest_path": "[..]Cargo.toml",
+                "name": "baz",
+                "readme": null,
+                "repository": null,
+                "source": "registry+[..]",
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [
+                            "lib"
+                        ],
+                        "crate_types": [
+                            "lib"
+                        ],
+                        "edition": "2015",
+                        "name": "baz",
+                        "src_path": "[..]lib.rs"
+                    }
+                ],
+                "version": "0.0.1",
+                "metadata": null
+            },
+            {
+                "authors": [],
+                "categories": [],
+                "dependencies": [
+                    {
+                        "features": [],
+                        "kind": null,
+                        "name": "baz",
+                        "optional": false,
+                        "req": "^0.0.1",
+                        "source": "registry+[..]",
+                        "target": null,
+                        "uses_default_features": true,
+                        "rename": null
+                    }
+                ],
+                "features": {},
+                "id": "bar 0.0.1 (registry+[..])",
+                "keywords": [],
+                "manifest_path": "[..]Cargo.toml",
+                "name": "bar",
+                "readme": null,
+                "repository": null,
+                "source": "registry+[..]",
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [
+                            "lib"
+                        ],
+                        "crate_types": [
+                            "lib"
+                        ],
+                        "edition": "2015",
+                        "name": "bar",
+                        "src_path": "[..]lib.rs"
+                    }
+                ],
+                "version": "0.0.1",
+                "metadata": null
+            },
+            {
+                "authors": [],
+                "categories": [],
+                "dependencies": [
+                    {
+                        "features": [],
+                        "kind": null,
+                        "name": "bar",
+                        "optional": false,
+                        "req": "*",
+                        "source": "registry+[..]",
+                        "target": null,
+                        "uses_default_features": true,
+                        "rename": null
+                    }
+                ],
+                "features": {},
+                "id": "foo 0.5.0 (path+file:[..]foo)",
+                "keywords": [],
+                "manifest_path": "[..]Cargo.toml",
+                "name": "foo",
+                "readme": null,
+                "repository": null,
+                "source": null,
+                "license": "MIT",
+                "license_file": null,
+                "description": "foo",
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [
+                            "bin"
+                        ],
+                        "crate_types": [
+                            "bin"
+                        ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]foo.rs"
+                    }
+                ],
+                "version": "0.5.0",
+                "metadata": null
+            }
+        ],
+        "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"],
+        "resolve": {
+            "nodes": [
+                {
+                    "dependencies": [
+                        "bar 0.0.1 (registry+[..])"
+                    ],
+                    "deps": [
+                        { "name": "bar", "pkg": "bar 0.0.1 (registry+[..])" }
+                    ],
+                    "features": [],
+                    "id": "foo 0.5.0 (path+file:[..]foo)"
+                },
+                {
+                    "dependencies": [
+                        "baz 0.0.1 (registry+[..])"
+                    ],
+                    "deps": [
+                        { "name": "baz", "pkg": "baz 0.0.1 (registry+[..])" }
+                    ],
+                    "features": [],
+                    "id": "bar 0.0.1 (registry+[..])"
+                },
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [],
+                    "id": "baz 0.0.1 (registry+[..])"
+                }
+            ],
+            "root": "foo 0.5.0 (path+file:[..]foo)"
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn example() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+[package]
+name = "foo"
+version = "0.1.0"
+
+[[example]]
+name = "ex"
+            "#,
+        ).build();
+
+    p.cargo("metadata")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [],
+                "categories": [],
+                "name": "foo",
+                "readme": null,
+                "repository": null,
+                "version": "0.1.0",
+                "id": "foo[..]",
+                "keywords": [],
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "source": null,
+                "dependencies": [],
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": [ "lib" ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]/foo/src/lib.rs"
+                    },
+                    {
+                        "kind": [ "example" ],
+                        "crate_types": [ "bin" ],
+                        "edition": "2015",
+                        "name": "ex",
+                        "src_path": "[..]/foo/examples/ex.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": [
+            "foo 0.1.0 (path+file:[..]foo)"
+        ],
+        "resolve": {
+            "root": "foo 0.1.0 (path+file://[..]foo)",
+            "nodes": [
+                {
+                    "id": "foo 0.1.0 (path+file:[..]foo)",
+                    "features": [],
+                    "dependencies": [],
+                    "deps": []
+                }
+            ]
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn example_lib() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("examples/ex.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+[package]
+name = "foo"
+version = "0.1.0"
+
+[[example]]
+name = "ex"
+crate-type = ["rlib", "dylib"]
+            "#,
+        ).build();
+
+    p.cargo("metadata")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [],
+                "categories": [],
+                "name": "foo",
+                "readme": null,
+                "repository": null,
+                "version": "0.1.0",
+                "id": "foo[..]",
+                "keywords": [],
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "source": null,
+                "dependencies": [],
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": [ "lib" ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]/foo/src/lib.rs"
+                    },
+                    {
+                        "kind": [ "example" ],
+                        "crate_types": [ "rlib", "dylib" ],
+                        "edition": "2015",
+                        "name": "ex",
+                        "src_path": "[..]/foo/examples/ex.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": [
+            "foo 0.1.0 (path+file:[..]foo)"
+        ],
+        "resolve": {
+            "root": "foo 0.1.0 (path+file://[..]foo)",
+            "nodes": [
+                {
+                    "id": "foo 0.1.0 (path+file:[..]foo)",
+                    "features": [],
+                    "dependencies": [],
+                    "deps": []
+                }
+            ]
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn workspace_metadata() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("metadata")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [
+                    "wycats@example.com"
+                ],
+                "categories": [],
+                "name": "bar",
+                "version": "0.5.0",
+                "id": "bar[..]",
+                "readme": null,
+                "repository": null,
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": [ "lib" ],
+                        "edition": "2015",
+                        "name": "bar",
+                        "src_path": "[..]bar/src/lib.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]bar/Cargo.toml",
+                "metadata": null
+            },
+            {
+                "authors": [
+                    "wycats@example.com"
+                ],
+                "categories": [],
+                "name": "baz",
+                "readme": null,
+                "repository": null,
+                "version": "0.5.0",
+                "id": "baz[..]",
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": [ "lib" ],
+                        "edition": "2015",
+                        "name": "baz",
+                        "src_path": "[..]baz/src/lib.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]baz/Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"],
+        "resolve": {
+            "nodes": [
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [],
+                    "id": "baz 0.5.0 (path+file:[..]baz)"
+                },
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [],
+                    "id": "bar 0.5.0 (path+file:[..]bar)"
+                }
+            ],
+            "root": null
+        },
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn workspace_metadata_no_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("metadata --no-deps")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": [
+                    "wycats@example.com"
+                ],
+                "categories": [],
+                "name": "bar",
+                "readme": null,
+                "repository": null,
+                "version": "0.5.0",
+                "id": "bar[..]",
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": [ "lib" ],
+                        "edition": "2015",
+                        "name": "bar",
+                        "src_path": "[..]bar/src/lib.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]bar/Cargo.toml",
+                "metadata": null
+            },
+            {
+                "authors": [
+                    "wycats@example.com"
+                ],
+                "categories": [],
+                "name": "baz",
+                "readme": null,
+                "repository": null,
+                "version": "0.5.0",
+                "id": "baz[..]",
+                "keywords": [],
+                "source": null,
+                "dependencies": [],
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "edition": "2015",
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": ["lib"],
+                        "edition": "2015",
+                        "name": "baz",
+                        "src_path": "[..]baz/src/lib.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]baz/Cargo.toml",
+                "metadata": null
+            }
+        ],
+        "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"],
+        "resolve": null,
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn cargo_metadata_with_invalid_manifest() {
+    let p = project().file("Cargo.toml", "").build();
+
+    p.cargo("metadata --format-version 1")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  virtual manifests must be configured with [workspace]",
+        ).run();
+}
+
+const MANIFEST_OUTPUT: &str = r#"
+{
+    "packages": [{
+        "authors": [
+            "wycats@example.com"
+        ],
+        "categories": [],
+        "name":"foo",
+        "version":"0.5.0",
+        "id":"foo[..]0.5.0[..](path+file://[..]/foo)",
+        "source":null,
+        "dependencies":[],
+        "keywords": [],
+        "license": null,
+        "license_file": null,
+        "description": null,
+        "edition": "2015",
+        "targets":[{
+            "kind":["bin"],
+            "crate_types":["bin"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]/foo/src/foo.rs"
+        }],
+        "features":{},
+        "manifest_path":"[..]Cargo.toml",
+        "metadata": null,
+        "readme": null,
+        "repository": null
+    }],
+    "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ],
+    "resolve": null,
+    "target_directory": "[..]foo/target",
+    "version": 1,
+    "workspace_root": "[..]/foo"
+}"#;
+
+#[test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_relative() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("metadata --no-deps --manifest-path foo/Cargo.toml")
+        .cwd(p.root().parent().unwrap())
+        .with_json(MANIFEST_OUTPUT)
+        .run();
+}
+
+#[test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("metadata --no-deps --manifest-path")
+        .arg(p.root().join("Cargo.toml"))
+        .cwd(p.root().parent().unwrap())
+        .with_json(MANIFEST_OUTPUT)
+        .run();
+}
+
+#[test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("metadata --no-deps --manifest-path foo")
+        .cwd(p.root().parent().unwrap())
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] the manifest-path must be \
+             a path to a Cargo.toml file",
+        ).run();
+}
+
+#[test]
+fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("metadata --no-deps --manifest-path")
+        .arg(p.root())
+        .cwd(p.root().parent().unwrap())
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] the manifest-path must be \
+             a path to a Cargo.toml file",
+        ).run();
+}
+
+#[test]
+fn cargo_metadata_no_deps_cwd() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("metadata --no-deps")
+        .with_json(MANIFEST_OUTPUT)
+        .run();
+}
+
+#[test]
+fn cargo_metadata_bad_version() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("metadata --no-deps --format-version 2")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: '2' isn't a valid value for '--format-version <VERSION>'
+<tab>[possible values: 1]
+",
+        ).run();
+}
+
+#[test]
+fn multiple_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            a = []
+            b = []
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("metadata --features").arg("a b").run();
+}
+
+#[test]
+fn package_metadata() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = ["wycats@example.com"]
+            categories = ["database"]
+            keywords = ["database"]
+            readme = "README.md"
+            repository = "https://github.com/rust-lang/cargo"
+
+            [package.metadata.bar]
+            baz = "quux"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("metadata --no-deps")
+        .with_json(
+            r#"
+    {
+        "packages": [
+            {
+                "authors": ["wycats@example.com"],
+                "categories": ["database"],
+                "name": "foo",
+                "readme": "README.md",
+                "repository": "https://github.com/rust-lang/cargo",
+                "version": "0.1.0",
+                "id": "foo[..]",
+                "keywords": ["database"],
+                "source": null,
+                "dependencies": [],
+                "edition": "2015",
+                "license": null,
+                "license_file": null,
+                "description": null,
+                "targets": [
+                    {
+                        "kind": [ "lib" ],
+                        "crate_types": [ "lib" ],
+                        "edition": "2015",
+                        "name": "foo",
+                        "src_path": "[..]foo/src/lib.rs"
+                    }
+                ],
+                "features": {},
+                "manifest_path": "[..]foo/Cargo.toml",
+                "metadata": {
+                    "bar": {
+                        "baz": "quux"
+                    }
+                }
+            }
+        ],
+        "workspace_members": ["foo[..]"],
+        "resolve": null,
+        "target_directory": "[..]foo/target",
+        "version": 1,
+        "workspace_root": "[..]/foo"
+    }"#,
+        ).run();
+}
+
+#[test]
+fn cargo_metadata_path_to_cargo_toml_project() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("package --manifest-path")
+        .arg(p.root().join("bar/Cargo.toml"))
+        .cwd(p.root().parent().unwrap())
+        .run();
+
+    p.cargo("metadata --manifest-path")
+        .arg(p.root().join("target/package/bar-0.5.0/Cargo.toml"))
+        .with_json(
+            r#"
+        {
+            "packages": [
+            {
+                "authors": [
+                    "wycats@example.com"
+                ],
+                "categories": [],
+                "dependencies": [],
+                "description": null,
+                "edition": "2015",
+                "features": {},
+                "id": "bar 0.5.0 ([..])",
+                "keywords": [],
+                "license": null,
+                "license_file": null,
+                "manifest_path": "[..]Cargo.toml",
+                "metadata": null,
+                "name": "bar",
+                "readme": null,
+                "repository": null,
+                "source": null,
+                "targets": [
+                {
+                    "crate_types": [
+                        "lib"
+                    ],
+                    "edition": "2015",
+                    "kind": [
+                        "lib"
+                    ],
+                    "name": "bar",
+                    "src_path": "[..]src/lib.rs"
+                }
+                ],
+                "version": "0.5.0"
+            }
+            ],
+            "resolve": {
+                "nodes": [
+                {
+                    "dependencies": [],
+                    "deps": [],
+                    "features": [],
+                    "id": "bar 0.5.0 ([..])"
+                }
+                ],
+                "root": "bar 0.5.0 (path+file:[..])"
+            },
+            "target_directory": "[..]",
+            "version": 1,
+            "workspace_members": [
+                "bar 0.5.0 (path+file:[..])"
+            ],
+            "workspace_root": "[..]"
+        }
+"#,
+        ).run();
+}
+
+#[test]
+fn package_edition_2018() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["edition"]
+
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = ["wycats@example.com"]
+            edition = "2018"
+        "#,
+        ).build();
+    p.cargo("metadata")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+        {
+            "packages": [
+                {
+                    "authors": [
+                        "wycats@example.com"
+                    ],
+                    "categories": [],
+                    "dependencies": [],
+                    "description": null,
+                    "edition": "2018",
+                    "features": {},
+                    "id": "foo 0.1.0 (path+file:[..])",
+                    "keywords": [],
+                    "license": null,
+                    "license_file": null,
+                    "manifest_path": "[..]Cargo.toml",
+                    "metadata": null,
+                    "name": "foo",
+                    "readme": null,
+                    "repository": null,
+                    "source": null,
+                    "targets": [
+                        {
+                            "crate_types": [
+                                "lib"
+                            ],
+                            "edition": "2018",
+                            "kind": [
+                                "lib"
+                            ],
+                            "name": "foo",
+                            "src_path": "[..]src/lib.rs"
+                        }
+                    ],
+                    "version": "0.1.0"
+                }
+            ],
+            "resolve": {
+                "nodes": [
+                    {
+                        "dependencies": [],
+                        "deps": [],
+                        "features": [],
+                        "id": "foo 0.1.0 (path+file:[..])"
+                    }
+                ],
+                "root": "foo 0.1.0 (path+file:[..])"
+            },
+            "target_directory": "[..]",
+            "version": 1,
+            "workspace_members": [
+                "foo 0.1.0 (path+file:[..])"
+            ],
+            "workspace_root": "[..]"
+        }
+        "#,
+        ).run();
+}
+
+#[test]
+fn target_edition_2018() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "")
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["edition"]
+
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = ["wycats@example.com"]
+            edition = "2015"
+
+            [lib]
+            edition = "2018"
+        "#,
+        ).build();
+    p.cargo("metadata")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+        {
+            "packages": [
+                {
+                    "authors": [
+                        "wycats@example.com"
+                    ],
+                    "categories": [],
+                    "dependencies": [],
+                    "description": null,
+                    "edition": "2015",
+                    "features": {},
+                    "id": "foo 0.1.0 (path+file:[..])",
+                    "keywords": [],
+                    "license": null,
+                    "license_file": null,
+                    "manifest_path": "[..]Cargo.toml",
+                    "metadata": null,
+                    "name": "foo",
+                    "readme": null,
+                    "repository": null,
+                    "source": null,
+                    "targets": [
+                        {
+                            "crate_types": [
+                                "lib"
+                            ],
+                            "edition": "2018",
+                            "kind": [
+                                "lib"
+                            ],
+                            "name": "foo",
+                            "src_path": "[..]src/lib.rs"
+                        },
+                        {
+                            "crate_types": [
+                                "bin"
+                            ],
+                            "edition": "2015",
+                            "kind": [
+                                "bin"
+                            ],
+                            "name": "foo",
+                            "src_path": "[..]src/main.rs"
+                        }
+                    ],
+                    "version": "0.1.0"
+                }
+            ],
+            "resolve": {
+                "nodes": [
+                    {
+                        "dependencies": [],
+                        "deps": [],
+                        "features": [],
+                        "id": "foo 0.1.0 (path+file:[..])"
+                    }
+                ],
+                "root": "foo 0.1.0 (path+file:[..])"
+            },
+            "target_directory": "[..]",
+            "version": 1,
+            "workspace_members": [
+                "foo 0.1.0 (path+file:[..])"
+            ],
+            "workspace_root": "[..]"
+        }
+        "#,
+        ).run();
+}
+
+#[test]
+fn rename_dependency() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["rename-dependency"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { version = "0.1.0" }
+            baz = { version = "0.2.0", package = "bar" }
+        "#,
+        ).file("src/lib.rs", "extern crate bar; extern crate baz;")
+        .build();
+
+    p.cargo("metadata")
+        .masquerade_as_nightly_cargo()
+        .with_json(
+            r#"
+{
+    "packages": [
+        {
+            "authors": [],
+            "categories": [],
+            "dependencies": [
+                {
+                    "features": [],
+                    "kind": null,
+                    "name": "bar",
+                    "optional": false,
+                    "rename": null,
+                    "req": "^0.1.0",
+                    "source": "registry+https://github.com/rust-lang/crates.io-index",
+                    "target": null,
+                    "uses_default_features": true
+                },
+                {
+                    "features": [],
+                    "kind": null,
+                    "name": "bar",
+                    "optional": false,
+                    "rename": "baz",
+                    "req": "^0.2.0",
+                    "source": "registry+https://github.com/rust-lang/crates.io-index",
+                    "target": null,
+                    "uses_default_features": true
+                }
+            ],
+            "description": null,
+            "edition": "2015",
+            "features": {},
+            "id": "foo 0.0.1[..]",
+            "keywords": [],
+            "license": null,
+            "license_file": null,
+            "manifest_path": "[..]",
+            "metadata": null,
+            "name": "foo",
+            "readme": null,
+            "repository": null,
+            "source": null,
+            "targets": [
+                {
+                    "crate_types": [
+                        "lib"
+                    ],
+                    "edition": "2015",
+                    "kind": [
+                        "lib"
+                    ],
+                    "name": "foo",
+                    "src_path": "[..]"
+                }
+            ],
+            "version": "0.0.1"
+        },
+        {
+            "authors": [],
+            "categories": [],
+            "dependencies": [],
+            "description": null,
+            "edition": "2015",
+            "features": {},
+            "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            "keywords": [],
+            "license": null,
+            "license_file": null,
+            "manifest_path": "[..]",
+            "metadata": null,
+            "name": "bar",
+            "readme": null,
+            "repository": null,
+            "source": "registry+https://github.com/rust-lang/crates.io-index",
+            "targets": [
+                {
+                    "crate_types": [
+                        "lib"
+                    ],
+                    "edition": "2015",
+                    "kind": [
+                        "lib"
+                    ],
+                    "name": "bar",
+                    "src_path": "[..]"
+                }
+            ],
+            "version": "0.1.0"
+        },
+        {
+            "authors": [],
+            "categories": [],
+            "dependencies": [],
+            "description": null,
+            "edition": "2015",
+            "features": {},
+            "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            "keywords": [],
+            "license": null,
+            "license_file": null,
+            "manifest_path": "[..]",
+            "metadata": null,
+            "name": "bar",
+            "readme": null,
+            "repository": null,
+            "source": "registry+https://github.com/rust-lang/crates.io-index",
+            "targets": [
+                {
+                    "crate_types": [
+                        "lib"
+                    ],
+                    "edition": "2015",
+                    "kind": [
+                        "lib"
+                    ],
+                    "name": "bar",
+                    "src_path": "[..]"
+                }
+            ],
+            "version": "0.2.0"
+        }
+    ],
+    "resolve": {
+        "nodes": [
+            {
+                "dependencies": [],
+                "deps": [],
+                "features": [],
+                "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+            },
+            {
+                "dependencies": [],
+                "deps": [],
+                "features": [],
+                "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)"
+            },
+            {
+                "dependencies": [
+                    "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+                    "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+                ],
+                "deps": [
+                    {
+                        "name": "bar",
+                        "pkg": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)"
+                    },
+                    {
+                        "name": "baz",
+                        "pkg": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+                    }
+                ],
+                "features": [],
+                "id": "foo 0.0.1[..]"
+            }
+        ],
+        "root": "foo 0.0.1[..]"
+    },
+    "target_directory": "[..]",
+    "version": 1,
+    "workspace_members": [
+        "foo 0.0.1[..]"
+    ],
+    "workspace_root": "[..]"
+}"#,
+        ).run();
+}
diff --git a/tests/testsuite/net_config.rs b/tests/testsuite/net_config.rs
new file mode 100644 (file)
index 0000000..afcf7c5
--- /dev/null
@@ -0,0 +1,66 @@
+use support::project;
+
+#[test]
+fn net_retry_loads_from_config() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            git = "https://127.0.0.1:11/foo/bar"
+        "#,
+        ).file("src/main.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+        [net]
+        retry=1
+        [http]
+        timeout=1
+         "#,
+        ).build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "[WARNING] spurious network error \
+             (1 tries remaining): [..]",
+        ).run();
+}
+
+#[test]
+fn net_retry_git_outputs_warning() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            git = "https://127.0.0.1:11/foo/bar"
+        "#,
+        ).file(
+            ".cargo/config",
+            r#"
+        [http]
+        timeout=1
+         "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("build -v -j 1")
+        .with_status(101)
+        .with_stderr_contains(
+            "[WARNING] spurious network error \
+             (2 tries remaining): [..]",
+        ).with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]")
+        .run();
+}
diff --git a/tests/testsuite/new.rs b/tests/testsuite/new.rs
new file mode 100644 (file)
index 0000000..1cdabf7
--- /dev/null
@@ -0,0 +1,493 @@
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use support::paths;
+use support::{cargo_process, git_process};
+
+fn create_empty_gitconfig() {
+    // This helps on Windows where libgit2 is very aggressive in attempting to
+    // find a git config file.
+    let gitconfig = paths::home().join(".gitconfig");
+    File::create(gitconfig).unwrap();
+}
+
+#[test]
+fn simple_lib() {
+    cargo_process("new --lib foo --vcs none --edition 2015")
+        .env("USER", "foo")
+        .with_stderr("[CREATED] library `foo` package")
+        .run();
+
+    assert!(paths::root().join("foo").is_dir());
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+    assert!(paths::root().join("foo/src/lib.rs").is_file());
+    assert!(!paths::root().join("foo/.gitignore").is_file());
+
+    let lib = paths::root().join("foo/src/lib.rs");
+    let mut contents = String::new();
+    File::open(&lib)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert_eq!(
+        contents,
+        r#"#[cfg(test)]
+mod tests {
+    #[test]
+    fn it_works() {
+        assert_eq!(2 + 2, 4);
+    }
+}
+"#
+    );
+
+    cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[test]
+fn simple_bin() {
+    cargo_process("new --bin foo --edition 2015")
+        .env("USER", "foo")
+        .with_stderr("[CREATED] binary (application) `foo` package")
+        .run();
+
+    assert!(paths::root().join("foo").is_dir());
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+    assert!(paths::root().join("foo/src/main.rs").is_file());
+
+    cargo_process("build").cwd(&paths::root().join("foo")).run();
+    assert!(
+        paths::root()
+            .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX))
+            .is_file()
+    );
+}
+
+#[test]
+fn both_lib_and_bin() {
+    cargo_process("new --lib --bin foo")
+        .env("USER", "foo")
+        .with_status(101)
+        .with_stderr("[ERROR] can't specify both lib and binary outputs")
+        .run();
+}
+
+#[test]
+fn simple_git() {
+    cargo_process("new --lib foo --edition 2015").env("USER", "foo").run();
+
+    assert!(paths::root().is_dir());
+    assert!(paths::root().join("foo/Cargo.toml").is_file());
+    assert!(paths::root().join("foo/src/lib.rs").is_file());
+    assert!(paths::root().join("foo/.git").is_dir());
+    assert!(paths::root().join("foo/.gitignore").is_file());
+
+    cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[test]
+fn no_argument() {
+    cargo_process("new")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: The following required arguments were not provided:
+    <path>
+",
+        ).run();
+}
+
+#[test]
+fn existing() {
+    let dst = paths::root().join("foo");
+    fs::create_dir(&dst).unwrap();
+    cargo_process("new foo")
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] destination `[CWD]/foo` already exists\n\n\
+             Use `cargo init` to initialize the directory",
+        ).run();
+}
+
+#[test]
+fn invalid_characters() {
+    cargo_process("new foo.rs")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] Invalid character `.` in crate name: `foo.rs`
+use --name to override crate name",
+        ).run();
+}
+
+#[test]
+fn reserved_name() {
+    cargo_process("new test")
+        .with_status(101)
+        .with_stderr(
+            "\
+             [ERROR] The name `test` cannot be used as a crate name\n\
+             use --name to override crate name",
+        ).run();
+}
+
+#[test]
+fn reserved_binary_name() {
+    cargo_process("new --bin incremental")
+        .with_status(101)
+        .with_stderr(
+            "\
+             [ERROR] The name `incremental` cannot be used as a crate name\n\
+             use --name to override crate name",
+        ).run();
+}
+
+#[test]
+fn keyword_name() {
+    cargo_process("new pub")
+        .with_status(101)
+        .with_stderr(
+            "\
+             [ERROR] The name `pub` cannot be used as a crate name\n\
+             use --name to override crate name",
+        ).run();
+}
+
+#[test]
+fn finds_author_user() {
+    create_empty_gitconfig();
+    cargo_process("new foo").env("USER", "foo").run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[test]
+fn finds_author_user_escaped() {
+    create_empty_gitconfig();
+    cargo_process("new foo").env("USER", "foo \"bar\"").run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["foo \"bar\""]"#));
+}
+
+#[test]
+fn finds_author_username() {
+    create_empty_gitconfig();
+    cargo_process("new foo")
+        .env_remove("USER")
+        .env("USERNAME", "foo")
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[test]
+fn finds_author_priority() {
+    cargo_process("new foo")
+        .env("USER", "bar2")
+        .env("EMAIL", "baz2")
+        .env("CARGO_NAME", "bar")
+        .env("CARGO_EMAIL", "baz")
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_author_email() {
+    create_empty_gitconfig();
+    cargo_process("new foo")
+        .env("USER", "bar")
+        .env("EMAIL", "baz")
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_author_git() {
+    git_process("config --global user.name bar").exec().unwrap();
+    git_process("config --global user.email baz")
+        .exec()
+        .unwrap();
+    cargo_process("new foo").env("USER", "foo").run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_local_author_git() {
+    git_process("init").exec().unwrap();
+    git_process("config --global user.name foo").exec().unwrap();
+    git_process("config --global user.email foo@bar")
+        .exec()
+        .unwrap();
+
+    // Set local git user config
+    git_process("config user.name bar").exec().unwrap();
+    git_process("config user.email baz").exec().unwrap();
+    cargo_process("init").env("USER", "foo").run();
+
+    let toml = paths::root().join("Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[test]
+fn finds_git_email() {
+    cargo_process("new foo")
+        .env("GIT_AUTHOR_NAME", "foo")
+        .env("GIT_AUTHOR_EMAIL", "gitfoo")
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["foo <gitfoo>"]"#), contents);
+}
+
+#[test]
+fn finds_git_author() {
+    create_empty_gitconfig();
+    cargo_process("new foo")
+        .env_remove("USER")
+        .env("GIT_COMMITTER_NAME", "gitfoo")
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["gitfoo"]"#));
+}
+
+#[test]
+fn author_prefers_cargo() {
+    git_process("config --global user.name foo").exec().unwrap();
+    git_process("config --global user.email bar")
+        .exec()
+        .unwrap();
+    let root = paths::root();
+    fs::create_dir(&root.join(".cargo")).unwrap();
+    File::create(&root.join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            br#"
+        [cargo-new]
+        name = "new-foo"
+        email = "new-bar"
+        vcs = "none"
+    "#,
+        ).unwrap();
+
+    cargo_process("new foo").env("USER", "foo").run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["new-foo <new-bar>"]"#));
+    assert!(!root.join("foo/.gitignore").exists());
+}
+
+#[test]
+fn git_prefers_command_line() {
+    let root = paths::root();
+    fs::create_dir(&root.join(".cargo")).unwrap();
+    File::create(&root.join(".cargo/config"))
+        .unwrap()
+        .write_all(
+            br#"
+        [cargo-new]
+        vcs = "none"
+        name = "foo"
+        email = "bar"
+    "#,
+        ).unwrap();
+
+    cargo_process("new foo --vcs git").env("USER", "foo").run();
+    assert!(paths::root().join("foo/.gitignore").exists());
+}
+
+#[test]
+fn subpackage_no_git() {
+    cargo_process("new foo").env("USER", "foo").run();
+
+    assert!(paths::root().join("foo/.git").is_dir());
+    assert!(paths::root().join("foo/.gitignore").is_file());
+
+    let subpackage = paths::root().join("foo").join("components");
+    fs::create_dir(&subpackage).unwrap();
+    cargo_process("new foo/components/subcomponent")
+        .env("USER", "foo")
+        .run();
+
+    assert!(
+        !paths::root()
+            .join("foo/components/subcomponent/.git")
+            .is_file()
+    );
+    assert!(
+        !paths::root()
+            .join("foo/components/subcomponent/.gitignore")
+            .is_file()
+    );
+}
+
+#[test]
+fn subpackage_git_with_gitignore() {
+    cargo_process("new foo").env("USER", "foo").run();
+
+    assert!(paths::root().join("foo/.git").is_dir());
+    assert!(paths::root().join("foo/.gitignore").is_file());
+
+    let gitignore = paths::root().join("foo/.gitignore");
+    fs::write(gitignore, b"components").unwrap();
+
+    let subpackage = paths::root().join("foo/components");
+    fs::create_dir(&subpackage).unwrap();
+    cargo_process("new foo/components/subcomponent")
+        .env("USER", "foo")
+        .run();
+
+    assert!(
+        paths::root()
+            .join("foo/components/subcomponent/.git")
+            .is_dir()
+    );
+    assert!(
+        paths::root()
+            .join("foo/components/subcomponent/.gitignore")
+            .is_file()
+    );
+}
+
+#[test]
+fn subpackage_git_with_vcs_arg() {
+    cargo_process("new foo").env("USER", "foo").run();
+
+    let subpackage = paths::root().join("foo").join("components");
+    fs::create_dir(&subpackage).unwrap();
+    cargo_process("new foo/components/subcomponent --vcs git")
+        .env("USER", "foo")
+        .run();
+
+    assert!(
+        paths::root()
+            .join("foo/components/subcomponent/.git")
+            .is_dir()
+    );
+    assert!(
+        paths::root()
+            .join("foo/components/subcomponent/.gitignore")
+            .is_file()
+    );
+}
+
+#[test]
+fn unknown_flags() {
+    cargo_process("new foo --flag")
+        .with_status(1)
+        .with_stderr_contains(
+            "error: Found argument '--flag' which wasn't expected, or isn't valid in this context",
+        ).run();
+}
+
+#[test]
+fn explicit_invalid_name_not_suggested() {
+    cargo_process("new --name 10-invalid a")
+        .with_status(101)
+        .with_stderr("[ERROR] Package names starting with a digit cannot be used as a crate name")
+        .run();
+}
+
+#[test]
+fn explicit_project_name() {
+    cargo_process("new --lib foo --name bar")
+        .env("USER", "foo")
+        .with_stderr("[CREATED] library `bar` package")
+        .run();
+}
+
+#[test]
+fn new_with_edition_2015() {
+    cargo_process("new --edition 2015 foo")
+        .env("USER", "foo")
+        .run();
+    let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+    assert!(manifest.contains("edition = \"2015\""));
+}
+
+#[test]
+fn new_with_edition_2018() {
+    cargo_process("new --edition 2018 foo")
+        .env("USER", "foo")
+        .run();
+    let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+    assert!(manifest.contains("edition = \"2018\""));
+}
+
+#[test]
+fn new_default_edition() {
+    cargo_process("new foo")
+        .env("USER", "foo")
+        .run();
+    let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap();
+    assert!(manifest.contains("edition = \"2018\""));
+}
+
+#[test]
+fn new_with_bad_edition() {
+    cargo_process("new --edition something_else foo")
+        .env("USER", "foo")
+        .with_stderr_contains("error: 'something_else' isn't a valid value[..]")
+        .with_status(1)
+        .run();
+}
diff --git a/tests/testsuite/out_dir.rs b/tests/testsuite/out_dir.rs
new file mode 100644 (file)
index 0000000..2700e52
--- /dev/null
@@ -0,0 +1,234 @@
+use std::env;
+use std::fs::{self, File};
+use std::path::Path;
+
+use support::sleep_ms;
+use support::{basic_manifest, project};
+
+#[test]
+fn binary_with_debug() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#)
+        .build();
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    check_dir_contents(
+        &p.root().join("out"),
+        &["foo"],
+        &["foo", "foo.dSYM"],
+        &["foo.exe", "foo.pdb"],
+    );
+}
+
+#[test]
+fn static_library_with_debug() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            crate-type = ["staticlib"]
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[no_mangle]
+            pub extern "C" fn foo() { println!("Hello, World!") }
+        "#,
+        ).build();
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    check_dir_contents(
+        &p.root().join("out"),
+        &["libfoo.a"],
+        &["libfoo.a"],
+        &["foo.lib"],
+    );
+}
+
+#[test]
+fn dynamic_library_with_debug() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            crate-type = ["cdylib"]
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[no_mangle]
+            pub extern "C" fn foo() { println!("Hello, World!") }
+        "#,
+        ).build();
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    check_dir_contents(
+        &p.root().join("out"),
+        &["libfoo.so"],
+        &["libfoo.dylib"],
+        &["foo.dll", "foo.dll.lib"],
+    );
+}
+
+#[test]
+fn rlib_with_debug() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            crate-type = ["rlib"]
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn foo() { println!("Hello, World!") }
+        "#,
+        ).build();
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    check_dir_contents(
+        &p.root().join("out"),
+        &["libfoo.rlib"],
+        &["libfoo.rlib"],
+        &["libfoo.rlib"],
+    );
+}
+
+#[test]
+fn include_only_the_binary_from_the_current_package() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            utils = { path = "./utils" }
+        "#,
+        ).file("src/lib.rs", "extern crate utils;")
+        .file(
+            "src/main.rs",
+            r#"
+            extern crate foo;
+            extern crate utils;
+            fn main() {
+                println!("Hello, World!")
+            }
+        "#,
+        ).file("utils/Cargo.toml", &basic_manifest("utils", "0.0.1"))
+        .file("utils/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -Z unstable-options --bin foo --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    check_dir_contents(
+        &p.root().join("out"),
+        &["foo"],
+        &["foo", "foo.dSYM"],
+        &["foo.exe", "foo.pdb"],
+    );
+}
+
+#[test]
+fn out_dir_is_a_file() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#)
+        .build();
+    File::create(p.root().join("out")).unwrap();
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains("[ERROR] failed to link or copy [..]")
+        .run();
+}
+
+#[test]
+fn replaces_artifacts() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("foo") }"#)
+        .build();
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    p.process(
+        &p.root()
+            .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)),
+    ).with_stdout("foo")
+    .run();
+
+    sleep_ms(1000);
+    p.change_file("src/main.rs", r#"fn main() { println!("bar") }"#);
+
+    p.cargo("build -Z unstable-options --out-dir out")
+        .masquerade_as_nightly_cargo()
+        .run();
+    p.process(
+        &p.root()
+            .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)),
+    ).with_stdout("bar")
+    .run();
+}
+
+fn check_dir_contents(
+    out_dir: &Path,
+    expected_linux: &[&str],
+    expected_mac: &[&str],
+    expected_win: &[&str],
+) {
+    let expected = if cfg!(target_os = "windows") {
+        expected_win
+    } else if cfg!(target_os = "macos") {
+        expected_mac
+    } else {
+        expected_linux
+    };
+
+    let actual = list_dir(out_dir);
+    let mut expected = expected.iter().map(|s| s.to_string()).collect::<Vec<_>>();
+    expected.sort_unstable();
+    assert_eq!(actual, expected);
+}
+
+fn list_dir(dir: &Path) -> Vec<String> {
+    let mut res = Vec::new();
+    for entry in fs::read_dir(dir).unwrap() {
+        let entry = entry.unwrap();
+        res.push(entry.file_name().into_string().unwrap());
+    }
+    res.sort_unstable();
+    res
+}
diff --git a/tests/testsuite/overrides.rs b/tests/testsuite/overrides.rs
new file mode 100644 (file)
index 0000000..bb5e88f
--- /dev/null
@@ -0,0 +1,1335 @@
+use support::git;
+use support::paths;
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+#[test]
+fn override_simple() {
+    Package::new("bar", "0.1.0").publish();
+
+    let bar = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#,
+                bar.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[UPDATING] git repository `[..]`
+[COMPILING] bar v0.1.0 (file://[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn missing_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            bar = { git = 'https://example.com' }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements must specify a version to replace, but `[..]bar` does not
+",
+        ).run();
+}
+
+#[test]
+fn invalid_semver_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+
+            [replace]
+            "bar:*" = { git = 'https://example.com' }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements must specify a valid semver version to replace, but `bar:*` does not
+",
+        ).run();
+}
+
+#[test]
+fn different_version() {
+    Package::new("bar", "0.2.0").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = "0.2.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements cannot specify a version requirement, but found one for [..]
+",
+        ).run();
+}
+
+#[test]
+fn transitive() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("baz", "0.2.0")
+        .dep("bar", "0.1.0")
+        .file("src/lib.rs", "extern crate bar; fn baz() { bar::bar(); }")
+        .publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            baz = "0.2.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[UPDATING] git repository `[..]`
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.2.0 (registry [..])
+[COMPILING] bar v0.1.0 (file://[..])
+[COMPILING] baz v0.2.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn persists_across_rebuilds() {
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[UPDATING] git repository `file://[..]`
+[COMPILING] bar v0.1.0 (file://[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn replace_registry_with_path() {
+    Package::new("bar", "0.1.0").publish();
+
+    let _ = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = { path = "../bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[COMPILING] bar v0.1.0 ([ROOT][..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn use_a_spec_to_select() {
+    Package::new("baz", "0.1.1")
+        .file("src/lib.rs", "pub fn baz1() {}")
+        .publish();
+    Package::new("baz", "0.2.0").publish();
+    Package::new("bar", "0.1.1")
+        .dep("baz", "0.2")
+        .file(
+            "src/lib.rs",
+            "extern crate baz; pub fn bar() { baz::baz3(); }",
+        ).publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("baz", "0.2.0"))
+        .file("src/lib.rs", "pub fn baz3() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+            baz = "0.1"
+
+            [replace]
+            "baz:0.2.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "
+            extern crate bar;
+            extern crate baz;
+
+            pub fn local() {
+                baz::baz1();
+                bar::bar();
+            }
+        ",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[UPDATING] git repository `[..]`
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[DOWNLOADED] [..]
+[COMPILING] [..]
+[COMPILING] [..]
+[COMPILING] [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn override_adds_some_deps() {
+    Package::new("baz", "0.1.1").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = "0.1"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[UPDATING] git repository `[..]`
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.1 (registry [..])
+[COMPILING] baz v0.1.1
+[COMPILING] bar v0.1.0 ([..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build").with_stdout("").run();
+
+    Package::new("baz", "0.1.2").publish();
+    p.cargo("update -p")
+        .arg(&format!("{}#bar", foo.url()))
+        .with_stderr(
+            "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `[ROOT][..]` index
+",
+        ).run();
+    p.cargo("update -p https://github.com/rust-lang/crates.io-index#bar")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+",
+        ).run();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn locked_means_locked_yes_no_seriously_i_mean_locked() {
+    // this in theory exercises #2041
+    Package::new("baz", "0.1.0").publish();
+    Package::new("baz", "0.2.0").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+            baz = "0.1"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    p.cargo("build").with_stdout("").run();
+    p.cargo("build").with_stdout("").run();
+}
+
+#[test]
+fn override_wrong_name() {
+    Package::new("baz", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            baz = "0.1"
+
+            [replace]
+            "baz:0.1.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[UPDATING] git repository [..]
+error: no matching package for override `[..]baz:0.1.0` found
+location searched: file://[..]
+version required: = 0.1.0
+",
+        ).run();
+}
+
+#[test]
+fn override_with_nothing() {
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[UPDATING] git repository [..]
+[ERROR] failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update file://[..]
+
+Caused by:
+  Could not find Cargo.toml in `[..]`
+",
+        ).run();
+}
+
+#[test]
+fn override_wrong_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [replace]
+            "bar:0.1.0" = { git = 'https://example.com', version = '0.2.0' }
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements cannot specify a version requirement, but found one for `[..]bar:0.1.0`
+",
+        ).run();
+}
+
+#[test]
+fn multiple_specs() {
+    Package::new("bar", "0.1.0").publish();
+
+    let bar = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{0}' }}
+
+            [replace."https://github.com/rust-lang/crates.io-index#bar:0.1.0"]
+            git = '{0}'
+        "#,
+                bar.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[UPDATING] git repository [..]
+error: overlapping replacement specifications found:
+
+  * [..]
+  * [..]
+
+both specifications match: bar v0.1.0
+",
+        ).run();
+}
+
+#[test]
+fn test_override_dep() {
+    Package::new("bar", "0.1.0").publish();
+
+    let bar = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{0}' }}
+        "#,
+                bar.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("test -p bar")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: There are multiple `bar` packages in your project, and the [..]
+Please re-run this command with [..]
+  [..]#bar:0.1.0
+  [..]#bar:0.1.0
+",
+        ).run();
+}
+
+#[test]
+fn update() {
+    Package::new("bar", "0.1.0").publish();
+
+    let bar = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{0}' }}
+        "#,
+                bar.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("generate-lockfile").run();
+    p.cargo("update")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] git repository `[..]`
+",
+        ).run();
+}
+
+// foo -> near -> far
+// near is overridden with itself
+#[test]
+fn no_override_self() {
+    let deps = git::repo(&paths::root().join("override"))
+        .file("far/Cargo.toml", &basic_manifest("far", "0.1.0"))
+        .file("far/src/lib.rs", "")
+        .file(
+            "near/Cargo.toml",
+            r#"
+            [package]
+            name = "near"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            far = { path = "../far" }
+        "#,
+        ).file("near/src/lib.rs", "#![no_std] pub extern crate far;")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            near = {{ git = '{0}' }}
+
+            [replace]
+            "near:0.1.0" = {{ git = '{0}' }}
+        "#,
+                deps.url()
+            ),
+        ).file("src/lib.rs", "#![no_std] pub extern crate near;")
+        .build();
+
+    p.cargo("build --verbose").run();
+}
+
+#[test]
+fn broken_path_override_warns() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a1" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a1/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+        "#,
+        ).file("a1/src/lib.rs", "")
+        .file(
+            "a2/Cargo.toml",
+            r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.2"
+        "#,
+        ).file("a2/src/lib.rs", "")
+        .file(".cargo/config", r#"paths = ["a2"]"#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] [..]
+warning: path override for crate `a` has altered the original list of
+dependencies; the dependency on `bar` was either added or
+modified to not match the previously resolved version
+
+This is currently allowed but is known to produce buggy behavior with spurious
+recompiles and changes to the crate graph. Path overrides unfortunately were
+never intended to support this feature, so for now this message is just a
+warning. In the future, however, this message will become a hard error.
+
+To change the dependency graph via an override it's recommended to use the
+`[replace]` feature of Cargo instead of the path override feature. This is
+documented online at the url below for more information.
+
+https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies
+
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[COMPILING] [..]
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn override_an_override() {
+    Package::new("chrono", "0.2.0")
+        .dep("serde", "< 0.9")
+        .publish();
+    Package::new("serde", "0.7.0")
+        .file("src/lib.rs", "pub fn serde07() {}")
+        .publish();
+    Package::new("serde", "0.8.0")
+        .file("src/lib.rs", "pub fn serde08() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            chrono = "0.2"
+            serde = "0.8"
+
+            [replace]
+            "chrono:0.2.0" = { path = "chrono" }
+            "serde:0.8.0" = { path = "serde" }
+        "#,
+        ).file(
+            "Cargo.lock",
+            r#"
+            [[package]]
+            name = "foo"
+            version = "0.0.1"
+            dependencies = [
+             "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+             "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            ]
+
+            [[package]]
+            name = "chrono"
+            version = "0.2.0"
+            source = "registry+https://github.com/rust-lang/crates.io-index"
+            replace = "chrono 0.2.0"
+
+            [[package]]
+            name = "chrono"
+            version = "0.2.0"
+            dependencies = [
+             "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+            ]
+
+            [[package]]
+            name = "serde"
+            version = "0.7.0"
+            source = "registry+https://github.com/rust-lang/crates.io-index"
+
+            [[package]]
+            name = "serde"
+            version = "0.8.0"
+            source = "registry+https://github.com/rust-lang/crates.io-index"
+            replace = "serde 0.8.0"
+
+            [[package]]
+            name = "serde"
+            version = "0.8.0"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "
+            extern crate chrono;
+            extern crate serde;
+
+            pub fn foo() {
+                chrono::chrono();
+                serde::serde08_override();
+            }
+        ",
+        ).file(
+            "chrono/Cargo.toml",
+            r#"
+            [package]
+            name = "chrono"
+            version = "0.2.0"
+            authors = []
+
+            [dependencies]
+            serde = "< 0.9"
+        "#,
+        ).file(
+            "chrono/src/lib.rs",
+            "
+            extern crate serde;
+            pub fn chrono() {
+                serde::serde07();
+            }
+        ",
+        ).file("serde/Cargo.toml", &basic_manifest("serde", "0.8.0"))
+        .file("serde/src/lib.rs", "pub fn serde08_override() {}")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn overriding_nonexistent_no_spurious() {
+    Package::new("bar", "0.1.0").dep("baz", "0.1").publish();
+    Package::new("baz", "0.1.0").publish();
+
+    let bar = git::repo(&paths::root().join("override"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { path = "baz" }
+        "#,
+        ).file("src/lib.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{url}' }}
+            "baz:0.1.0" = {{ git = '{url}' }}
+        "#,
+                url = bar.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build")
+        .with_stderr(
+            "\
+[WARNING] package replacement is not used: [..]baz:0.1.0
+[FINISHED] [..]
+",
+        ).with_stdout("")
+        .run();
+}
+
+#[test]
+fn no_warnings_when_replace_is_used_in_another_workspace_member() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = [ "first_crate", "second_crate"]
+
+            [replace]
+            "bar:0.1.0" = { path = "local_bar" }"#,
+        ).file(
+            "first_crate/Cargo.toml",
+            r#"
+            [package]
+            name = "first_crate"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("first_crate/src/lib.rs", "")
+        .file(
+            "second_crate/Cargo.toml",
+            &basic_manifest("second_crate", "0.1.0"),
+        ).file("second_crate/src/lib.rs", "")
+        .file("local_bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("local_bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .cwd(p.root().join("first_crate"))
+        .with_stdout("")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[COMPILING] bar v0.1.0 ([..])
+[COMPILING] first_crate v0.1.0 ([..])
+[FINISHED] [..]",
+        ).run();
+
+    p.cargo("build")
+        .cwd(p.root().join("second_crate"))
+        .with_stdout("")
+        .with_stderr(
+            "\
+[COMPILING] second_crate v0.1.0 ([..])
+[FINISHED] [..]",
+        ).run();
+}
+
+#[test]
+fn override_to_path_dep() {
+    Package::new("bar", "0.1.0").dep("baz", "0.1").publish();
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            baz = { path = "baz" }
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("bar/baz/src/lib.rs", "")
+        .file(".cargo/config", r#"paths = ["bar"]"#)
+        .build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn replace_to_path_dep() {
+    Package::new("bar", "0.1.0").dep("baz", "0.1").publish();
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [replace]
+            "bar:0.1.0" = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "extern crate bar;")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { path = "baz" }
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            "extern crate baz; pub fn bar() { baz::baz(); }",
+        ).file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("bar/baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn paths_ok_with_optional() {
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { version = "0.1", optional = true }
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .file(
+            "bar2/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { version = "0.1", optional = true }
+        "#,
+        ).file("bar2/src/lib.rs", "")
+        .file(".cargo/config", r#"paths = ["bar2"]"#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([..]bar2)
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn paths_add_optional_bad() {
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .file(
+            "bar2/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { version = "0.1", optional = true }
+        "#,
+        ).file("bar2/src/lib.rs", "")
+        .file(".cargo/config", r#"paths = ["bar2"]"#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr_contains(
+            "\
+warning: path override for crate `bar` has altered the original list of
+dependencies; the dependency on `baz` was either added or\
+",
+        ).run();
+}
+
+#[test]
+fn override_with_default_feature() {
+    Package::new("another", "0.1.0").publish();
+    Package::new("another", "0.1.1").dep("bar", "0.1").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar", default-features = false }
+            another = "0.1"
+            another2 = { path = "another2" }
+
+            [replace]
+            'bar:0.1.0' = { path = "bar" }
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            default = []
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #[cfg(feature = "default")]
+            pub fn bar() {}
+        "#,
+        ).file(
+            "another2/Cargo.toml",
+            r#"
+            [package]
+            name = "another2"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { version = "0.1", default-features = false }
+        "#,
+        ).file("another2/src/lib.rs", "")
+        .build();
+
+    p.cargo("run").run();
+}
+
+#[test]
+fn override_plus_dep() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [replace]
+            'bar:0.1.0' = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = ".." }
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains("error: cyclic package dependency: [..]")
+        .run();
+}
diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs
new file mode 100644 (file)
index 0000000..b9c1cae
--- /dev/null
@@ -0,0 +1,1230 @@
+use std;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use flate2::read::GzDecoder;
+use git2;
+use support::registry::Package;
+use support::{basic_manifest, git, is_nightly, path2url, paths, project, registry};
+use support::{cargo_process, sleep_ms};
+use tar::Archive;
+
+// Happy path for `cargo package`: warns about missing documentation metadata,
+// builds and verifies the .crate, honors `exclude = ["*.txt"]`, `-l` lists only
+// the packaged files, a second run is a no-op, and the tarball contains exactly
+// Cargo.toml, Cargo.toml.orig and src/main.rs.
+#[test]
+fn simple() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            exclude = ["*.txt"]
+            license = "MIT"
+            description = "foo"
+        "#)
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .file("src/bar.txt", "") // should be ignored when packaging
+        .build();
+
+    p.cargo("package")
+        .with_stderr(
+            "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+    p.cargo("package -l")
+        .with_stdout(
+            "\
+Cargo.toml
+src/main.rs
+",
+        ).run();
+    p.cargo("package").with_stdout("").run();
+
+    // Decompress and walk the produced tarball to check its exact contents.
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for f in ar.entries().unwrap() {
+        let f = f.unwrap();
+        let fname = f.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            f.header().path()
+        )
+    }
+}
+
+// The "manifest has no ..." warning lists exactly the metadata fields that are
+// missing, and disappears entirely once description + license + repository are
+// present. Exercised on three manifests with progressively more metadata.
+#[test]
+fn metadata_warning() {
+    // No metadata at all: full warning list.
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("package")
+        .with_stderr(
+            "\
+warning: manifest has no description, license, license-file, documentation, \
+homepage or repository.
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // License only: warning shrinks to the still-missing fields.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("package")
+        .with_stderr(
+            "\
+warning: manifest has no description, documentation, homepage or repository.
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // License + description + repository: no warning at all.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            repository = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("package")
+        .with_stderr(
+            "\
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// `cargo package -v --no-verify` inside a git repository archives a generated
+// `.cargo_vcs_info.json` recording the HEAD sha1, both for the root package and
+// for a nested package (`a/`) living in the same repository.
+#[test]
+fn package_verbose() {
+    let root = paths::root().join("all");
+    let repo = git::repo(&root)
+        .file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
+        .file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+    cargo_process("build").cwd(repo.root()).run();
+
+    println!("package main repo");
+    cargo_process("package -v --no-verify")
+        .cwd(repo.root())
+        .with_stderr(
+            "\
+[WARNING] manifest has no description[..]
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] .cargo_vcs_info.json
+",
+        ).run();
+
+    // Pull .cargo_vcs_info.json back out of the tarball and check that it
+    // records exactly the repository's current HEAD revision.
+    let f = File::open(&repo.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    let mut entry = ar
+        .entries()
+        .unwrap()
+        .map(|f| f.unwrap())
+        .find(|e| e.path().unwrap().ends_with(".cargo_vcs_info.json"))
+        .unwrap();
+    let mut contents = String::new();
+    entry.read_to_string(&mut contents).unwrap();
+    assert_eq!(
+        &contents[..],
+        &*format!(
+            r#"{{
+  "git": {{
+    "sha1": "{}"
+  }}
+}}
+"#,
+            repo.revparse_head()
+        )
+    );
+
+    println!("package sub-repo");
+    cargo_process("package -v --no-verify")
+        .cwd(repo.root().join("a"))
+        .with_stderr(
+            "\
+[WARNING] manifest has no description[..]
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[PACKAGING] a v0.0.1 ([..])
+[ARCHIVING] Cargo.toml
+[ARCHIVING] src/lib.rs
+[ARCHIVING] .cargo_vcs_info.json
+",
+        ).run();
+}
+
+// By default `cargo package` verifies the produced tarball by compiling it
+// (the VERIFYING/COMPILING lines), even when the project was already built.
+#[test]
+fn package_verification() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("build").run();
+    p.cargo("package")
+        .with_stderr(
+            "\
+[WARNING] manifest has no description[..]
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// `.cargo_vcs_info.json` is reserved for cargo's own VCS metadata: a project
+// that ships its own file with that name is rejected with an error.
+#[test]
+fn vcs_file_collision() {
+    let p = project().build();
+    let _ = git::repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            description = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+            exclude = ["*.no-existe"]
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {}
+        "#,
+        ).file(".cargo_vcs_info.json", "foo")
+        .build();
+    p.cargo("package")
+        .arg("--no-verify")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] Invalid inclusion of reserved file name .cargo_vcs_info.json \
+in package source
+",
+        ).run();
+}
+
+// Packaging fails when a path dependency has no `version` key, since a pure
+// path dependency cannot be resolved by consumers of the published crate.
+#[test]
+fn path_dependency_no_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("package")
+        .with_status(101)
+        .with_stderr(
+            "\
+[WARNING] manifest has no documentation, homepage or repository.
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[ERROR] all path dependencies must have a version specified when packaging.
+dependency `bar` does not specify a version.
+",
+        ).run();
+}
+
+// Matrix test for `exclude` glob semantics at the point where the matching
+// rules were being migrated to gitignore-style patterns: each pattern is
+// annotated with whether its effect is NO_CHANGE or CHANGING under the new
+// rules, and `cargo package` emits a "WILL be excluded" warning for every
+// file whose status will flip. The final `-l` listing pins the current
+// (pre-migration) set of packaged files.
+#[test]
+fn exclude() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            exclude = [
+                "*.txt",
+                # file in root
+                "file_root_1",       # NO_CHANGE (ignored)
+                "/file_root_2",      # CHANGING (packaged -> ignored)
+                "file_root_3/",      # NO_CHANGE (packaged)
+                "file_root_4/*",     # NO_CHANGE (packaged)
+                "file_root_5/**",    # NO_CHANGE (packaged)
+                # file in sub-dir
+                "file_deep_1",       # CHANGING (packaged -> ignored)
+                "/file_deep_2",      # NO_CHANGE (packaged)
+                "file_deep_3/",      # NO_CHANGE (packaged)
+                "file_deep_4/*",     # NO_CHANGE (packaged)
+                "file_deep_5/**",    # NO_CHANGE (packaged)
+                # dir in root
+                "dir_root_1",        # CHANGING (packaged -> ignored)
+                "/dir_root_2",       # CHANGING (packaged -> ignored)
+                "dir_root_3/",       # CHANGING (packaged -> ignored)
+                "dir_root_4/*",      # NO_CHANGE (ignored)
+                "dir_root_5/**",     # NO_CHANGE (ignored)
+                # dir in sub-dir
+                "dir_deep_1",        # CHANGING (packaged -> ignored)
+                "/dir_deep_2",       # NO_CHANGE
+                "dir_deep_3/",       # CHANGING (packaged -> ignored)
+                "dir_deep_4/*",      # CHANGING (packaged -> ignored)
+                "dir_deep_5/**",     # CHANGING (packaged -> ignored)
+            ]
+        "#)
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .file("bar.txt", "")
+        .file("src/bar.txt", "")
+        // file in root
+        .file("file_root_1", "")
+        .file("file_root_2", "")
+        .file("file_root_3", "")
+        .file("file_root_4", "")
+        .file("file_root_5", "")
+        // file in sub-dir
+        .file("some_dir/file_deep_1", "")
+        .file("some_dir/file_deep_2", "")
+        .file("some_dir/file_deep_3", "")
+        .file("some_dir/file_deep_4", "")
+        .file("some_dir/file_deep_5", "")
+        // dir in root
+        .file("dir_root_1/some_dir/file", "")
+        .file("dir_root_2/some_dir/file", "")
+        .file("dir_root_3/some_dir/file", "")
+        .file("dir_root_4/some_dir/file", "")
+        .file("dir_root_5/some_dir/file", "")
+        // dir in sub-dir
+        .file("some_dir/dir_deep_1/some_dir/file", "")
+        .file("some_dir/dir_deep_2/some_dir/file", "")
+        .file("some_dir/dir_deep_3/some_dir/file", "")
+        .file("some_dir/dir_deep_4/some_dir/file", "")
+        .file("some_dir/dir_deep_5/some_dir/file", "")
+        .build();
+
+    p.cargo("package --no-verify -v")
+        .with_stdout("")
+        .with_stderr(
+            "\
+[WARNING] manifest has no description[..]
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[WARNING] [..] file `dir_root_1/some_dir/file` WILL be excluded [..]
+See [..]
+[WARNING] [..] file `dir_root_2/some_dir/file` WILL be excluded [..]
+See [..]
+[WARNING] [..] file `dir_root_3/some_dir/file` WILL be excluded [..]
+See [..]
+[WARNING] [..] file `some_dir/dir_deep_1/some_dir/file` WILL be excluded [..]
+See [..]
+[WARNING] [..] file `some_dir/dir_deep_3/some_dir/file` WILL be excluded [..]
+See [..]
+[WARNING] [..] file `some_dir/file_deep_1` WILL be excluded [..]
+See [..]
+[WARNING] No (git) Cargo.toml found at `[..]` in workdir `[..]`
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+",
+        ).run();
+
+    assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+
+    p.cargo("package -l")
+        .with_stdout(
+            "\
+Cargo.toml
+dir_root_1/some_dir/file
+dir_root_2/some_dir/file
+dir_root_3/some_dir/file
+file_root_3
+file_root_4
+file_root_5
+some_dir/dir_deep_1/some_dir/file
+some_dir/dir_deep_2/some_dir/file
+some_dir/dir_deep_3/some_dir/file
+some_dir/dir_deep_4/some_dir/file
+some_dir/dir_deep_5/some_dir/file
+some_dir/file_deep_1
+some_dir/file_deep_2
+some_dir/file_deep_3
+some_dir/file_deep_4
+some_dir/file_deep_5
+src/main.rs
+",
+        ).run();
+}
+
+// When `include` is present it takes precedence: exactly three files match
+// (foo.txt, src/main.rs, Cargo.toml), so three ARCHIVING lines appear and the
+// `exclude`d src/bar.txt never enters the picture.
+#[test]
+fn include() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            exclude = ["*.txt"]
+            include = ["foo.txt", "**/*.rs", "Cargo.toml"]
+        "#)
+        .file("foo.txt", "")
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .file("src/bar.txt", "") // should be ignored when packaging
+        .build();
+
+    p.cargo("package --no-verify -v")
+        .with_stderr(
+            "\
+[WARNING] manifest has no description[..]
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[WARNING] No (git) Cargo.toml found at `[..]` in workdir `[..]`
+[PACKAGING] foo v0.0.1 ([..])
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+[ARCHIVING] [..]
+",
+        ).run();
+}
+
+// A crate with both a library and a binary that links it (`extern crate foo`)
+// must package (and verify-build) successfully.
+#[test]
+fn package_lib_with_bin() {
+    let p = project()
+        .file("src/main.rs", "extern crate foo; fn main() {}")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("package -v").run();
+}
+
+// Files tracked via a git submodule (here `bar/Makefile`) are included in the
+// package archive. The submodule is checked out with a hard reset so its
+// working tree actually contains the files before packaging.
+#[test]
+fn package_git_submodule() {
+    let project = git::new("foo", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+                    [project]
+                    name = "foo"
+                    version = "0.0.1"
+                    authors = ["foo@example.com"]
+                    license = "MIT"
+                    description = "foo"
+                    repository = "foo"
+                "#,
+            ).file("src/lib.rs", "pub fn foo() {}")
+    }).unwrap();
+    let library = git::new("bar", |library| {
+        library.no_manifest().file("Makefile", "all:")
+    }).unwrap();
+
+    let repository = git2::Repository::open(&project.root()).unwrap();
+    let url = path2url(library.root()).to_string();
+    git::add_submodule(&repository, &url, Path::new("bar"));
+    git::commit(&repository);
+
+    // Populate the submodule's working tree (add_submodule only registers it).
+    let repository = git2::Repository::open(&project.root().join("bar")).unwrap();
+    repository
+        .reset(
+            &repository.revparse_single("HEAD").unwrap(),
+            git2::ResetType::Hard,
+            None,
+        ).unwrap();
+
+    project.cargo("package --no-verify -v")
+        .with_stderr_contains("[ARCHIVING] bar/Makefile")
+        .run();
+}
+
+// A git-tracked file with uncommitted modifications must appear exactly once
+// in `package --list --allow-dirty` output (not duplicated as both the
+// committed and the dirty copy).
+#[test]
+fn no_duplicates_from_modified_tracked_files() {
+    let root = paths::root().join("all");
+    let p = git::repo(&root)
+        .file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
+        .file("src/main.rs", "fn main() {}")
+        .build();
+    // Dirty the tracked file without committing.
+    File::create(p.root().join("src/main.rs"))
+        .unwrap()
+        .write_all(br#"fn main() { println!("A change!"); }"#)
+        .unwrap();
+    cargo_process("build").cwd(p.root()).run();
+    cargo_process("package --list --allow-dirty")
+        .cwd(p.root())
+        .with_stdout(
+            "\
+Cargo.toml
+src/main.rs
+",
+        ).run();
+}
+
+// A nested copy of the same project (a_dir/foo/) must be ignored when
+// packaging: the `-l` listing and the tarball contain only the root package's
+// Cargo.toml, Cargo.toml.orig and src/main.rs.
+#[test]
+fn ignore_nested() {
+    let cargo_toml = r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+        "#;
+    let main_rs = r#"
+            fn main() { println!("hello"); }
+        "#;
+    let p = project()
+        .file("Cargo.toml", cargo_toml)
+        .file("src/main.rs", main_rs)
+        // If a project happens to contain a copy of itself, we should
+        // ignore it.
+        .file("a_dir/foo/Cargo.toml", cargo_toml)
+        .file("a_dir/foo/src/main.rs", main_rs)
+        .build();
+
+    p.cargo("package")
+        .with_stderr(
+            "\
+[WARNING] manifest has no documentation[..]
+See http://doc.crates.io/manifest.html#package-metadata for more info.
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+    p.cargo("package -l")
+        .with_stdout(
+            "\
+Cargo.toml
+src[..]main.rs
+",
+        ).run();
+    p.cargo("package").with_stdout("").run();
+
+    // Walk the tarball: nothing from a_dir/ may have been archived.
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for f in ar.entries().unwrap() {
+        let f = f.unwrap();
+        let fname = f.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            f.header().path()
+        )
+    }
+}
+
+// A filename containing `:` (legal on unix, illegal in crates) makes
+// `cargo package` fail with a "special character" error.
+#[cfg(unix)] // windows doesn't allow these characters in filenames
+#[test]
+fn package_weird_characters() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .file("src/:foo", "")
+        .build();
+
+    p.cargo("package")
+        .with_status(101)
+        .with_stderr(
+            "\
+warning: [..]
+See [..]
+[PACKAGING] foo [..]
+[ERROR] failed to prepare local package for uploading
+
+Caused by:
+  cannot package a filename with a special character `:`: src/:foo
+",
+        ).run();
+}
+
+// Adding a source file after an initial `cargo package` must cause the next
+// `cargo package` to rebuild the tarball, and the new file must be in it.
+#[test]
+fn repackage_on_source_change() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    p.cargo("package").run();
+
+    // Add another source file
+    let mut file = File::create(p.root().join("src").join("foo.rs")).unwrap_or_else(|e| {
+        panic!(
+            "could not create file {}: {}",
+            p.root().join("src/foo.rs").display(),
+            e
+        )
+    });
+
+    file.write_all(br#"fn main() { println!("foo"); }"#)
+        .unwrap();
+    // Close the file handle before invoking cargo on the project.
+    std::mem::drop(file);
+
+    // Check that cargo rebuilds the tarball
+    p.cargo("package")
+        .with_stderr(
+            "\
+[WARNING] [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Check that the tarball contains the added file
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    let entries = ar.entries().unwrap();
+    let entry_paths = entries
+        .map(|entry| entry.unwrap().path().unwrap().into_owned())
+        .collect::<Vec<PathBuf>>();
+    assert!(entry_paths.contains(&PathBuf::from("foo-0.0.1/src/foo.rs")));
+}
+
+// A dangling symlink inside src/ makes archiving fail with a chained
+// "failed to open for archiving" error rather than a panic.
+#[test]
+#[cfg(unix)]
+fn broken_symlink() {
+    use std::os::unix::fs;
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = 'foo'
+            documentation = 'foo'
+            homepage = 'foo'
+            repository = 'foo'
+        "#,
+        ).file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+    // Symlink target "nowhere" does not exist, so opening src/foo.rs fails.
+    t!(fs::symlink("nowhere", &p.root().join("src/foo.rs")));
+
+    p.cargo("package -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to prepare local package for uploading
+
+Caused by:
+  failed to open for archiving: `[..]foo.rs`
+
+Caused by:
+  [..]
+",
+        ).run();
+}
+
+// `cargo package` refuses to run when the git working directory has
+// uncommitted changes, naming the dirty file and suggesting `--allow-dirty`.
+#[test]
+fn do_not_package_if_repository_is_dirty() {
+    let p = project().build();
+
+    // Create a Git repository containing a minimal Rust project.
+    let _ = git::repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // Modify Cargo.toml without committing the change.
+    p.change_file(
+        "Cargo.toml",
+        r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+            # change
+    "#,
+    );
+
+    p.cargo("package")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: 1 files in the working directory contain changes that were not yet \
+committed into git:
+
+Cargo.toml
+
+to proceed despite this, pass the `--allow-dirty` flag
+",
+        ).run();
+}
+
+// The Cargo.toml that goes into the tarball is a normalized rewrite of the
+// original: dependencies sorted alphabetically (BTreeMap), the `path` removed
+// from path+version deps, and alternative-registry deps annotated with an
+// explicit `registry-index` URL.
+#[test]
+fn generated_manifest() {
+    Package::new("abc", "1.0.0").publish();
+    Package::new("def", "1.0.0").alternative(true).publish();
+    Package::new("ghi", "1.0.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            exclude = ["*.txt"]
+            license = "MIT"
+            description = "foo"
+
+            [project.metadata]
+            foo = 'bar'
+
+            [workspace]
+
+            [dependencies]
+            bar = { path = "bar", version = "0.1" }
+            def = { version = "1.0", registry = "alternative" }
+            ghi = "1.0"
+            abc = "1.0"
+        "#,
+        ).file("src/main.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("package --no-verify")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    // Read the rewritten Cargo.toml back out of the tarball.
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    let mut entry = ar
+        .entries()
+        .unwrap()
+        .map(|f| f.unwrap())
+        .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
+        .unwrap();
+    let mut contents = String::new();
+    entry.read_to_string(&mut contents).unwrap();
+    // BTreeMap makes the order of dependencies in the generated file deterministic
+    // by sorting alphabetically
+    assert_eq!(
+        &contents[..],
+        &*format!(
+            r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+cargo-features = ["alternative-registries"]
+
+[package]
+name = "foo"
+version = "0.0.1"
+authors = []
+exclude = ["*.txt"]
+description = "foo"
+license = "MIT"
+
+[package.metadata]
+foo = "bar"
+[dependencies.abc]
+version = "1.0"
+
+[dependencies.bar]
+version = "0.1"
+
+[dependencies.def]
+version = "1.0"
+registry-index = "{}"
+
+[dependencies.ghi]
+version = "1.0"
+"#,
+            registry::alt_registry()
+        )
+    );
+}
+
+// The `workspace = ".."` key in a member's manifest is dropped from the
+// normalized Cargo.toml written into the package tarball.
+#[test]
+fn ignore_workspace_specifier() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            bar = { path = "bar", version = "0.1" }
+        "#,
+        ).file("src/main.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("package --no-verify")
+        .cwd(p.root().join("bar"))
+        .run();
+
+    // The generated manifest must contain no `workspace` key.
+    let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    let mut entry = ar
+        .entries()
+        .unwrap()
+        .map(|f| f.unwrap())
+        .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
+        .unwrap();
+    let mut contents = String::new();
+    entry.read_to_string(&mut contents).unwrap();
+    assert_eq!(
+        &contents[..],
+        r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "bar"
+version = "0.1.0"
+authors = []
+"#
+    );
+}
+
+// Both dependency spellings — bare version string and inline table with a
+// `version` key — survive packaging without error.
+#[test]
+fn package_two_kinds_of_deps() {
+    Package::new("other", "1.0.0").publish();
+    Package::new("other1", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            other = "1.0"
+            other1 = { version = "1.0" }
+        "#,
+        ).file("src/main.rs", "")
+        .build();
+
+    p.cargo("package --no-verify").run();
+}
+
+// The `edition` manifest key is forwarded to rustc as `--edition=2018` when
+// the nightly "edition" feature gate is enabled.
+#[test]
+fn test_edition() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["edition"]
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            edition = "2018"
+        "#,
+        ).file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("build -v").masquerade_as_nightly_cargo()
+        .without_status() // passes on nightly, fails on stable, b/c --edition is nightly-only
+        // --edition is still in flux and we're not passing -Zunstable-options
+        // from Cargo so it will probably error. Only partially match the output
+        // until stuff stabilizes
+        .with_stderr_contains("\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]--edition=2018 [..]
+").run();
+}
+
+// `edition = "2018"` and a `[package.metadata.*]` table can coexist in one
+// manifest; `cargo package` must succeed on such a project.
+#[test]
+fn edition_with_metadata() {
+    if !is_nightly() {
+        // --edition is nightly-only
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+                edition = "2018"
+
+                [package.metadata.docs.rs]
+                features = ["foobar"]
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("package").run();
+}
+
+// An unknown `edition` value ("chicken") fails manifest parsing with a
+// message naming the supported editions.
+#[test]
+fn test_edition_malformed() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+                edition = "chicken"
+            "#,
+        ).file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  failed to parse the `edition` key
+
+Caused by:
+  supported edition values are `2015` or `2018`, but `chicken` is unknown
+".to_string(),
+        ).run();
+}
+
+// With the nightly `publish-lockfile` feature and `publish-lockfile = true`,
+// a binary crate's Cargo.lock is listed by `-l` and shipped in the tarball.
+#[test]
+fn package_lockfile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["publish-lockfile"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish-lockfile = true
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("package")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+    p.cargo("package -l")
+        .masquerade_as_nightly_cargo()
+        .with_stdout(
+            "\
+Cargo.lock
+Cargo.toml
+src/main.rs
+",
+        ).run();
+    p.cargo("package")
+        .masquerade_as_nightly_cargo()
+        .with_stdout("")
+        .run();
+
+    // Cargo.lock must be one of the archived files.
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for f in ar.entries().unwrap() {
+        let f = f.unwrap();
+        let fname = f.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/Cargo.lock"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            f.header().path()
+        )
+    }
+}
+
+// `publish-lockfile` inside a git repository: `-l` lists the VCS info file
+// plus Cargo.lock alongside the usual files.
+#[test]
+fn package_lockfile_git_repo() {
+    let p = project().build();
+
+    // Create a Git repository containing a minimal Rust project.
+    let _ = git::repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["publish-lockfile"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+            publish-lockfile = true
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("package -l")
+        .masquerade_as_nightly_cargo()
+        .with_stdout(
+            "\
+.cargo_vcs_info.json
+Cargo.lock
+Cargo.toml
+src/main.rs
+",
+        ).run();
+}
+
+// A library-only crate gets no Cargo.lock in its tarball even when
+// `publish-lockfile = true` is set (lock files are for binaries).
+#[test]
+fn no_lock_file_with_library() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["publish-lockfile"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish-lockfile = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("package").masquerade_as_nightly_cargo().run();
+
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for f in ar.entries().unwrap() {
+        let f = f.unwrap();
+        let fname = f.header().path().unwrap();
+        assert!(!fname.ends_with("Cargo.lock"));
+    }
+}
+
+// Packaging a workspace member with `publish-lockfile = true` includes a
+// Cargo.lock in its tarball even though the lock file lives at the
+// workspace root.
+#[test]
+fn lock_file_and_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo"]
+        "#,
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            cargo-features = ["publish-lockfile"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish-lockfile = true
+        "#,
+        ).file("foo/src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("package")
+        .cwd(p.root().join("foo"))
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+    let mut rdr = GzDecoder::new(f);
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    assert!(ar.entries().unwrap().into_iter().any(|f| {
+        let f = f.unwrap();
+        let fname = f.header().path().unwrap();
+        fname.ends_with("Cargo.lock")
+    }));
+}
+
+// Verification must catch a build script that writes into src/ (anything
+// outside OUT_DIR): `cargo package` fails naming the modified file, while
+// `--no-verify` skips the check and succeeds.
+#[test]
+fn do_not_package_if_src_was_modified() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .file(
+            "build.rs",
+            r#"
+            use std::fs::File;
+            use std::io::Write;
+
+            fn main() {
+                let mut file = File::create("src/generated.txt").expect("failed to create file");
+                file.write_all(b"Hello, world of generated files.").expect("failed to write");
+            }
+        "#,
+        ).build();
+
+    if cfg!(target_os = "macos") {
+        // MacOS has 1s resolution filesystem.
+        // If src/main.rs is created within 1s of src/generated.txt, then it
+        // won't trigger the modification check.
+        sleep_ms(1000);
+    }
+
+    p.cargo("package")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to verify package tarball
+
+Caused by:
+  Source directory was modified by build.rs during cargo publish. \
+Build scripts should not modify anything outside of OUT_DIR. Modified file: [..]src/generated.txt
+
+To proceed despite this, pass the `--no-verify` flag.",
+        ).run();
+
+    p.cargo("package --no-verify").run();
+}
diff --git a/tests/testsuite/patch.rs b/tests/testsuite/patch.rs
new file mode 100644 (file)
index 0000000..8298803
--- /dev/null
@@ -0,0 +1,867 @@
+use std::fs::{self, File};
+use std::io::{Read, Write};
+
+use support::git;
+use support::paths;
+use support::registry::Package;
+use support::{basic_manifest, project};
+use toml;
+
+#[test]
+fn replace() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("baz", "0.1.0")
+        .file(
+            "src/lib.rs",
+            "extern crate bar; pub fn baz() { bar::bar(); }",
+        ).dep("bar", "0.1.0")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+            baz = "0.1.0"
+
+            [patch.crates-io]
+            bar = { path = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            "
+            extern crate bar;
+            extern crate baz;
+            pub fn bar() {
+                bar::bar();
+                baz::baz();
+            }
+        ",
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] baz v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn nonexistent() {
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [patch.crates-io]
+            bar = { path = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn patch_git() {
+    let bar = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = {{ git = '{}' }}
+
+            [patch.'{0}']
+            bar = {{ path = "bar" }}
+        "#,
+                bar.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `file://[..]`
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn patch_to_git() {
+    let bar = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [patch.crates-io]
+            bar = {{ git = '{}' }}
+        "#,
+                bar.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::bar(); }",
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `[ROOT][..]` index
+[COMPILING] bar v0.1.0 (file://[..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn unused() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [patch.crates-io]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0"))
+        .file("bar/src/lib.rs", "not rust code")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+
+    // unused patch should be in the lock file
+    let mut lock = String::new();
+    File::open(p.root().join("Cargo.lock"))
+        .unwrap()
+        .read_to_string(&mut lock)
+        .unwrap();
+    let toml: toml::Value = toml::from_str(&lock).unwrap();
+    assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1);
+    assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar"));
+    assert_eq!(
+        toml["patch"]["unused"][0]["version"].as_str(),
+        Some("0.2.0")
+    );
+}
+
+#[test]
+fn unused_git() {
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", &basic_manifest("bar", "0.2.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [patch.crates-io]
+            bar = {{ git = '{}' }}
+        "#,
+                foo.url()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] git repository `file://[..]`
+[UPDATING] `[ROOT][..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn add_patch() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+
+    t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+        br#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [patch.crates-io]
+            bar = { path = 'bar' }
+    "#
+    ));
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn add_ignored_patch() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+        .file("bar/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 [..]
+[COMPILING] bar v0.1.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+
+    t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+        br#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [patch.crates-io]
+            bar = { path = 'bar' }
+    "#
+    ));
+
+    p.cargo("build")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn new_minor() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+
+            [patch.crates-io]
+            bar = { path = 'bar' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+        .file("bar/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[COMPILING] bar v0.1.1 [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn transitive_new_minor() {
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = 'bar' }
+
+            [patch.crates-io]
+            baz = { path = 'baz' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = '0.1.0'
+        "#,
+        ).file("bar/src/lib.rs", r#""#)
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1"))
+        .file("baz/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[COMPILING] baz v0.1.1 [..]
+[COMPILING] bar v0.1.0 [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn new_major() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.2.0"
+
+            [patch.crates-io]
+            bar = { path = 'bar' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0"))
+        .file("bar/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[COMPILING] bar v0.2.0 [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    Package::new("bar", "0.2.0").publish();
+    p.cargo("update").run();
+    p.cargo("build")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+
+    t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+        br#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.2.0"
+    "#
+    ));
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.2.0 [..]
+[COMPILING] bar v0.2.0
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn transitive_new_major() {
+    Package::new("baz", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = 'bar' }
+
+            [patch.crates-io]
+            baz = { path = 'baz' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = '0.2.0'
+        "#,
+        ).file("bar/src/lib.rs", r#""#)
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.2.0"))
+        .file("baz/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[ROOT][..]` index
+[COMPILING] baz v0.2.0 [..]
+[COMPILING] bar v0.1.0 [..]
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn remove_patch() {
+    Package::new("foo", "0.1.0").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [patch.crates-io]
+            foo = { path = 'foo' }
+            bar = { path = 'bar' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", r#""#)
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", r#""#)
+        .build();
+
+    // Generate a lock file where `foo` is unused
+    p.cargo("build").run();
+    let mut lock_file1 = String::new();
+    File::open(p.root().join("Cargo.lock"))
+        .unwrap()
+        .read_to_string(&mut lock_file1)
+        .unwrap();
+
+    // Remove `foo` and generate a new lock file from the old one
+    File::create(p.root().join("Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+        [package]
+        name = "foo"
+        version = "0.0.1"
+        authors = []
+
+        [dependencies]
+        bar = "0.1"
+
+        [patch.crates-io]
+        bar = { path = 'bar' }
+    "#,
+        ).unwrap();
+    p.cargo("build").run();
+    let mut lock_file2 = String::new();
+    File::open(p.root().join("Cargo.lock"))
+        .unwrap()
+        .read_to_string(&mut lock_file2)
+        .unwrap();
+
+    // Remove the lock file and build from scratch
+    fs::remove_file(p.root().join("Cargo.lock")).unwrap();
+    p.cargo("build").run();
+    let mut lock_file3 = String::new();
+    File::open(p.root().join("Cargo.lock"))
+        .unwrap()
+        .read_to_string(&mut lock_file3)
+        .unwrap();
+
+    assert!(lock_file1.contains("foo"));
+    assert_eq!(lock_file2, lock_file3);
+    assert_ne!(lock_file1, lock_file2);
+}
+
+#[test]
+fn non_crates_io() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [patch.some-other-source]
+            bar = { path = 'bar' }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  invalid url `some-other-source`: relative URL without a base
+",
+        ).run();
+}
+
+#[test]
+fn replace_with_crates_io() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [patch.crates-io]
+            bar = "0.1"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..]
+error: failed to resolve patches for `[..]`
+
+Caused by:
+  patch for `bar` in `[..]` points to the same source, but patches must point \
+  to different sources
+",
+        ).run();
+}
+
+#[test]
+fn patch_in_virtual() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["foo"]
+
+            [patch.crates-io]
+            bar = { path = "bar" }
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", r#""#)
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+        "#,
+        ).file("foo/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn patch_depends_on_another_patch() {
+    Package::new("bar", "0.1.0")
+        .file("src/lib.rs", "broken code")
+        .publish();
+
+    Package::new("baz", "0.1.0")
+        .dep("bar", "0.1")
+        .file("src/lib.rs", "broken code")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.1.0"
+
+            [dependencies]
+            bar = "0.1"
+            baz = "0.1"
+
+            [patch.crates-io]
+            bar = { path = "bar" }
+            baz = { path = "baz" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1"))
+        .file("bar/src/lib.rs", r#""#)
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [package]
+            name = "baz"
+            version = "0.1.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+        "#,
+        ).file("baz/src/lib.rs", r#""#)
+        .build();
+
+    p.cargo("build").run();
+
+    // Nothing should be rebuilt, no registry should be updated.
+    p.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
+
+#[test]
+fn replace_prerelease() {
+    Package::new("baz", "1.1.0-pre.1").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+
+            [patch.crates-io]
+            baz = { path = "./baz" }
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            baz = "1.1.0-pre.1"
+        "#,
+        ).file(
+            "bar/src/main.rs",
+            "extern crate baz; fn main() { baz::baz() }",
+        ).file(
+            "baz/Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            version = "1.1.0-pre.1"
+            authors = []
+            [workspace]
+        "#,
+        ).file("baz/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("build").run();
+}
diff --git a/tests/testsuite/path.rs b/tests/testsuite/path.rs
new file mode 100644 (file)
index 0000000..086a536
--- /dev/null
@@ -0,0 +1,977 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+
+use support::paths::{self, CargoPathExt};
+use support::registry::Package;
+use support::sleep_ms;
+use support::{basic_lib_manifest, basic_manifest, main_file, project};
+
+#[test]
+#[cfg(not(windows))] // I have no idea why this is failing spuriously on
+                     // Windows, for more info see #3466.
+fn cargo_compile_with_nested_deps_shorthand() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            version = "0.5.0"
+            path = "bar"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.baz]
+
+            version = "0.5.0"
+            path = "baz"
+
+            [lib]
+
+            name = "bar"
+        "#,
+        ).file(
+            "bar/src/bar.rs",
+            r#"
+            extern crate baz;
+
+            pub fn gimme() -> String {
+                baz::gimme()
+            }
+        "#,
+        ).file("bar/baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file(
+            "bar/baz/src/baz.rs",
+            r#"
+            pub fn gimme() -> String {
+                "test passed".to_string()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\
+             [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("test passed\n").run();
+
+    println!("cleaning");
+    p.cargo("clean -v").with_stdout("").run();
+    println!("building baz");
+    p.cargo("build -p baz")
+        .with_stderr(
+            "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+    println!("building foo");
+    p.cargo("build -p foo")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+}
+
+#[test]
+fn cargo_compile_with_root_dev_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dev-dependencies.bar]
+
+            version = "0.5.0"
+            path = "../bar"
+
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .build();
+    let _p2 = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn gimme() -> &'static str {
+                "zoidberg"
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").with_status(101).run();
+}
+
+#[test]
+fn cargo_compile_with_root_dev_deps_with_testing() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dev-dependencies.bar]
+
+            version = "0.5.0"
+            path = "../bar"
+
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .build();
+    let _p2 = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn gimme() -> &'static str {
+                "zoidberg"
+            }
+        "#,
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] [..] v0.5.0 ([..])
+[COMPILING] [..] v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("running 0 tests")
+        .run();
+}
+
+#[test]
+fn cargo_compile_with_transitive_dev_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            version = "0.5.0"
+            path = "bar"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dev-dependencies.baz]
+
+            git = "git://example.com/path/to/nowhere"
+
+            [lib]
+
+            name = "bar"
+        "#,
+        ).file(
+            "bar/src/bar.rs",
+            r#"
+            pub fn gimme() -> &'static str {
+                "zoidberg"
+            }
+        "#,
+        ).build();
+
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+             [..]\n",
+        ).run();
+
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("zoidberg\n").run();
+}
+
+#[test]
+fn no_rebuild_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::bar() }")
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/bar.rs", "pub fn bar() {}")
+        .build();
+    // First time around we should compile both foo and bar
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+
+    sleep_ms(1000);
+    p.change_file(
+        "src/main.rs",
+        r#"
+        extern crate bar;
+        fn main() { bar::bar(); }
+    "#,
+    );
+    // Don't compile bar, but do recompile foo.
+    p.cargo("build")
+        .with_stderr(
+            "\
+             [COMPILING] foo v0.5.0 ([..])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+}
+
+#[test]
+fn deep_dependencies_trigger_rebuild() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::bar() }")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+            name = "bar"
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "bar/src/bar.rs",
+            "extern crate baz; pub fn bar() { baz::baz() }",
+        ).file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file("baz/src/baz.rs", "pub fn baz() {}")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\
+             [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+    p.cargo("build").with_stdout("").run();
+
+    // Make sure an update to baz triggers a rebuild of bar
+    //
+    // We base recompilation off mtime, so sleep for at least a second to ensure
+    // that this write will change the mtime.
+    File::create(&p.root().join("baz/src/baz.rs"))
+        .unwrap()
+        .write_all(br#"pub fn baz() { println!("hello!"); }"#)
+        .unwrap();
+    sleep_ms(1000);
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\
+             [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+
+    // Make sure an update to bar doesn't trigger baz
+    File::create(&p.root().join("bar/src/bar.rs"))
+        .unwrap()
+        .write_all(
+            br#"
+        extern crate baz;
+        pub fn bar() { println!("hello!"); baz::baz(); }
+    "#,
+        ).unwrap();
+    sleep_ms(1000);
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+}
+
+#[test]
+fn no_rebuild_two_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+            [dependencies.baz]
+            path = "baz"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::bar() }")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [lib]
+            name = "bar"
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file("bar/src/bar.rs", "pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_lib_manifest("baz"))
+        .file("baz/src/baz.rs", "pub fn baz() {}")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\
+             [COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+    assert!(p.bin("foo").is_file());
+    p.cargo("build").with_stdout("").run();
+    assert!(p.bin("foo").is_file());
+}
+
+#[test]
+fn nested_deps_recompile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            version = "0.5.0"
+            path = "src/bar"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file("src/bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/src/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+    sleep_ms(1000);
+
+    File::create(&p.root().join("src/main.rs"))
+        .unwrap()
+        .write_all(br#"fn main() {}"#)
+        .unwrap();
+
+    // This shouldn't recompile `bar`
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+}
+
+#[test]
+fn error_message_for_missing_manifest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            path = "src/bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/bar/not-a-manifest", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to load source for a dependency on `bar`
+
+Caused by:
+  Unable to update [CWD]/src/bar
+
+Caused by:
+  failed to read `[..]bar/Cargo.toml`
+
+Caused by:
+  [..] (os error [..])
+",
+        ).run();
+}
+
+#[test]
+fn override_relative() {
+    let bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    fs::create_dir(&paths::root().join(".cargo")).unwrap();
+    File::create(&paths::root().join(".cargo/config"))
+        .unwrap()
+        .write_all(br#"paths = ["bar"]"#)
+        .unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = '{}'
+        "#,
+                bar.root().display()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn override_self() {
+    let bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    let p = project();
+    let root = p.root().clone();
+    let p = p
+        .file(".cargo/config", &format!("paths = ['{}']", root.display()))
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+            path = '{}'
+
+        "#,
+                bar.root().display()
+            ),
+        ).file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn override_path_dep() {
+    let bar = project()
+        .at("bar")
+        .file(
+            "p1/Cargo.toml",
+            r#"
+            [package]
+            name = "p1"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies.p2]
+            path = "../p2"
+       "#,
+        ).file("p1/src/lib.rs", "")
+        .file("p2/Cargo.toml", &basic_manifest("p2", "0.5.0"))
+        .file("p2/src/lib.rs", "")
+        .build();
+
+    let p = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                "paths = ['{}', '{}']",
+                bar.root().join("p1").display(),
+                bar.root().join("p2").display()
+            ),
+        ).file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.p2]
+            path = '{}'
+
+        "#,
+                bar.root().join("p2").display()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn path_dep_build_cmd() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+
+            name = "foo"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+
+            [dependencies.bar]
+
+            version = "0.5.0"
+            path = "bar"
+        "#,
+        ).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+            build = "build.rs"
+
+            [lib]
+            name = "bar"
+            path = "src/bar.rs"
+        "#,
+        ).file(
+            "bar/build.rs",
+            r#"
+            use std::fs;
+            fn main() {
+                fs::copy("src/bar.rs.in", "src/bar.rs").unwrap();
+            }
+        "#,
+        ).file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 0 }")
+        .build();
+    p.root().join("bar").move_into_the_past();
+
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+             [..]\n",
+        ).run();
+
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("0\n").run();
+
+    // Touching bar.rs.in should cause the `build` command to run again.
+    {
+        let file = fs::File::create(&p.root().join("bar/src/bar.rs.in"));
+        file.unwrap()
+            .write_all(br#"pub fn gimme() -> i32 { 1 }"#)
+            .unwrap();
+    }
+
+    p.cargo("build")
+        .with_stderr(
+            "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\
+             [COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in \
+             [..]\n",
+        ).run();
+
+    p.process(&p.bin("foo")).with_stdout("1\n").run();
+}
+
+#[test]
+fn dev_deps_no_rebuild_lib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+                name = "foo"
+                version = "0.5.0"
+                authors = []
+
+            [dev-dependencies.bar]
+                path = "bar"
+
+            [lib]
+                name = "foo"
+                doctest = false
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar;
+            #[cfg(not(test))] pub fn foo() { env!("FOO"); }
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+    p.cargo("build")
+        .env("FOO", "bar")
+        .with_stderr(
+            "[COMPILING] foo v0.5.0 ([CWD])\n\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) \
+             in [..]\n",
+        ).run();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] [..] v0.5.0 ([CWD][..])
+[COMPILING] [..] v0.5.0 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("running 0 tests")
+        .run();
+}
+
+#[test]
+fn custom_target_no_rebuild() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            a = { path = "a" }
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.5.0"))
+        .file("a/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            a = { path = "../a" }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .build();
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] a v0.5.0 ([..])
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    t!(fs::rename(
+        p.root().join("target"),
+        p.root().join("target_moved")
+    ));
+    p.cargo("build --manifest-path=b/Cargo.toml")
+        .env("CARGO_TARGET_DIR", "target_moved")
+        .with_stderr(
+            "\
+[COMPILING] b v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn override_and_depend() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "a/a1/Cargo.toml",
+            r#"
+            [project]
+            name = "a1"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            a2 = { path = "../a2" }
+        "#,
+        ).file("a/a1/src/lib.rs", "")
+        .file("a/a2/Cargo.toml", &basic_manifest("a2", "0.5.0"))
+        .file("a/a2/src/lib.rs", "")
+        .file(
+            "b/Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.5.0"
+            authors = []
+            [dependencies]
+            a1 = { path = "../a/a1" }
+            a2 = { path = "../a/a2" }
+        "#,
+        ).file("b/src/lib.rs", "")
+        .file("b/.cargo/config", r#"paths = ["../a"]"#)
+        .build();
+    p.cargo("build")
+        .cwd(p.root().join("b"))
+        .with_stderr(
+            "\
+[COMPILING] a2 v0.5.0 ([..])
+[COMPILING] a1 v0.5.0 ([..])
+[COMPILING] b v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn missing_path_dependency() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("a", "0.5.0"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"paths = ["../whoa-this-does-not-exist"]"#,
+        ).build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \
+(defined in `[..]`)
+
+Caused by:
+  failed to read directory `[..]`
+
+Caused by:
+  [..] (os error [..])
+",
+        ).run();
+}
+
+#[test]
+fn invalid_path_dep_in_workspace_with_lockfile() {
+    Package::new("bar", "1.0.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "top"
+            version = "0.5.0"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            foo = { path = "foo" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    // Generate a lock file
+    p.cargo("build").run();
+
+    // Change the dependency on `bar` to an invalid path
+    File::create(&p.root().join("foo/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+        [project]
+        name = "foo"
+        version = "0.5.0"
+        authors = []
+
+        [dependencies]
+        bar = { path = "" }
+    "#,
+        ).unwrap();
+
+    // Make sure we get a nice error. In the past this actually stack
+    // overflowed!
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: no matching package named `bar` found
+location searched: [..]
+did you mean: foo
+required by package `foo v0.5.0 ([..])`
+",
+        ).run();
+}
+
+#[test]
+fn workspace_produces_rlib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "top"
+            version = "0.5.0"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            foo = { path = "foo" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.5.0"))
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    assert!(p.root().join("target/debug/libtop.rlib").is_file());
+    assert!(!p.root().join("target/debug/libfoo.rlib").is_file());
+}
+
+#[test]
+fn thin_lto_works() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "top"
+            version = "0.5.0"
+            authors = []
+
+            [profile.release]
+            lto = 'thin'
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --release -v")
+        .with_stderr(
+            "\
+[COMPILING] top [..]
+[RUNNING] `rustc [..] -C lto=thin [..]`
+[FINISHED] [..]
+",
+        ).run();
+}
diff --git a/tests/testsuite/plugins.rs b/tests/testsuite/plugins.rs
new file mode 100644 (file)
index 0000000..2c5653e
--- /dev/null
@@ -0,0 +1,419 @@
+use std::env;
+use std::fs;
+
+use support::{basic_manifest, project};
+use support::{is_nightly, rustc_host};
+
+#[test]
+fn plugin_to_the_max() {
+    if !is_nightly() {
+        return;
+    }
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo_lib"
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+            extern crate foo_lib;
+
+            fn main() { foo_lib::foo(); }
+        "#,
+        ).file(
+            "src/foo_lib.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+
+            pub fn foo() {}
+        "#,
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            plugin = true
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+
+            extern crate rustc_plugin;
+            extern crate baz;
+
+            use rustc_plugin::Registry;
+
+            #[plugin_registrar]
+            pub fn foo(_reg: &mut Registry) {
+                println!("{}", baz::baz());
+            }
+        "#,
+        ).build();
+    let _baz = project()
+        .at("baz")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "baz"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "baz"
+            crate_type = ["dylib"]
+        "#,
+        ).file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+        .build();
+
+    foo.cargo("build").run();
+    foo.cargo("doc").run();
+}
+
+#[test]
+fn plugin_with_dynamic_native_dependency() {
+    if !is_nightly() {
+        return;
+    }
+
+    let workspace = project()
+        .at("ws")
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["builder", "foo"]
+        "#,
+        ).build();
+
+    let build = project()
+        .at("ws/builder")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "builder"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "builder"
+            crate-type = ["dylib"]
+        "#,
+        ).file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}")
+        .build();
+
+    let foo = project()
+        .at("ws/foo")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+
+            fn main() {}
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+            build = 'build.rs'
+
+            [lib]
+            name = "bar"
+            plugin = true
+        "#,
+        ).file(
+            "bar/build.rs",
+            r#"
+            use std::path::PathBuf;
+            use std::env;
+
+            fn main() {
+                let src = PathBuf::from(env::var("SRC").unwrap());
+                println!("cargo:rustc-flags=-L {}/deps", src.parent().unwrap().display());
+            }
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+            extern crate rustc_plugin;
+
+            use rustc_plugin::Registry;
+
+            #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))]
+            #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))]
+            extern { fn foo(); }
+
+            #[plugin_registrar]
+            pub fn bar(_reg: &mut Registry) {
+                unsafe { foo() }
+            }
+        "#,
+        ).build();
+
+    build.cargo("build").run();
+
+    let src = workspace.root().join("target/debug");
+    let lib = fs::read_dir(&src)
+        .unwrap()
+        .map(|s| s.unwrap().path())
+        .find(|lib| {
+            let lib = lib.file_name().unwrap().to_str().unwrap();
+            lib.starts_with(env::consts::DLL_PREFIX) && lib.ends_with(env::consts::DLL_SUFFIX)
+        }).unwrap();
+
+    foo.cargo("build -v").env("SRC", &lib).run();
+}
+
+#[test]
+fn plugin_integration() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+
+            [lib]
+            name = "foo"
+            plugin = true
+            doctest = false
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .file("tests/it_works.rs", "")
+        .build();
+
+    p.cargo("test -v").run();
+}
+
+#[test]
+fn doctest_a_plugin() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "#[macro_use] extern crate bar;")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            plugin = true
+        "#,
+        ).file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("test -v").run();
+}
+
+// See #1515
+#[test]
+fn native_plugin_dependency_with_custom_ar_linker() {
+    let target = rustc_host();
+
+    let _foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            plugin = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    let bar = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.foo]
+            path = "../foo"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}]
+            ar = "nonexistent-ar"
+            linker = "nonexistent-linker"
+        "#,
+                target
+            ),
+        ).build();
+
+    bar.cargo("build --verbose")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]`
+[ERROR] [..]linker[..]
+",
+        ).run();
+}
+
+#[test]
+fn panic_abort_plugins() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.dev]
+            panic = 'abort'
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            plugin = true
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #![feature(rustc_private)]
+            extern crate syntax;
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn shared_panic_abort_plugins() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.dev]
+            panic = 'abort'
+
+            [dependencies]
+            bar = { path = "bar" }
+            baz = { path = "baz" }
+        "#,
+        ).file("src/lib.rs", "extern crate baz;")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            plugin = true
+
+            [dependencies]
+            baz = { path = "../baz" }
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #![feature(rustc_private)]
+            extern crate syntax;
+            extern crate baz;
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+}
diff --git a/tests/testsuite/proc_macro.rs b/tests/testsuite/proc_macro.rs
new file mode 100644 (file)
index 0000000..e843eea
--- /dev/null
@@ -0,0 +1,281 @@
+use support::is_nightly;
+use support::project;
+
+#[test]
+fn probe_cfg_before_crate_type_discovery() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [target.'cfg(not(stage300))'.dependencies.noop]
+            path = "../noop"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[macro_use]
+            extern crate noop;
+
+            #[derive(Noop)]
+            struct X;
+
+            fn main() {}
+        "#,
+        ).build();
+    let _noop = project()
+        .at("noop")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "noop"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            proc-macro = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate proc_macro;
+            use proc_macro::TokenStream;
+
+            #[proc_macro_derive(Noop)]
+            pub fn noop(_input: TokenStream) -> TokenStream {
+                "".parse().unwrap()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn noop() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.noop]
+            path = "../noop"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[macro_use]
+            extern crate noop;
+
+            #[derive(Noop)]
+            struct X;
+
+            fn main() {}
+        "#,
+        ).build();
+    let _noop = project()
+        .at("noop")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "noop"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            proc-macro = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate proc_macro;
+            use proc_macro::TokenStream;
+
+            #[proc_macro_derive(Noop)]
+            pub fn noop(_input: TokenStream) -> TokenStream {
+                "".parse().unwrap()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+    p.cargo("build").run();
+}
+
+#[test]
+fn impl_and_derive() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.transmogrify]
+            path = "../transmogrify"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[macro_use]
+            extern crate transmogrify;
+
+            trait ImplByTransmogrify {
+                fn impl_by_transmogrify(&self) -> bool;
+            }
+
+            #[derive(Transmogrify, Debug)]
+            struct X { success: bool }
+
+            fn main() {
+                let x = X::new();
+                assert!(x.impl_by_transmogrify());
+                println!("{:?}", x);
+            }
+        "#,
+        ).build();
+    let _transmogrify = project()
+        .at("transmogrify")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "transmogrify"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            proc-macro = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate proc_macro;
+            use proc_macro::TokenStream;
+
+            #[proc_macro_derive(Transmogrify)]
+            #[doc(hidden)]
+            pub fn transmogrify(input: TokenStream) -> TokenStream {
+                "
+                    impl X {
+                        fn new() -> Self {
+                            X { success: true }
+                        }
+                    }
+
+                    impl ImplByTransmogrify for X {
+                        fn impl_by_transmogrify(&self) -> bool {
+                            true
+                        }
+                    }
+                ".parse().unwrap()
+            }
+        "#,
+        ).build();
+
+    p.cargo("build").run();
+    p.cargo("run").with_stdout("X { success: true }").run();
+}
+
+#[test]
+fn plugin_and_proc_macro() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            plugin = true
+            proc-macro = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+            #![feature(proc_macro, proc_macro_lib)]
+
+            extern crate rustc_plugin;
+            use rustc_plugin::Registry;
+
+            extern crate proc_macro;
+            use proc_macro::TokenStream;
+
+            #[plugin_registrar]
+            pub fn plugin_registrar(reg: &mut Registry) {}
+
+            #[proc_macro_derive(Questionable)]
+            pub fn questionable(input: TokenStream) -> TokenStream {
+                input
+            }
+        "#,
+        ).build();
+
+    let msg = "  lib.plugin and lib.proc-macro cannot both be true";
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(msg)
+        .run();
+}
+
+#[test]
+fn proc_macro_doctest() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            [lib]
+            proc-macro = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+/// ```
+/// assert!(true);
+/// ```
+#[proc_macro_derive(Bar)]
+pub fn derive(_input: TokenStream) -> TokenStream {
+    "".parse().unwrap()
+}
+
+#[test]
+fn a() {
+  assert!(true);
+}
+"#,
+        ).build();
+
+    foo.cargo("test")
+        .with_stdout_contains("test a ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 2)
+        .run();
+}
diff --git a/tests/testsuite/profile_config.rs b/tests/testsuite/profile_config.rs
new file mode 100644 (file)
index 0000000..0a22ac5
--- /dev/null
@@ -0,0 +1,360 @@
+use support::{basic_lib_manifest, paths, project};
+
+#[test]
+fn profile_config_gated() {
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev]
+            debug = 1
+        "#,
+        ).build();
+
+    p.cargo("build -v")
+        .with_stderr_contains(
+            "\
+[WARNING] profiles in config files require `-Z config-profile` command-line option
+",
+        ).with_stderr_contains("[..]-C debuginfo=2[..]")
+        .run();
+}
+
+#[test]
+fn profile_config_validate_warnings() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.test]
+            opt-level = 3
+
+            [profile.asdf]
+            opt-level = 3
+
+            [profile.dev]
+            bad-key = true
+
+            [profile.dev.build-override]
+            bad-key-bo = true
+
+            [profile.dev.overrides.bar]
+            bad-key-bar = true
+        "#,
+        ).build();
+
+    p.cargo("build -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_unordered(
+            "\
+[WARNING] unused key `profile.asdf` in config file `[..].cargo/config`
+[WARNING] unused key `profile.test` in config file `[..].cargo/config`
+[WARNING] unused key `profile.dev.bad-key` in config file `[..].cargo/config`
+[WARNING] unused key `profile.dev.overrides.bar.bad-key-bar` in config file `[..].cargo/config`
+[WARNING] unused key `profile.dev.build-override.bad-key-bo` in config file `[..].cargo/config`
+[COMPILING] foo [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+fn profile_config_error_paths() {
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev]
+            opt-level = 3
+        "#,
+        ).file(
+            paths::home().join(".cargo/config"),
+            r#"
+            [profile.dev]
+            rpath = "foo"
+            "#,
+        ).build();
+
+    p.cargo("build -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+  error in [..].cargo/config: `profile.dev.rpath` expected true/false, but found a string
+",
+        ).run();
+}
+
+#[test]
+fn profile_config_validate_errors() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev.overrides.foo]
+            panic = "abort"
+        "#,
+        ).build();
+
+    p.cargo("build -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
+
+Caused by:
+  config profile `profile.dev` is not valid
+
+Caused by:
+  `panic` may not be specified in a profile override.
+",
+        ).run();
+}
+
+#[test]
+fn profile_config_syntax_errors() {
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev]
+            codegen-units = "foo"
+        "#,
+        ).build();
+
+    p.cargo("build -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at [..]
+
+Caused by:
+  error in [..].cargo/config: `profile.dev.codegen-units` expected an integer, but found a string
+",
+        ).run();
+}
+
+#[test]
+fn profile_config_override_spec_multiple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { path = "bar" }
+            "#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [profile.dev.overrides.bar]
+            opt-level = 3
+
+            [profile.dev.overrides."bar:0.5.0"]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "bar"
+            version = "0.5.0"
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    // Unfortunately this doesn't tell you which file, hopefully it's not too
+    // much of a problem.
+    p.cargo("build -v -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] multiple profile overrides in profile `dev` match package `bar v0.5.0 ([..])`
+found profile override specs: bar, bar:0.5.0",
+        ).run();
+}
+
+#[test]
+fn profile_config_all_options() {
+    // Ensure all profile options are supported.
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+        [profile.release]
+        opt-level = 1
+        debug = true
+        debug-assertions = true
+        overflow-checks = false
+        rpath = true
+        lto = true
+        codegen-units = 2
+        panic = "abort"
+        incremental = true
+        "#,
+        ).build();
+
+    p.cargo("build --release -v -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] \
+            -C opt-level=1 \
+            -C panic=abort \
+            -C codegen-units=2 \
+            -C debuginfo=2 \
+            -C debug-assertions=on \
+            -C overflow-checks=off [..]\
+            -C rpath [..]
+[FINISHED] release [optimized + debuginfo] [..]
+",
+        ).run();
+}
+
+#[test]
+fn profile_config_override_precedence() {
+    // Config values take precedence over manifest values.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = {path = "bar"}
+
+            [profile.dev]
+            codegen-units = 2
+
+            [profile.dev.overrides.bar]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            "#,
+        ).file("bar/src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev.overrides.bar]
+            opt-level = 2
+        "#,
+        ).build();
+
+    p.cargo("build -v -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..] -C opt-level=2 -C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..]-C codegen-units=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+        ).run();
+}
+
+#[test]
+fn profile_config_no_warn_unknown_override() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev.overrides.bar]
+            codegen-units = 4
+        "#,
+        ).build();
+
+    p.cargo("build -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_does_not_contain("[..]warning[..]")
+        .run();
+}
+
+#[test]
+fn profile_config_mixed_types() {
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [profile.dev]
+            opt-level = 3
+        "#,
+        ).file(
+            paths::home().join(".cargo/config"),
+            r#"
+            [profile.dev]
+            opt-level = 's'
+            "#,
+        ).build();
+
+    p.cargo("build -v -Z config-profile")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[..]-C opt-level=3 [..]")
+        .run();
+}
diff --git a/tests/testsuite/profile_overrides.rs b/tests/testsuite/profile_overrides.rs
new file mode 100644 (file)
index 0000000..6fb5551
--- /dev/null
@@ -0,0 +1,422 @@
+use support::{basic_lib_manifest, basic_manifest, project};
+
+#[test]
+fn profile_override_gated() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.dev.build-override]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  feature `profile-overrides` is required
+
+consider adding `cargo-features = [\"profile-overrides\"]` to the manifest
+",
+        ).run();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.dev.overrides."*"]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  feature `profile-overrides` is required
+
+consider adding `cargo-features = [\"profile-overrides\"]` to the manifest
+",
+        ).run();
+}
+
+#[test]
+fn profile_override_basic() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = {path = "bar"}
+
+            [profile.dev]
+            opt-level = 1
+
+            [profile.dev.overrides.bar]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar [..] -C opt-level=3 [..]`
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name foo [..] -C opt-level=1 [..]`
+[FINISHED] dev [optimized + debuginfo] target(s) in [..]",
+        ).run();
+}
+
+#[test]
+fn profile_override_warnings() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = {path = "bar"}
+
+            [profile.dev.overrides.bart]
+            opt-level = 3
+
+            [profile.dev.overrides.no-suggestion]
+            opt-level = 3
+
+            [profile.dev.overrides."bar:1.2.3"]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").masquerade_as_nightly_cargo().with_stderr_contains(
+            "\
+[WARNING] version or URL in profile override spec `bar:1.2.3` does not match any of the packages: bar v0.5.0 ([..])
+[WARNING] profile override spec `bart` did not match any packages
+
+Did you mean `bar`?
+[WARNING] profile override spec `no-suggestion` did not match any packages
+[COMPILING] [..]
+",
+        )
+        .run();
+}
+
+#[test]
+fn profile_override_dev_release_only() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = {path = "bar"}
+
+            [profile.test.overrides.bar]
+            opt-level = 3
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+Caused by:
+  Profile overrides may only be specified for `dev` or `release` profile, not `test`.
+",
+        ).run();
+}
+
+#[test]
+fn profile_override_bad_settings() {
+    let bad_values = [
+        (
+            "panic = \"abort\"",
+            "`panic` may not be specified in a profile override.",
+        ),
+        (
+            "lto = true",
+            "`lto` may not be specified in a profile override.",
+        ),
+        (
+            "rpath = true",
+            "`rpath` may not be specified in a profile override.",
+        ),
+        ("overrides = {}", "Profile overrides cannot be nested."),
+    ];
+    for &(ref snippet, ref expected) in bad_values.iter() {
+        let p = project()
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+                cargo-features = ["profile-overrides"]
+
+                [package]
+                name = "foo"
+                version = "0.0.1"
+
+                [dependencies]
+                bar = {{path = "bar"}}
+
+                [profile.dev.overrides.bar]
+                {}
+            "#,
+                    snippet
+                ),
+            ).file("src/lib.rs", "")
+            .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+            .file("bar/src/lib.rs", "")
+            .build();
+
+        p.cargo("build")
+            .masquerade_as_nightly_cargo()
+            .with_status(101)
+            .with_stderr_contains(format!("Caused by:\n  {}", expected))
+            .run();
+    }
+}
+
+#[test]
+fn profile_override_hierarchy() {
+    // Test that the precedence rules are correct for different types.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [workspace]
+            members = ["m1", "m2", "m3"]
+
+            [profile.dev]
+            codegen-units = 1
+
+            [profile.dev.overrides.m2]
+            codegen-units = 2
+
+            [profile.dev.overrides."*"]
+            codegen-units = 3
+
+            [profile.dev.build-override]
+            codegen-units = 4
+            "#)
+
+        // m1
+        .file("m1/Cargo.toml",
+            r#"
+            [package]
+            name = "m1"
+            version = "0.0.1"
+
+            [dependencies]
+            m2 = { path = "../m2" }
+            dep = { path = "../../dep" }
+            "#)
+        .file("m1/src/lib.rs", "extern crate m2; extern crate dep;")
+        .file("m1/build.rs", "fn main() {}")
+
+        // m2
+        .file("m2/Cargo.toml",
+            r#"
+            [package]
+            name = "m2"
+            version = "0.0.1"
+
+            [dependencies]
+            m3 = { path = "../m3" }
+
+            [build-dependencies]
+            m3 = { path = "../m3" }
+            dep = { path = "../../dep" }
+            "#)
+        .file("m2/src/lib.rs", "extern crate m3;")
+        .file("m2/build.rs", "extern crate m3; extern crate dep; fn main() {}")
+
+        // m3
+        .file("m3/Cargo.toml", &basic_lib_manifest("m3"))
+        .file("m3/src/lib.rs", "")
+        .build();
+
+    // dep (outside of workspace)
+    let _dep = project()
+        .at("dep")
+        .file("Cargo.toml", &basic_lib_manifest("dep"))
+        .file("src/lib.rs", "")
+        .build();
+
+    // Profiles should be:
+    // m3: 4 (as build.rs dependency)
+    // m3: 1 (as [profile.dev] as workspace member)
+    // dep: 3 (as [profile.dev.overrides."*"] as non-workspace member)
+    // m1 build.rs: 4 (as [profile.dev.build-override])
+    // m2 build.rs: 2 (as [profile.dev.overrides.m2])
+    // m2: 2 (as [profile.dev.overrides.m2])
+    // m1: 1 (as [profile.dev])
+
+    p.cargo("build -v").masquerade_as_nightly_cargo().with_stderr_unordered("\
+[COMPILING] m3 [..]
+[COMPILING] dep [..]
+[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=dep-info,link -C codegen-units=4 [..]
+[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs --color never --crate-type lib --emit=dep-info,link -C codegen-units=3 [..]
+[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=dep-info,link -C codegen-units=1 [..]
+[RUNNING] `rustc --crate-name build_script_build m1/build.rs --color never --crate-type bin --emit=dep-info,link -C codegen-units=4 [..]
+[COMPILING] m2 [..]
+[RUNNING] `rustc --crate-name build_script_build m2/build.rs --color never --crate-type bin --emit=dep-info,link -C codegen-units=2 [..]
+[RUNNING] `[..]/m1-[..]/build-script-build`
+[RUNNING] `[..]/m2-[..]/build-script-build`
+[RUNNING] `rustc --crate-name m2 m2/src/lib.rs --color never --crate-type lib --emit=dep-info,link -C codegen-units=2 [..]
+[COMPILING] m1 [..]
+[RUNNING] `rustc --crate-name m1 m1/src/lib.rs --color never --crate-type lib --emit=dep-info,link -C codegen-units=1 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+        )
+        .run();
+}
+
+#[test]
+fn profile_override_spec_multiple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [profile.dev.overrides.bar]
+            opt-level = 3
+
+            [profile.dev.overrides."bar:0.5.0"]
+            opt-level = 3
+            "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] multiple profile overrides in profile `dev` match package `bar v0.5.0 ([..])`
+found profile override specs: bar, bar:0.5.0",
+        ).run();
+}
+
+#[test]
+fn profile_override_spec() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["profile-overrides"]
+
+            [workspace]
+            members = ["m1", "m2"]
+
+            [profile.dev.overrides."dep:1.0.0"]
+            codegen-units = 1
+
+            [profile.dev.overrides."dep:2.0.0"]
+            codegen-units = 2
+            "#)
+
+        // m1
+        .file("m1/Cargo.toml",
+            r#"
+            [package]
+            name = "m1"
+            version = "0.0.1"
+
+            [dependencies]
+            dep = { path = "../../dep1" }
+            "#)
+        .file("m1/src/lib.rs", "extern crate dep;")
+
+        // m2
+        .file("m2/Cargo.toml",
+            r#"
+            [package]
+            name = "m2"
+            version = "0.0.1"
+
+            [dependencies]
+            dep = {path = "../../dep2" }
+            "#)
+        .file("m2/src/lib.rs", "extern crate dep;")
+
+        .build();
+
+    project()
+        .at("dep1")
+        .file("Cargo.toml", &basic_manifest("dep", "1.0.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    project()
+        .at("dep2")
+        .file("Cargo.toml", &basic_manifest("dep", "2.0.0"))
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[RUNNING] `rustc [..]dep1/src/lib.rs [..] -C codegen-units=1 [..]")
+        .with_stderr_contains("[RUNNING] `rustc [..]dep2/src/lib.rs [..] -C codegen-units=2 [..]")
+        .run();
+}
diff --git a/tests/testsuite/profile_targets.rs b/tests/testsuite/profile_targets.rs
new file mode 100644 (file)
index 0000000..cc8a226
--- /dev/null
@@ -0,0 +1,661 @@
+use support::is_nightly;
+use support::{basic_manifest, project, Project};
+
+// These tests try to exercise exactly which profiles are selected for every
+// target.
+
+fn all_target_project() -> Project {
+    // This abuses the `codegen-units` setting so that we can verify exactly
+    // which profile is used for each compiler invocation.
+    project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [build-dependencies]
+            bdep = { path = "bdep" }
+
+            [profile.dev]
+            codegen-units = 1
+            panic = "abort"
+            [profile.release]
+            codegen-units = 2
+            panic = "abort"
+            [profile.test]
+            codegen-units = 3
+            [profile.bench]
+            codegen-units = 4
+        "#,
+        )
+        .file("src/lib.rs", "extern crate bar;")
+        .file("src/main.rs", "extern crate foo; fn main() {}")
+        .file("examples/ex1.rs", "extern crate foo; fn main() {}")
+        .file("tests/test1.rs", "extern crate foo;")
+        .file("benches/bench1.rs", "extern crate foo;")
+        .file("build.rs", r#"
+            extern crate bdep;
+            fn main() {
+                eprintln!("foo custom build PROFILE={} DEBUG={} OPT_LEVEL={}",
+                    std::env::var("PROFILE").unwrap(),
+                    std::env::var("DEBUG").unwrap(),
+                    std::env::var("OPT_LEVEL").unwrap(),
+                );
+            }
+        "#)
+
+        // bar package
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+
+        // bdep package
+        .file("bdep/Cargo.toml", r#"
+            [package]
+            name = "bdep"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { path = "../bar" }
+        "#)
+        .file("bdep/src/lib.rs", "extern crate bar;")
+        .build()
+}
+
+#[test]
+fn profile_selection_build() {
+    let p = all_target_project();
+
+    // Build default targets.
+    // NOTES:
+    // - bdep `panic` is not set because it thinks `build.rs` is a plugin.
+    // - build_script_build is built without panic because it thinks `build.rs` is a plugin.
+    p.cargo("build -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+    p.cargo("build -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+        ).run();
+}
+
+#[test]
+fn profile_selection_build_release() {
+    let p = all_target_project();
+
+    // Build default targets, release.
+    p.cargo("build --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+").run();
+    p.cargo("build --release -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+",
+        ).run();
+}
+
+#[test]
+fn profile_selection_build_all_targets() {
+    let p = all_target_project();
+    // Build all explicit targets.
+    // NOTES
+    // - bdep `panic` is not set because it thinks `build.rs` is a plugin.
+    // - build_script_build is built without panic because it thinks
+    //   `build.rs` is a plugin.
+    // - build_script_build is being run two times.  Once for the `dev` and
+    //   `test` targets, once for the `bench` targets.
+    //   TODO: "PROFILE" says debug both times, though!
+    // - Benchmark dependencies are compiled in `dev` mode, which may be
+    //   surprising.  See https://github.com/rust-lang/cargo/issues/4929.
+    //
+    // - Dependency profiles:
+    //   Pkg  Target  Profile     Reason
+    //   ---  ------  -------     ------
+    //   bar  lib     dev         For foo-bin
+    //   bar  lib     dev-panic   For tests/benches and bdep
+    //   bdep lib     dev-panic   For foo build.rs
+    //   foo  custom  dev-panic
+    //
+    // - foo target list is:
+    //   Target   Profile    Mode
+    //   ------   -------    ----
+    //   lib      dev+panic  build  (a normal lib target)
+    //   lib      dev-panic  build  (used by tests/benches)
+    //   lib      test       test
+    //   lib      bench      test(bench)
+    //   test     test       test
+    //   bench    bench      test(bench)
+    //   bin      test       test
+    //   bin      bench      test(bench)
+    //   bin      dev        build
+    //   example  dev        build
+    p.cargo("build --all-targets -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=false OPT_LEVEL=3
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,link -C codegen-units=3 -C debuginfo=2 --test [..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,link -C codegen-units=3 -C debuginfo=2 --test [..]`
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,link -C codegen-units=3 -C debuginfo=2 --test [..]`
+[RUNNING] `rustc --crate-name bench1 benches/bench1.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]`
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]`
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+    p.cargo("build -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+        ).run();
+}
+
+// Verifies which profile (release vs. release-panic vs. bench) and which
+// rustc flags `cargo build --all-targets --release -vv` selects for every
+// target in the fixture project, then confirms a second identical build
+// is fully fresh (no recompilation).
+#[test]
+fn profile_selection_build_all_targets_release() {
+    let p = all_target_project();
+    // Build all explicit targets, release.
+    // NOTES
+    // - bdep `panic` is not set because it thinks `build.rs` is a plugin.
+    // - bar compiled twice.  It tries with and without panic, but the "is a
+    //   plugin" logic is forcing it to be cleared.
+    // - build_script_build is built without panic because it thinks
+    //   `build.rs` is a plugin.
+    // - build_script_build is being run two times.  Once for the `dev` and
+    //   `test` targets, once for the `bench` targets.
+    //   TODO: "PROFILE" says debug both times, though!
+    //
+    // - Dependency profiles:
+    //   Pkg  Target  Profile        Reason
+    //   ---  ------  -------        ------
+    //   bar  lib     release        For foo-bin
+    //   bar  lib     release-panic  For tests/benches and bdep
+    //   bdep lib     release-panic  For foo build.rs
+    //   foo  custom  release-panic
+    //
+    // - foo target list is:
+    //   Target   Profile        Mode
+    //   ------   -------        ----
+    //   lib      release+panic  build  (a normal lib target)
+    //   lib      release-panic  build  (used by tests/benches)
+    //   lib      bench          test   (bench/test de-duped)
+    //   test     bench          test
+    //   bench    bench          test
+    //   bin      bench          test   (bench/test de-duped)
+    //   bin      release        build
+    //   example  release        build
+    // The expected stderr lines below pin the exact -C flags (opt-level,
+    // panic, codegen-units) per target; order is not checked (unordered).
+    p.cargo("build --all-targets --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]`
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name bench1 benches/bench1.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]`
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]`
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]`
+[FINISHED] release [optimized] [..]
+").run();
+    // Second build: everything must be cached/fresh.
+    p.cargo("build --all-targets --release -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+",
+        ).run();
+}
+
+// Verifies profile selection for a default `cargo test -vv`: dev/dev-panic
+// for builds, `test` profile for test binaries, then confirms the second
+// run is fresh and re-runs the same test binaries and doctests.
+#[test]
+fn profile_selection_test() {
+    let p = all_target_project();
+    // Test default.
+    // NOTES:
+    // - Dependency profiles:
+    //   Pkg  Target  Profile    Reason
+    //   ---  ------  -------    ------
+    //   bar  lib     dev        For foo-bin
+    //   bar  lib     dev-panic  For tests/benches and bdep
+    //   bdep lib     dev-panic  For foo build.rs
+    //   foo  custom  dev-panic
+    //
+    // - foo target list is:
+    //   Target   Profile        Mode
+    //   ------   -------        ----
+    //   lib      dev-panic      build (for tests)
+    //   lib      dev            build (for bins)
+    //   lib      test           test
+    //   test     test           test
+    //   example  dev-panic      build
+    //   bin      test           test
+    //   bin      dev            build
+    //
+    p.cargo("test -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,link -C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,link -C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,link -C codegen-units=3 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]
+").run();
+    // Second run: compilation is fresh; tests and doctests still execute.
+    p.cargo("test -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]
+",
+        ).run();
+}
+
+// Same as profile_selection_test but for `cargo test --release -vv`:
+// release/release-panic profiles for builds, `bench` profile for the test
+// binaries, with a fresh second run.
+#[test]
+fn profile_selection_test_release() {
+    let p = all_target_project();
+    // Test default release.
+    // NOTES:
+    // - Dependency profiles:
+    //   Pkg  Target  Profile        Reason
+    //   ---  ------  -------        ------
+    //   bar  lib     release        For foo-bin
+    //   bar  lib     release-panic  For tests/benches and bdep
+    //   bdep lib     release-panic  For foo build.rs
+    //   foo  custom  release-panic
+    //
+    // - foo target list is:
+    //   Target   Profile        Mode
+    //   ------   -------        ----
+    //   lib      release-panic  build  (for tests)
+    //   lib      release        build  (for bins)
+    //   lib      bench          test
+    //   test     bench          test
+    //   example  release-panic  build
+    //   bin      bench          test
+    //   bin      release        build
+    //
+    p.cargo("test --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]`
+").run();
+    // Second run: fresh build, tests/doctests re-executed.
+    p.cargo("test --release -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/foo-[..]`
+[RUNNING] `[..]/deps/test1-[..]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]
+",
+        ).run();
+}
+
+// Verifies profile selection for `cargo bench -vv`: release-based builds
+// with the `bench` profile for benchmark binaries (run with `--bench`),
+// and a fully fresh second invocation.
+#[test]
+fn profile_selection_bench() {
+    let p = all_target_project();
+
+    // Bench default.
+    // NOTES:
+    // - Dependency profiles:
+    //   Pkg  Target  Profile        Reason
+    //   ---  ------  -------        ------
+    //   bar  lib     release        For foo-bin
+    //   bar  lib     release-panic  For tests/benches and bdep
+    //   bdep lib     release-panic  For foo build.rs
+    //   foo  custom  release-panic
+    //
+    // - foo target list is:
+    //   Target   Profile        Mode
+    //   ------   -------        ----
+    //   lib      release-panic  build (for benches)
+    //   lib      release        build (for bins)
+    //   lib      bench          test(bench)
+    //   bench    bench          test(bench)
+    //   bin      bench          test(bench)
+    //   bin      release        build
+    //
+    p.cargo("bench -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `[..]target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]
+[RUNNING] `rustc --crate-name bench1 benches/bench1.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,link -C opt-level=3 -C codegen-units=4 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/bench1-[..] --bench`
+").run();
+    // Second run: fresh build, benchmarks re-executed.
+    p.cargo("bench -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/foo-[..] --bench`
+[RUNNING] `[..]/deps/bench1-[..] --bench`
+",
+        ).run();
+}
+
+// Verifies profile selection for `cargo check --all-targets -vv`, where
+// most targets emit metadata only (`--emit=dep-info,metadata`), and that
+// the second check is fully fresh.  Nightly-gated pending rmeta support
+// for bins stabilizing in Rust 1.27 (see comment before the second run).
+#[test]
+fn profile_selection_check_all_targets() {
+    if !is_nightly() {
+        // This can be removed once 1.27 is stable, see below.
+        return;
+    }
+
+    let p = all_target_project();
+    // check
+    // NOTES:
+    // - Dependency profiles:
+    //   Pkg  Target  Profile    Action   Reason
+    //   ---  ------  -------    ------   ------
+    //   bar  lib     dev*       link     For bdep
+    //   bar  lib     dev-panic  metadata For tests/benches
+    //   bar  lib     dev        metadata For lib/bins
+    //   bdep lib     dev*       link     For foo build.rs
+    //   foo  custom  dev*       link     For build.rs
+    //
+    //   `*` = wants panic, but it is cleared when args are built.
+    //
+    // - foo target list is:
+    //   Target   Profile        Mode
+    //   ------   -------        ----
+    //   lib      dev            check
+    //   lib      dev-panic      check (for tests/benches)
+    //   lib      dev-panic      check-test (checking lib as a unittest)
+    //   example  dev            check
+    //   test     dev-panic      check-test
+    //   bench    dev-panic      check-test
+    //   bin      dev            check
+    //   bin      dev-panic      check-test (checking bin as a unittest)
+    //
+    p.cargo("check --all-targets -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep[..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name bench1 benches/bench1.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=dep-info,metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+    // Starting with Rust 1.27, rustc emits `rmeta` files for bins, so
+    // everything should be completely fresh.  Previously, bins were being
+    // rechecked.
+    // See https://github.com/rust-lang/rust/pull/49289 and
+    // https://github.com/rust-lang/cargo/issues/3624
+    p.cargo("check --all-targets -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+        ).run();
+}
+
+// Release variant of profile_selection_check_all_targets: verifies
+// `check --all-targets --release -vv` flags per target and freshness of
+// the second run.  Regression test for rust-lang/cargo#5218.
+#[test]
+fn profile_selection_check_all_targets_release() {
+    if !is_nightly() {
+        // See note in profile_selection_check_all_targets.
+        return;
+    }
+
+    let p = all_target_project();
+    // check --release
+    // https://github.com/rust-lang/cargo/issues/5218
+    // This is a pretty straightforward variant of
+    // `profile_selection_check_all_targets` that uses `release` instead of
+    // `dev` for all targets.
+    p.cargo("check --all-targets --release -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[COMPILING] bdep[..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `[..]target/release/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C opt-level=3 -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,metadata -C opt-level=3 -C codegen-units=2 --test [..]
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,metadata -C opt-level=3 -C codegen-units=2 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,metadata -C opt-level=3 -C codegen-units=2 --test [..]
+[RUNNING] `rustc --crate-name bench1 benches/bench1.rs [..]--emit=dep-info,metadata -C opt-level=3 -C codegen-units=2 --test [..]
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=dep-info,metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=dep-info,metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..]
+[FINISHED] release [optimized] [..]
+").run();
+
+    p.cargo("check --all-targets --release -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] release [optimized] [..]
+",
+        ).run();
+}
+
+// Verifies `check --all-targets --profile=test -vv`: every foo target is
+// checked in check-test mode with the dev-panic profile, and the second
+// run is fully fresh.
+#[test]
+fn profile_selection_check_all_targets_test() {
+    if !is_nightly() {
+        // See note in profile_selection_check_all_targets.
+        return;
+    }
+
+    let p = all_target_project();
+    // check --profile=test
+    // NOTES:
+    // - This doesn't actually use the "test" profile.  Everything uses dev.
+    //   It probably should use "test"???  Probably doesn't really matter.
+    // - Dependency profiles:
+    //   Pkg  Target  Profile    Action   Reason
+    //   ---  ------  -------    ------   ------
+    //   bar  lib     dev*       link     For bdep
+    //   bar  lib     dev-panic  metadata For tests/benches
+    //   bdep lib     dev*       link     For foo build.rs
+    //   foo  custom  dev*       link     For build.rs
+    //
+    //   `*` = wants panic, but it is cleared when args are built.
+    //
+    // - foo target list is:
+    //   Target   Profile    Mode
+    //   ------   -------    ----
+    //   lib      dev-panic  check-test (for tests/benches)
+    //   lib      dev-panic  check-test (checking lib as a unittest)
+    //   example  dev-panic  check-test
+    //   test     dev-panic  check-test
+    //   bench    dev-panic  check-test
+    //   bin      dev-panic  check-test
+    //
+    p.cargo("check --all-targets --profile=test -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep[..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name bench1 benches/bench1.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--emit=dep-info,metadata -C codegen-units=1 -C debuginfo=2 --test [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+
+    p.cargo("check --all-targets --profile=test -vv")
+        .with_stderr_unordered(
+            "\
+[FRESH] bar [..]
+[FRESH] bdep [..]
+[FRESH] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+",
+        ).run();
+}
+
+// Verifies profile selection for `cargo doc -vv`: dependencies needed by
+// the build script are linked with dev, while rustdoc documents bar and
+// foo; only a single invocation is checked (no freshness pass).
+#[test]
+fn profile_selection_doc() {
+    let p = all_target_project();
+    // doc
+    // NOTES:
+    // - Dependency profiles:
+    //   Pkg  Target  Profile    Action   Reason
+    //   ---  ------  -------    ------   ------
+    //   bar  lib     dev*       link     For bdep
+    //   bar  lib     dev        metadata For rustdoc
+    //   bdep lib     dev*       link     For foo build.rs
+    //   foo  custom  dev*       link     For build.rs
+    //
+    //   `*` = wants panic, but it is cleared when args are built.
+    p.cargo("doc -vv").with_stderr_unordered("\
+[COMPILING] bar [..]
+[DOCUMENTING] bar [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `rustdoc --crate-name bar bar/src/lib.rs [..]
+[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=dep-info,metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] bdep [..]
+[RUNNING] `rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[COMPILING] foo [..]
+[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=dep-info,link -C codegen-units=1 -C debuginfo=2 [..]
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build`
+[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0
+[DOCUMENTING] foo [..]
+[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]
+[FINISHED] dev [unoptimized + debuginfo] [..]
+").run();
+}
diff --git a/tests/testsuite/profiles.rs b/tests/testsuite/profiles.rs
new file mode 100644 (file)
index 0000000..cac62cd
--- /dev/null
@@ -0,0 +1,358 @@
+use std::env;
+
+use support::is_nightly;
+use support::project;
+
+// Verifies that [profile.dev] settings (opt-level = 1, debug = false,
+// rpath = true) are reflected in the rustc command line: -C opt-level=1,
+// no -C debuginfo, -C rpath, plus implied -C debug-assertions=on.
+#[test]
+fn profile_overrides() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [profile.dev]
+            opt-level = 1
+            debug = false
+            rpath = true
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C opt-level=1 \
+        -C debug-assertions=on \
+        -C metadata=[..] \
+        -C rpath \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+// Verifies that an explicit `opt-level = 0` in [profile.dev] produces no
+// -C opt-level flag at all (0 is the default and is omitted).
+#[test]
+fn opt_level_override_0() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [profile.dev]
+            opt-level = 0
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] [..] target(s) in [..]
+",
+        ).run();
+}
+
+// Verifies that `debug = 1` in [profile.dev] maps to `-C debuginfo=1`
+// (line-tables-only debug info) on the rustc command line.
+#[test]
+fn debug_override_1() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [profile.dev]
+            debug = 1
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C debuginfo=1 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] [..] target(s) in [..]
+",
+        ).run();
+}
+
+// Helper for opt_level_overrides: builds a project whose [profile.dev]
+// sets `opt-level = {profile_level}` (a TOML literal, e.g. `1` or `"s"`)
+// and asserts that rustc is invoked with `-C opt-level={rustc_level}`.
+fn check_opt_level_override(profile_level: &str, rustc_level: &str) {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [profile.dev]
+            opt-level = {level}
+        "#,
+                level = profile_level
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+    p.cargo("build -v")
+        .with_stderr(&format!(
+            "\
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C opt-level={level} \
+        -C debuginfo=2 \
+        -C debug-assertions=on \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] [..] target(s) in [..]
+",
+            level = rustc_level
+        )).run();
+}
+
+// Drives check_opt_level_override over numeric levels 1-3 and the string
+// levels "s"/"z" (size-optimizing).  Nightly-gated — presumably because
+// opt-level s/z was not yet stable when this was written; TODO confirm.
+#[test]
+fn opt_level_overrides() {
+    if !is_nightly() {
+        return;
+    }
+
+    for &(profile_level, rustc_level) in &[
+        ("1", "1"),
+        ("2", "2"),
+        ("3", "3"),
+        ("\"s\"", "s"),
+        ("\"z\"", "z"),
+    ] {
+        check_opt_level_override(profile_level, rustc_level)
+    }
+}
+
+// Verifies that the top-level package's [profile.release] (opt-level = 1,
+// debug = true) overrides the dependency's own conflicting profile
+// (opt-level = 0, debug = false): both crates compile with -C opt-level=1
+// -C debuginfo=2.  Also exercises a dylib+rlib dependency (two --extern
+// flags, -C prefer-dynamic).
+#[test]
+fn top_level_overrides_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+
+            name = "test"
+            version = "0.0.0"
+            authors = []
+
+            [profile.release]
+            opt-level = 1
+            debug = true
+
+            [dependencies.foo]
+            path = "foo"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+
+            name = "foo"
+            version = "0.0.0"
+            authors = []
+
+            [profile.release]
+            opt-level = 0
+            debug = false
+
+            [lib]
+            name = "foo"
+            crate_type = ["dylib", "rlib"]
+        "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+    p.cargo("build -v --release")
+        .with_stderr(&format!(
+            "\
+[COMPILING] foo v0.0.0 ([CWD]/foo)
+[RUNNING] `rustc --crate-name foo foo/src/lib.rs --color never \
+        --crate-type dylib --crate-type rlib \
+        --emit=dep-info,link \
+        -C prefer-dynamic \
+        -C opt-level=1 \
+        -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [CWD]/target/release/deps \
+        -L dependency=[CWD]/target/release/deps`
+[COMPILING] test v0.0.0 ([CWD])
+[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C opt-level=1 \
+        -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/release/deps \
+        --extern foo=[CWD]/target/release/deps/\
+                     {prefix}foo[..]{suffix} \
+        --extern foo=[CWD]/target/release/deps/libfoo.rlib`
+[FINISHED] release [optimized + debuginfo] target(s) in [..]
+",
+            // Platform-specific dynamic-library naming (lib…so / …dll / …dylib).
+            prefix = env::consts::DLL_PREFIX,
+            suffix = env::consts::DLL_SUFFIX
+        )).run();
+}
+
+// Verifies that a [profile.dev] section in a non-root workspace member's
+// manifest is ignored with a warning, and the root's profile (debug =
+// false → "dev [unoptimized]") is the one applied when building the
+// member from its own directory.
+#[test]
+fn profile_in_non_root_manifest_triggers_a_warning() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+
+            [profile.dev]
+            debug = false
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+
+            [profile.dev]
+            opt-level = 1
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .cwd(p.root().join("bar"))
+        .with_stderr(
+            "\
+[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root:
+package:   [..]
+workspace: [..]
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized] target(s) in [..]",
+        ).run();
+}
+
+// Verifies that a [profile.dev] section in a virtual (members-only)
+// workspace manifest is honored when building a member: opt-level = 1
+// with debug = false yields "dev [optimized]" with no warning.
+#[test]
+fn profile_in_virtual_manifest_works() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+
+            [profile.dev]
+            opt-level = 1
+            debug = false
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v")
+        .cwd(p.root().join("bar"))
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [optimized] target(s) in [..]",
+        ).run();
+}
+
+// Verifies that setting `panic = "abort"` in [profile.test] and
+// [profile.bench] emits a warning for each (the setting is ignored there,
+// since test/bench harnesses require unwinding).
+#[test]
+fn profile_panic_test_bench() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [profile.test]
+            panic = "abort"
+
+            [profile.bench]
+            panic = "abort"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr_contains(
+            "\
+[WARNING] `panic` setting is ignored for `test` profile
+[WARNING] `panic` setting is ignored for `bench` profile
+",
+        ).run();
+}
+
+// Verifies that a [profile.doc] section produces the deprecation warning
+// ("profile `doc` is deprecated and has no effect") during a build.
+#[test]
+fn profile_doc_deprecated() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [profile.doc]
+            opt-level = 0
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr_contains("[WARNING] profile `doc` is deprecated and has no effect")
+        .run();
+}
diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs
new file mode 100644 (file)
index 0000000..53b6cb3
--- /dev/null
@@ -0,0 +1,788 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+
+use flate2::read::GzDecoder;
+use support::git::repo;
+use support::paths;
+use support::{basic_manifest, project, publish};
+use tar::Archive;
+
+#[test]
+fn simple() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --no-verify --index")
+        .arg(publish::registry().to_string())
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[UPLOADING] foo v0.0.1 ([CWD])
+",
+            reg = publish::registry_path().to_str().unwrap()
+        )).run();
+
+    let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
+    // Skip the metadata payload and the size of the tarball
+    let mut sz = [0; 4];
+    assert_eq!(f.read(&mut sz).unwrap(), 4);
+    let sz = (u32::from(sz[0]) << 0)
+        | (u32::from(sz[1]) << 8)
+        | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
+
+    // Verify the tarball
+    let mut rdr = GzDecoder::new(f);
+    assert_eq!(
+        rdr.header().unwrap().filename().unwrap(),
+        b"foo-0.0.1.crate"
+    );
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for file in ar.entries().unwrap() {
+        let file = file.unwrap();
+        let fname = file.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            file.header().path()
+        );
+    }
+}
+
+#[test]
+fn old_token_location() {
+    publish::setup();
+
+    // publish::setup puts a token in this file.
+    fs::remove_file(paths::root().join(".cargo/config")).unwrap();
+
+    let credentials = paths::root().join("home/.cargo/credentials");
+    File::create(credentials)
+        .unwrap()
+        .write_all(br#"token = "api-token""#)
+        .unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --no-verify --index")
+        .arg(publish::registry().to_string())
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[UPLOADING] foo v0.0.1 ([CWD])
+",
+            reg = publish::registry_path().to_str().unwrap()
+        )).run();
+
+    let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
+    // Skip the metadata payload and the size of the tarball
+    let mut sz = [0; 4];
+    assert_eq!(f.read(&mut sz).unwrap(), 4);
+    let sz = (u32::from(sz[0]) << 0)
+        | (u32::from(sz[1]) << 8)
+        | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
+
+    // Verify the tarball
+    let mut rdr = GzDecoder::new(f);
+    assert_eq!(
+        rdr.header().unwrap().filename().unwrap(),
+        b"foo-0.0.1.crate"
+    );
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for file in ar.entries().unwrap() {
+        let file = file.unwrap();
+        let fname = file.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            file.header().path()
+        );
+    }
+}
+
+// TODO: Deprecated
+// remove once it has been decided --host can be removed
+#[test]
+fn simple_with_host() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --no-verify --host")
+        .arg(publish::registry().to_string())
+        .with_stderr(&format!(
+            "\
+[WARNING] The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+wants the location of the index. Please use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.
+[UPDATING] `{reg}` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[UPLOADING] foo v0.0.1 ([CWD])
+",
+            reg = publish::registry_path().to_str().unwrap()
+        )).run();
+
+    let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
+    // Skip the metadata payload and the size of the tarball
+    let mut sz = [0; 4];
+    assert_eq!(f.read(&mut sz).unwrap(), 4);
+    let sz = (u32::from(sz[0]) << 0)
+        | (u32::from(sz[1]) << 8)
+        | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
+
+    // Verify the tarball
+    let mut rdr = GzDecoder::new(f);
+    assert_eq!(
+        rdr.header().unwrap().filename().unwrap(),
+        b"foo-0.0.1.crate"
+    );
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for file in ar.entries().unwrap() {
+        let file = file.unwrap();
+        let fname = file.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            file.header().path()
+        );
+    }
+}
+
+// TODO: Deprecated
+// remove once it has been decided --host can be removed
+#[test]
+fn simple_with_index_and_host() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --no-verify --index")
+        .arg(publish::registry().to_string())
+        .arg("--host")
+        .arg(publish::registry().to_string())
+        .with_stderr(&format!(
+            "\
+[WARNING] The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+wants the location of the index. Please use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.
+[UPDATING] `{reg}` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[UPLOADING] foo v0.0.1 ([CWD])
+",
+            reg = publish::registry_path().to_str().unwrap()
+        )).run();
+
+    let mut f = File::open(&publish::upload_path().join("api/v1/crates/new")).unwrap();
+    // Skip the metadata payload and the size of the tarball
+    let mut sz = [0; 4];
+    assert_eq!(f.read(&mut sz).unwrap(), 4);
+    let sz = (u32::from(sz[0]) << 0)
+        | (u32::from(sz[1]) << 8)
+        | (u32::from(sz[2]) << 16)
+        | (u32::from(sz[3]) << 24);
+    f.seek(SeekFrom::Current(i64::from(sz) + 4)).unwrap();
+
+    // Verify the tarball
+    let mut rdr = GzDecoder::new(f);
+    assert_eq!(
+        rdr.header().unwrap().filename().unwrap(),
+        b"foo-0.0.1.crate"
+    );
+    let mut contents = Vec::new();
+    rdr.read_to_end(&mut contents).unwrap();
+    let mut ar = Archive::new(&contents[..]);
+    for file in ar.entries().unwrap() {
+        let file = file.unwrap();
+        let fname = file.header().path_bytes();
+        let fname = &*fname;
+        assert!(
+            fname == b"foo-0.0.1/Cargo.toml"
+                || fname == b"foo-0.0.1/Cargo.toml.orig"
+                || fname == b"foo-0.0.1/src/main.rs",
+            "unexpected filename: {:?}",
+            file.header().path()
+        );
+    }
+}
+
+// Publishing a crate with a git-sourced dependency must fail (exit 101) with
+// an error explaining that crates.io dependencies cannot come from a repo.
+#[test]
+fn git_deps() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+
+            [dependencies.foo]
+            git = "git://path/to/nowhere"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish -v --no-verify --index")
+        .arg(publish::registry().to_string())
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[ERROR] crates cannot be published to crates.io with dependencies sourced from \
+a repository\neither publish `foo` as its own crate on crates.io and \
+specify a crates.io version as a dependency or pull it into this \
+repository and specify it with a path and version\n\
+(crate `foo` has repository path `git://path/to/nowhere`)\
+",
+        ).run();
+}
+
+// A path dependency without an explicit `version` key must abort the publish
+// (exit 101): registries need a version to resolve the dependency.
+#[test]
+fn path_dependency_no_version() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[ERROR] all path dependencies must have a version specified when publishing.
+dependency `bar` does not specify a version
+",
+        ).run();
+}
+
+// `publish = false` in the manifest must block publishing entirely, before
+// the index is even updated (no [UPDATING] line in the expected stderr).
+#[test]
+fn unpublishable_crate() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish = false
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] some crates cannot be published.
+`foo` is marked as unpublishable
+",
+        ).run();
+}
+
+// An untracked/uncommitted file (`bar`) in the git working directory must
+// abort the publish with exit 101 and suggest `--allow-dirty`.
+#[test]
+fn dont_publish_dirty() {
+    publish::setup();
+    // `bar` is created outside the committed repo below, so it shows up dirty.
+    let p = project().file("bar", "").build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+error: 1 files in the working directory contain changes that were not yet \
+committed into git:
+
+bar
+
+to proceed despite this, pass the `--allow-dirty` flag
+",
+        ).run();
+}
+
+// With a fully committed git repository and complete metadata, publish must
+// succeed (the default exit status is asserted by `.run()`).
+#[test]
+fn publish_clean() {
+    publish::setup();
+
+    let p = project().build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .run();
+}
+
+// Publishing must work when the crate lives in a subdirectory (`bar/`) of the
+// git repository and cargo is invoked from that subdirectory.
+#[test]
+fn publish_in_sub_repo() {
+    publish::setup();
+
+    let p = project().no_manifest().file("baz", "").build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish")
+        .cwd(p.root().join("bar"))
+        .arg("--index")
+        .arg(publish::registry().to_string())
+        .run();
+}
+
+// An uncommitted file (`baz`) that is covered by .gitignore must not count as
+// a dirty working directory, so the publish succeeds.
+#[test]
+fn publish_when_ignored() {
+    publish::setup();
+
+    let p = project().file("baz", "").build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(".gitignore", "baz")
+        .build();
+
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .run();
+}
+
+// When the crate's whole directory (`bar`) is gitignored, its uncommitted
+// files (added via `nocommit_file`) must not block publishing from `bar/`.
+#[test]
+fn ignore_when_crate_ignored() {
+    publish::setup();
+
+    let p = project().no_manifest().file("bar/baz", "").build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .file(".gitignore", "bar")
+        .nocommit_file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).nocommit_file("bar/src/main.rs", "fn main() {}");
+    p.cargo("publish")
+        .cwd(p.root().join("bar"))
+        .arg("--index")
+        .arg(publish::registry().to_string())
+        .run();
+}
+
+// A repo whose manifest and sources are present but never committed (all via
+// `nocommit_file`) must be rejected as dirty: publish exits with 101.
+#[test]
+fn new_crate_rejected() {
+    publish::setup();
+
+    let p = project().file("baz", "").build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .nocommit_file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            repository = "foo"
+        "#,
+        ).nocommit_file("src/main.rs", "fn main() {}");
+    p.cargo("publish --index")
+        .arg(publish::registry().to_string())
+        .with_status(101)
+        .run();
+}
+
+// `publish --dry-run` must go through packaging and verification, warn that
+// the upload is aborted, and leave no upload file behind on the registry.
+#[test]
+fn dry_run() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --dry-run --index")
+        .arg(publish::registry().to_string())
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[WARNING] manifest has no documentation, [..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[UPLOADING] foo v0.0.1 ([CWD])
+[WARNING] aborting upload due to dry run
+",
+        ).run();
+
+    // Ensure the API request wasn't actually made
+    assert!(!publish::upload_path().join("api/v1/crates/new").exists());
+}
+
+// A `publish` list (anything other than true/false) requires the unstable
+// `alternative-registries` feature; without the cargo-features opt-in the
+// manifest must fail to parse even on nightly.
+#[test]
+fn block_publish_feature_not_enabled() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish = [
+                "test"
+            ]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  the `publish` manifest key is unstable for anything other than a value of true or false
+
+Caused by:
+  feature `alternative-registries` is required
+
+consider adding `cargo-features = [\"alternative-registries\"]` to the manifest
+",
+        ).run();
+}
+
+// Publishing to a registry (`alternative`) that is not in the manifest's
+// `publish` allow-list must be rejected as unpublishable.
+#[test]
+fn registry_not_in_publish_list() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish = [
+                "test"
+            ]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish")
+        .masquerade_as_nightly_cargo()
+        .arg("--registry")
+        .arg("alternative")
+        .arg("-Zunstable-options")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] some crates cannot be published.
+`foo` is marked as unpublishable
+",
+        ).run();
+}
+
+// An empty `publish = []` allow-list means no registry is permitted, so any
+// publish attempt must fail as unpublishable.
+#[test]
+fn publish_empty_list() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish = []
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] some crates cannot be published.
+`foo` is marked as unpublishable
+",
+        ).run();
+}
+
+// The converse of `registry_not_in_publish_list`: when `alternative` IS in
+// the `publish` allow-list, publishing to it must succeed.
+#[test]
+fn publish_allowed_registry() {
+    publish::setup();
+
+    let p = project().build();
+
+    let _ = repo(&paths::root().join("foo"))
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            documentation = "foo"
+            homepage = "foo"
+            publish = ["alternative"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .run();
+}
+
+// `publish = []` must also block publishing when a --registry is explicitly
+// named on the command line (exit 101, unpublishable).
+#[test]
+fn block_publish_no_registry() {
+    publish::setup();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["alternative-registries"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            publish = []
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("publish --registry alternative -Zunstable-options")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] some crates cannot be published.
+`foo` is marked as unpublishable
+",
+        ).run();
+}
diff --git a/tests/testsuite/read_manifest.rs b/tests/testsuite/read_manifest.rs
new file mode 100644 (file)
index 0000000..c116f2a
--- /dev/null
@@ -0,0 +1,103 @@
+use support::{basic_bin_manifest, main_file, project};
+
+// Expected JSON emitted by `cargo read-manifest` for the shared test project;
+// `[..]` spans are wildcards matched by the harness's `with_json`.
+static MANIFEST_OUTPUT: &'static str = r#"
+{
+    "authors": [
+        "wycats@example.com"
+    ],
+    "categories": [],
+    "name":"foo",
+    "readme": null,
+    "repository": null,
+    "version":"0.5.0",
+    "id":"foo[..]0.5.0[..](path+file://[..]/foo)",
+    "keywords": [],
+    "license": null,
+    "license_file": null,
+    "description": null,
+    "edition": "2015",
+    "source":null,
+    "dependencies":[],
+    "targets":[{
+        "kind":["bin"],
+        "crate_types":["bin"],
+        "edition": "2015",
+        "name":"foo",
+        "src_path":"[..]/foo/src/foo.rs"
+    }],
+    "features":{},
+    "manifest_path":"[..]Cargo.toml",
+    "metadata": null
+}"#;
+
+// `read-manifest --manifest-path` accepts a path relative to the cwd (the
+// project's parent directory here) that ends in Cargo.toml.
+#[test]
+fn cargo_read_manifest_path_to_cargo_toml_relative() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("read-manifest --manifest-path foo/Cargo.toml")
+        .cwd(p.root().parent().unwrap())
+        .with_json(MANIFEST_OUTPUT)
+        .run();
+}
+
+// Same as the relative case, but the manifest path is passed as an absolute
+// path built from the project root.
+#[test]
+fn cargo_read_manifest_path_to_cargo_toml_absolute() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("read-manifest --manifest-path")
+        .arg(p.root().join("Cargo.toml"))
+        .cwd(p.root().parent().unwrap())
+        .with_json(MANIFEST_OUTPUT)
+        .run();
+}
+
+// Passing a relative directory (not a Cargo.toml file) as --manifest-path
+// must fail with exit 101 and an explanatory error.
+#[test]
+fn cargo_read_manifest_path_to_cargo_toml_parent_relative() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("read-manifest --manifest-path foo")
+        .cwd(p.root().parent().unwrap())
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] the manifest-path must be \
+             a path to a Cargo.toml file",
+        ).run();
+}
+
+// Passing an absolute directory (the project root) as --manifest-path must
+// fail the same way as the relative-directory case.
+#[test]
+fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("read-manifest --manifest-path")
+        .arg(p.root())
+        .cwd(p.root().parent().unwrap())
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] the manifest-path must be \
+             a path to a Cargo.toml file",
+        ).run();
+}
+
+// With no --manifest-path, `read-manifest` finds the Cargo.toml in the
+// current working directory (the project root by default).
+#[test]
+fn cargo_read_manifest_cwd() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("read-manifest")
+        .with_json(MANIFEST_OUTPUT)
+        .run();
+}
diff --git a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs
new file mode 100644 (file)
index 0000000..804eba4
--- /dev/null
@@ -0,0 +1,1784 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::PathBuf;
+
+use cargo::util::paths::remove_dir_all;
+use support::cargo_process;
+use support::git;
+use support::paths::{self, CargoPathExt};
+use support::registry::{self, Package, Dependency};
+use support::{basic_manifest, project};
+use url::Url;
+
+// Filesystem root of this module's local test registry.
+fn registry_path() -> PathBuf {
+    paths::root().join("registry")
+}
+fn registry() -> Url {
+    Url::from_file_path(&*registry_path()).ok().unwrap()
+}
+
+// Basic registry dependency flow: first build updates the index, downloads
+// and compiles `bar`; after `cargo clean` a rebuild compiles again but must
+// NOT re-download (no [DOWNLOADING]/[DOWNLOADED] lines the second time).
+#[test]
+fn simple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            reg = registry::registry_path().to_str().unwrap()
+        )).run();
+
+    p.cargo("clean").run();
+
+    // Don't download a second time
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// Transitive registry dependencies: `bar` depends on `baz`, so a build must
+// download both and compile them in dependency order (baz, bar, foo).
+#[test]
+fn deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1").dep("baz", "*").publish();
+
+    p.cargo("build")
+        .with_stderr(&format!(
+            "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] baz v0.0.1
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            reg = registry::registry_path().to_str().unwrap()
+        )).run();
+}
+
+// Depending on a package that is not in the (non-empty) registry must fail
+// with "no matching package named ... found" and exit 101.
+#[test]
+fn nonexistent() {
+    Package::new("init", "0.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            nonexistent = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+error: no matching package named `nonexistent` found
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+}
+
+// Registry lookup is case-sensitive: `Init` does not resolve to the published
+// `init`, but the error must suggest the correctly-cased name.
+#[test]
+fn wrong_case() {
+    Package::new("init", "0.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            Init = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // #5678 to make this work
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+error: no matching package named `Init` found
+location searched: registry [..]
+did you mean: init
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+}
+
+// `_` vs `-` is significant: `mis_hyphenated` does not match the published
+// `mis-hyphenated`, but the error must suggest the hyphenated name.
+#[test]
+fn mis_hyphenated() {
+    Package::new("mis-hyphenated", "0.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            mis_hyphenated = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    // #2775 to make this work
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+error: no matching package named `mis_hyphenated` found
+location searched: registry [..]
+did you mean: mis-hyphenated
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+}
+
+// No published version satisfies `>= 1.0.0`: the error lists the rejected
+// candidates, and once more than three exist the list is elided with "...".
+#[test]
+fn wrong_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = ">= 1.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("foo", "0.0.1").publish();
+    Package::new("foo", "0.0.2").publish();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to select a version for the requirement `foo = \">= 1.0.0\"`
+  candidate versions found which didn't match: 0.0.2, 0.0.1
+  location searched: `[..]` index (which is replacing registry `[..]`)
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+
+    Package::new("foo", "0.0.3").publish();
+    Package::new("foo", "0.0.4").publish();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to select a version for the requirement `foo = \">= 1.0.0\"`
+  candidate versions found which didn't match: 0.0.4, 0.0.3, 0.0.2, ...
+  location searched: `[..]` index (which is replacing registry `[..]`)
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+}
+
+// After publishing, the .crate archive is truncated to zero bytes (the
+// `File::create` on `archive_dst`), so the download must fail checksum
+// verification with exit 101.
+#[test]
+fn bad_cksum() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bad-cksum = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    let pkg = Package::new("bad-cksum", "0.0.1");
+    pkg.publish();
+    // Truncate the archive so its bytes no longer match the index checksum.
+    t!(File::create(&pkg.archive_dst()));
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..] index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bad-cksum [..]
+[ERROR] failed to download replaced source registry `https://[..]`
+
+Caused by:
+  failed to verify the checksum of `bad-cksum v0.0.1 (registry `[ROOT][..]`)`
+",
+        ).run();
+}
+
+// A build that fails because `notyet` isn't published yet must succeed on a
+// later build after the package is published (the index is re-updated).
+#[test]
+fn update_registry() {
+    Package::new("init", "0.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            notyet = ">= 0.0.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: no matching package named `notyet` found
+location searched: registry `[..]`
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+
+    Package::new("notyet", "0.0.1").publish();
+
+    p.cargo("build")
+        .with_stderr(format!(
+            "\
+[UPDATING] `{reg}` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] notyet v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] notyet v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+            reg = registry::registry_path().to_str().unwrap()
+        )).run();
+}
+
+// `cargo package` verification fails when a path dependency's registry
+// counterpart does not exist yet, and succeeds after it is published.
+#[test]
+fn package_with_path_deps() {
+    Package::new("init", "0.0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            repository = "bar"
+
+            [dependencies.notyet]
+            version = "0.0.1"
+            path = "notyet"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("notyet/Cargo.toml", &basic_manifest("notyet", "0.0.1"))
+        .file("notyet/src/lib.rs", "")
+        .build();
+
+    p.cargo("package -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] failed to verify package tarball
+
+Caused by:
+  no matching package named `notyet` found
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+
+    Package::new("notyet", "0.0.1").publish();
+
+    p.cargo("package")
+        .with_stderr(
+            "\
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] notyet v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] notyet v0.0.1
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// Once Cargo.lock pins bar 0.0.1, publishing bar 0.0.2 must not change the
+// build: the second `cargo build` is a no-op (empty stdout).
+#[test]
+fn lockfile_locks() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+
+    // Backdate mtimes so a rebuild would be detectable, then publish a newer bar.
+    p.root().move_into_the_past();
+    Package::new("bar", "0.0.2").publish();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+// Same as lockfile_locks, but the lockfile must also pin the transitive
+// dependency (baz) against newly published versions.
+#[test]
+fn lockfile_locks_transitively() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1").dep("baz", "*").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] baz v0.0.1
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+
+    p.root().move_into_the_past();
+    Package::new("baz", "0.0.2").publish();
+    Package::new("bar", "0.0.2").dep("baz", "*").publish();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+// Yanked versions (bar 0.0.2 / baz 0.0.2) are skipped by resolution; the
+// build picks the non-yanked 0.0.1 versions instead.
+#[test]
+fn yanks_are_not_used() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("baz", "0.0.2").yanked(true).publish();
+    Package::new("bar", "0.0.1").dep("baz", "*").publish();
+    Package::new("bar", "0.0.2")
+        .dep("baz", "*")
+        .yanked(true)
+        .publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] baz v0.0.1
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// A dependency with an exact requirement (`=0.0.2`) on a yanked version
+// cannot be resolved; the error lists only the non-yanked candidate.
+#[test]
+fn relying_on_a_yank_is_bad() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("baz", "0.0.2").yanked(true).publish();
+    Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: failed to select a version for the requirement `baz = \"= 0.0.2\"`
+  candidate versions found which didn't match: 0.0.1
+  location searched: `[..]` index (which is replacing registry `[..]`)
+required by package `bar v0.0.1`
+    ... which is depended on by `foo [..]`
+",
+        ).run();
+}
+
+// A version already recorded in Cargo.lock still builds after being yanked,
+// but `cargo update` (which must re-resolve) fails to find it.
+#[test]
+fn yanks_in_lockfiles_are_ok() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    p.cargo("build").run();
+
+    // Wipe the index entry for bar before republishing it as yanked.
+    registry::registry_path().join("3").rm_rf();
+
+    Package::new("bar", "0.0.1").yanked(true).publish();
+
+    p.cargo("build").with_stdout("").run();
+
+    p.cargo("update")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: no matching package named `bar` found
+location searched: registry [..]
+required by package `foo v0.0.1 ([..])`
+",
+        ).run();
+}
+
+// After the local registry cache is deleted, a build with an existing
+// lockfile re-downloads the locked version without recompiling anything.
+#[test]
+fn update_with_lockfile_if_packages_missing() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+    p.cargo("build").run();
+    p.root().move_into_the_past();
+
+    // Remove the downloaded crates; the lockfile still pins bar 0.0.1.
+    paths::home().join(".cargo/registry").rm_rf();
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// Exercises `cargo update -p`: pinning with --precise, updating to the
+// latest version, and adding/removing transitive entries ([ADDING]/[REMOVING])
+// in the lockfile as the dependency's own requirements change.
+#[test]
+fn update_lockfile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    println!("0.0.1");
+    Package::new("bar", "0.0.1").publish();
+    p.cargo("build").run();
+
+    Package::new("bar", "0.0.2").publish();
+    Package::new("bar", "0.0.3").publish();
+    paths::home().join(".cargo/registry").rm_rf();
+    println!("0.0.2 update");
+    p.cargo("update -p bar --precise 0.0.2")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.1 -> v0.0.2
+",
+        ).run();
+
+    println!("0.0.2 build");
+    p.cargo("build")
+        .with_stderr(
+            "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.2 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.2
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+
+    println!("0.0.3 update");
+    p.cargo("update -p bar")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.2 -> v0.0.3
+",
+        ).run();
+
+    println!("0.0.3 build");
+    p.cargo("build")
+        .with_stderr(
+            "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.3 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.3
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+
+    println!("new dependencies update");
+    Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish();
+    Package::new("spam", "0.2.5").publish();
+    p.cargo("update -p bar")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.3 -> v0.0.4
+[ADDING] spam v0.2.5
+",
+        ).run();
+
+    println!("new dependencies update");
+    Package::new("bar", "0.0.5").publish();
+    p.cargo("update -p bar")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] bar v0.0.4 -> v0.0.5
+[REMOVING] spam v0.2.5
+",
+        ).run();
+}
+
+// `cargo update` combined with the (nightly-only) -Zoffline flag must be
+// rejected with an explanatory error.
+#[test]
+fn update_offline() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("update -Zoffline")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr("error: you can't update in the offline mode[..]")
+        .run();
+}
+
+// A dev-dependency of a dependency (bar's dev-dep baz) is neither downloaded
+// nor compiled when building the top-level package.
+#[test]
+fn dev_dependency_not_used() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("baz", "0.0.1").publish();
+    Package::new("bar", "0.0.1").dev_dep("baz", "*").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// `cargo login` succeeds even when the home directory has no pre-existing
+// .cargo directory.
+#[test]
+fn login_with_no_cargo_dir() {
+    let home = paths::home().join("new-home");
+    t!(fs::create_dir(&home));
+    cargo_process("login foo -v").run();
+}
+
+// Logging in with a shorter token after a longer one must not leave stale
+// trailing bytes in the credentials file (long -> short -> long round-trip).
+#[test]
+fn login_with_differently_sized_token() {
+    // Verify that the configuration file gets properly truncated.
+    let home = paths::home().join("new-home");
+    t!(fs::create_dir(&home));
+    cargo_process("login lmaolmaolmao -v").run();
+    cargo_process("login lmao -v").run();
+    cargo_process("login lmaolmaolmao -v").run();
+}
+
+// `cargo publish` must fail when the manifest's `license-file` points at a
+// path that does not exist in the package.
+#[test]
+fn bad_license_file() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license-file = "foo"
+            description = "bar"
+            repository = "baz"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p.cargo("publish -v --index")
+        .arg(registry().to_string())
+        .with_status(101)
+        .with_stderr_contains("[ERROR] the license file `foo` does not exist")
+        .run();
+}
+
+// Editing a path dependency's manifest to require a different registry
+// version triggers an index update, a download of the new version, and a
+// rebuild of the whole chain.
+#[test]
+fn updating_a_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("a/src/lib.rs", "")
+        .build();
+
+    Package::new("bar", "0.0.1").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] bar v0.0.1
+[COMPILING] a v0.0.1 ([CWD]/a)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+
+    // Rewrite the path dep's manifest to require bar 0.1.0 instead of "*".
+    t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all(
+        br#"
+        [project]
+        name = "a"
+        version = "0.0.1"
+        authors = []
+
+        [dependencies]
+        bar = "0.1.0"
+    "#
+    ));
+    Package::new("bar", "0.1.0").publish();
+
+    println!("second");
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 (registry `[ROOT][..]`)
+[COMPILING] bar v0.1.0
+[COMPILING] a v0.0.1 ([CWD]/a)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// A project mixing a git dependency (b) and a registry dependency (a, also
+// pulled in by b) builds once, and a second build is a complete no-op.
+#[test]
+fn git_and_registry_dep() {
+    let b = git::repo(&paths::root().join("b"))
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "b"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = "0.0.1"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = "0.0.1"
+
+            [dependencies.b]
+            git = '{}'
+        "#,
+                b.url()
+            ),
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("a", "0.0.1").publish();
+
+    p.root().move_into_the_past();
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] [..]
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] a v0.0.1 (registry `[ROOT][..]`)
+[COMPILING] a v0.0.1
+[COMPILING] b v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+    p.root().move_into_the_past();
+
+    println!("second");
+    p.cargo("build").with_stdout("").run();
+}
+
+// A Cargo.lock that references a version newer than the locally cached index
+// copy must force an index update so the locked version can be downloaded.
+#[test]
+fn update_publish_then_update() {
+    // First generate a Cargo.lock and a clone of the registry index at the
+    // "head" of the current registry.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = "0.1.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    Package::new("a", "0.1.0").publish();
+    p.cargo("build").run();
+
+    // Next, publish a new package and back up the copy of the registry we just
+    // created.
+    Package::new("a", "0.1.1").publish();
+    let registry = paths::home().join(".cargo/registry");
+    let backup = paths::root().join("registry-backup");
+    t!(fs::rename(&registry, &backup));
+
+    // Generate a Cargo.lock with the newer version, and then move the old copy
+    // of the registry back into place.
+    let p2 = project()
+        .at("foo2")
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = "0.1.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+    p2.cargo("build").run();
+    registry.rm_rf();
+    t!(fs::rename(&backup, &registry));
+    t!(fs::rename(
+        p2.root().join("Cargo.lock"),
+        p.root().join("Cargo.lock")
+    ));
+
+    // Finally, build the first project again (with our newer Cargo.lock) which
+    // should force an update of the old registry, download the new crate, and
+    // then build everything again.
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] a v0.1.1 (registry `[ROOT][..]`)
+[COMPILING] a v0.1.1
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// `cargo fetch` downloads dependencies but does not compile anything
+// (no [COMPILING]/[FINISHED] lines in the output).
+#[test]
+fn fetch_downloads() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = "0.1.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("a", "0.1.0").publish();
+
+    p.cargo("fetch")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] a v0.1.0 (registry [..])
+",
+        ).run();
+}
+
+// `cargo update -pb` updates only the transitive dependency b in the
+// lockfile; the following build downloads b 0.1.1 and rebuilds the chain.
+#[test]
+fn update_transitive_dependency() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = "0.1.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("a", "0.1.0").dep("b", "*").publish();
+    Package::new("b", "0.1.0").publish();
+
+    p.cargo("fetch").run();
+
+    Package::new("b", "0.1.1").publish();
+
+    p.cargo("update -pb")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] b v0.1.0 -> v0.1.1
+",
+        ).run();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] b v0.1.1 (registry `[ROOT][..]`)
+[COMPILING] b v0.1.1
+[COMPILING] a v0.1.0
+[COMPILING] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// `cargo update -p hyper` must succeed even though the new hyper 0.6.6 needs
+// openssl 0.1.1 while cookie still allows 0.1.0: the resolver re-resolves and
+// updates openssl alongside hyper.
+#[test]
+fn update_backtracking_ok() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            webdriver = "0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("webdriver", "0.1.0")
+        .dep("hyper", "0.6")
+        .publish();
+    Package::new("hyper", "0.6.5")
+        .dep("openssl", "0.1")
+        .dep("cookie", "0.1")
+        .publish();
+    Package::new("cookie", "0.1.0")
+        .dep("openssl", "0.1")
+        .publish();
+    Package::new("openssl", "0.1.0").publish();
+
+    p.cargo("generate-lockfile").run();
+
+    Package::new("openssl", "0.1.1").publish();
+    Package::new("hyper", "0.6.6")
+        .dep("openssl", "0.1.1")
+        .dep("cookie", "0.1.0")
+        .publish();
+
+    p.cargo("update -p hyper")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] hyper v0.6.5 -> v0.6.6
+[UPDATING] openssl v0.1.0 -> v0.1.1
+",
+        ).run();
+}
+
+// Multiple -p flags update several packages at once; a package already at the
+// latest version (b in the second update) is silently skipped.
+#[test]
+fn update_multiple_packages() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = "*"
+            b = "*"
+            c = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("a", "0.1.0").publish();
+    Package::new("b", "0.1.0").publish();
+    Package::new("c", "0.1.0").publish();
+
+    p.cargo("fetch").run();
+
+    Package::new("a", "0.1.1").publish();
+    Package::new("b", "0.1.1").publish();
+    Package::new("c", "0.1.1").publish();
+
+    p.cargo("update -pa -pb")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] a v0.1.0 -> v0.1.1
+[UPDATING] b v0.1.0 -> v0.1.1
+",
+        ).run();
+
+    p.cargo("update -pb -pc")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] c v0.1.0 -> v0.1.1
+",
+        ).run();
+
+    p.cargo("build")
+        .with_stderr_contains("[DOWNLOADED] a v0.1.1 (registry `[ROOT][..]`)")
+        .with_stderr_contains("[DOWNLOADED] b v0.1.1 (registry `[ROOT][..]`)")
+        .with_stderr_contains("[DOWNLOADED] c v0.1.1 (registry `[ROOT][..]`)")
+        .with_stderr_contains("[COMPILING] a v0.1.1")
+        .with_stderr_contains("[COMPILING] b v0.1.1")
+        .with_stderr_contains("[COMPILING] c v0.1.1")
+        .with_stderr_contains("[COMPILING] foo v0.5.0 ([..])")
+        .run();
+}
+
+// A registry crate (baz) whose bundled manifest declares its dependency as
+// `{ path = "bar", version = "0.1.0" }` still resolves bar from the registry
+// when used as a dependency.
+#[test]
+fn bundled_crate_in_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+            baz = "0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("bar", "0.1.0").publish();
+    Package::new("baz", "0.1.0")
+        .dep("bar", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar", version = "0.1.0" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "")
+        .publish();
+
+    p.cargo("run").run();
+}
+
+// Regression test: `update -pfoobar` must target the `foobar` package and not
+// be confused with the `foo` package that shares its name prefix.
+#[test]
+fn update_same_prefix_oh_my_how_was_this_a_bug() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ugh"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("foobar", "0.2.0").publish();
+    Package::new("foo", "0.1.0")
+        .dep("foobar", "0.2.0")
+        .publish();
+
+    p.cargo("generate-lockfile").run();
+    p.cargo("update -pfoobar --precise=0.2.0").run();
+}
+
+// A semver pre-release requirement ("1.2.3-alpha.0") resolves against a
+// published pre-release version.
+#[test]
+fn use_semver() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "1.2.3-alpha.0"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("foo", "1.2.3-alpha.0").publish();
+
+    p.cargo("build").run();
+}
+
+// Only the normal dependency (baz) is downloaded: the dep behind a
+// non-matching target cfg (foo) and the dev-dependency (bar) are skipped.
+#[test]
+fn only_download_relevant() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [target.foo.dependencies]
+            foo = "*"
+            [dev-dependencies]
+            bar = "*"
+            [dependencies]
+            baz = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("foo", "0.1.0").publish();
+    Package::new("bar", "0.1.0").publish();
+    Package::new("baz", "0.1.0").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] baz v0.1.0 ([..])
+[COMPILING] baz v0.1.0
+[COMPILING] bar v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+        ).run();
+}
+
+// Resolution must backtrack from foo 0.1.1 (whose feature-dependency on an
+// unpublished `bar` cannot be satisfied) to the plain foo 0.1.0.
+#[test]
+fn resolve_and_backtracking() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("foo", "0.1.1")
+        .feature_dep("bar", "0.1", &["a", "b"])
+        .publish();
+    Package::new("foo", "0.1.0").publish();
+
+    p.cargo("build").run();
+}
+
+// With -vv, compiler warnings emitted while building a dependency (here an
+// unused function in foo) are surfaced in the build output.
+#[test]
+fn upstream_warnings_on_extra_verbose() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("foo", "0.1.0")
+        .file("src/lib.rs", "fn unused() {}")
+        .publish();
+
+    p.cargo("build -vv")
+        .with_stderr_contains("[..]warning: function is never used[..]")
+        .run();
+}
+
+// `--frozen` forbids network access: updating the registry for an unfetched
+// dependency fails with an HTTP-request error chain.
+#[test]
+fn disallow_network() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --frozen")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to load source for a dependency on `foo`
+
+Caused by:
+  Unable to update registry [..]
+
+Caused by:
+  attempting to make an HTTP request, but --frozen was specified
+",
+        ).run();
+}
+
+// Adding a direct dependency on a crate already present in the lockfile (via
+// the path dep baz) must not trigger a registry index update on rebuild.
+#[test]
+fn add_dep_dont_update_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            baz = { path = "baz" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            remote = "0.3"
+        "#,
+        ).file("baz/src/lib.rs", "")
+        .build();
+
+    Package::new("remote", "0.3.4").publish();
+
+    p.cargo("build").run();
+
+    // Add `remote` as a direct dependency of the top-level package.
+    t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+        br#"
+        [project]
+        name = "bar"
+        version = "0.5.0"
+        authors = []
+
+        [dependencies]
+        baz = { path = "baz" }
+        remote = "0.3"
+    "#
+    ));
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.5.0 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// Bumping the top-level package's own version must not trigger a registry
+// index update or any dependency recompilation.
+#[test]
+fn bump_version_dont_update_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            baz = { path = "baz" }
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            remote = "0.3"
+        "#,
+        ).file("baz/src/lib.rs", "")
+        .build();
+
+    Package::new("remote", "0.3.4").publish();
+
+    p.cargo("build").run();
+
+    // Only the version field changes: 0.5.0 -> 0.6.0.
+    t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(
+        br#"
+        [project]
+        name = "bar"
+        version = "0.6.0"
+        authors = []
+
+        [dependencies]
+        baz = { path = "baz" }
+    "#
+    ));
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.6.0 ([..])
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// The malformed version requirement `0.2*` (historically accepted) still
+// builds but emits the deprecation warning — twice, once per parse of the
+// local manifest.
+#[test]
+fn old_version_req() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            remote = "0.2*"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("remote", "0.2.0").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+warning: parsed version requirement `0.2*` is no longer valid
+
+Previous versions of Cargo accepted this malformed requirement,
+but it is being deprecated. This was found when parsing the manifest
+of bar 0.5.0, and the correct version requirement is `0.2.*`.
+
+This will soon become a hard error, so it's either recommended to
+update to a fixed version or contact the upstream maintainer about
+this warning.
+
+warning: parsed version requirement `0.2*` is no longer valid
+
+Previous versions of Cargo accepted this malformed requirement,
+but it is being deprecated. This was found when parsing the manifest
+of bar 0.5.0, and the correct version requirement is `0.2.*`.
+
+This will soon become a hard error, so it's either recommended to
+update to a fixed version or contact the upstream maintainer about
+this warning.
+
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// The same `0.2*` deprecation warning is emitted when the malformed
+// requirement lives in an upstream crate's bundled manifest.
+#[test]
+fn old_version_req_upstream() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            remote = "0.3"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    Package::new("remote", "0.3.0")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "remote"
+                version = "0.3.0"
+                authors = []
+
+                [dependencies]
+                bar = "0.2*"
+            "#,
+        ).file("src/lib.rs", "")
+        .publish();
+    Package::new("bar", "0.2.0").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+warning: parsed version requirement `0.2*` is no longer valid
+
+Previous versions of Cargo accepted this malformed requirement,
+but it is being deprecated. This was found when parsing the manifest
+of remote 0.3.0, and the correct version requirement is `0.2.*`.
+
+This will soon become a hard error, so it's either recommended to
+update to a fixed version or contact the upstream maintainer about
+this warning.
+
+[COMPILING] [..]
+[COMPILING] [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+// When a crate's bundled Cargo.toml (foo = "0.1.0") disagrees with its index
+// entry (dep on foo 0.2.0), the index metadata wins and the build succeeds.
+#[test]
+fn toml_lies_but_index_is_truth() {
+    Package::new("foo", "0.2.0").publish();
+    Package::new("bar", "0.3.0")
+        .dep("foo", "0.2.0")
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "bar"
+                version = "0.3.0"
+                authors = []
+
+                [dependencies]
+                foo = "0.1.0"
+            "#,
+        ).file("src/lib.rs", "extern crate foo;")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.3"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+// `build -vv` still succeeds for a dependency containing
+// `#![deny(warnings)]` plus an unused function — presumably because lints
+// in dependencies are capped; the test only asserts the build passes.
+#[test]
+fn vv_prints_warnings() {
+    Package::new("foo", "0.2.0")
+        .file(
+            "src/lib.rs",
+            "#![deny(warnings)] fn foo() {} // unused function",
+        ).publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "fo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.2"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -vv").run();
+}
+
+// A crate tarball smuggling a file outside its own package directory
+// ("foo-0.1.0/src/lib.rs" inside the foo-0.2.0 archive) must fail to unpack.
+#[test]
+fn bad_and_or_malicious_packages_rejected() {
+    Package::new("foo", "0.2.0")
+        .extra_file("foo-0.1.0/src/lib.rs", "")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "fo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.2"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -vv")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] [..]
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..]
+error: failed to download [..]
+
+Caused by:
+  failed to unpack [..]
+
+Caused by:
+  [..] contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\"
+",
+        ).run();
+}
+
+// A ~/.gitconfig pointing init.templatedir at a nonexistent path must not
+// break re-cloning the registry index after the local cache is removed.
+#[test]
+fn git_init_templatedir_missing() {
+    Package::new("foo", "0.2.0").dep("bar", "*").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "fo"
+                version = "0.5.0"
+                authors = []
+
+                [dependencies]
+                foo = "0.2"
+            "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+
+    // Force a fresh registry clone with the broken templatedir in effect.
+    remove_dir_all(paths::home().join(".cargo/registry")).unwrap();
+    File::create(paths::home().join(".gitconfig"))
+        .unwrap()
+        .write_all(
+            br#"
+            [init]
+            templatedir = nowhere
+        "#,
+        ).unwrap();
+
+    p.cargo("build").run();
+    p.cargo("build").run();
+}
+
+// Renamed dependencies (`package = "foo"` under the names foo01/foo02) work
+// together with features: the optional renamed dep can be enabled directly
+// (bar/foo01) or via a feature that references it (bar/another).
+#[test]
+fn rename_deps_and_features() {
+    Package::new("foo", "0.1.0")
+        .file("src/lib.rs", "pub fn f1() {}")
+        .publish();
+    Package::new("foo", "0.2.0")
+        .file("src/lib.rs", "pub fn f2() {}")
+        .publish();
+    Package::new("bar", "0.2.0")
+        .add_dep(Dependency::new("foo01", "0.1.0").package("foo").optional(true))
+        .add_dep(Dependency::new("foo02", "0.2.0").package("foo"))
+        .feature("another", &["foo01"])
+        .file(
+            "src/lib.rs",
+            r#"
+                extern crate foo02;
+                #[cfg(feature = "foo01")]
+                extern crate foo01;
+
+                pub fn foo() {
+                    foo02::f2();
+                    #[cfg(feature = "foo01")]
+                    foo01::f1();
+                }
+            "#,
+        )
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "a"
+                version = "0.5.0"
+                authors = []
+
+                [dependencies]
+                bar = "0.2"
+            "#,
+        ).file(
+            "src/main.rs",
+            "
+                extern crate bar;
+                fn main() { bar::foo(); }
+            ",
+        )
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("build --features bar/foo01").run();
+    p.cargo("build --features bar/another").run();
+}
diff --git a/tests/testsuite/rename_deps.rs b/tests/testsuite/rename_deps.rs
new file mode 100644 (file)
index 0000000..b70c8e8
--- /dev/null
@@ -0,0 +1,372 @@
+use support::git;
+use support::paths;
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+#[test]
+fn rename_dependency() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { version = "0.1.0" }
+            baz = { version = "0.2.0", package = "bar" }
+        "#,
+        ).file("src/lib.rs", "extern crate bar; extern crate baz;")
+        .build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn rename_with_different_names() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            baz = { path = "bar", package = "bar" }
+        "#,
+        ).file("src/lib.rs", "extern crate baz;")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "random_name"
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn lots_of_names() {
+    Package::new("foo", "0.1.0")
+        .file("src/lib.rs", "pub fn foo1() {}")
+        .publish();
+    Package::new("foo", "0.2.0")
+        .file("src/lib.rs", "pub fn foo() {}")
+        .publish();
+    Package::new("foo", "0.1.0")
+        .file("src/lib.rs", "pub fn foo2() {}")
+        .alternative(true)
+        .publish();
+
+    let g = git::repo(&paths::root().join("another"))
+        .file("Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("src/lib.rs", "pub fn foo3() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+                cargo-features = ["alternative-registries"]
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                foo = "0.2"
+                foo1 = {{ version = "0.1", package = "foo" }}
+                foo2 = {{ version = "0.1", registry = "alternative", package = "foo" }}
+                foo3 = {{ git = '{}', package = "foo" }}
+                foo4 = {{ path = "foo", package = "foo" }}
+            "#,
+                g.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            "
+                extern crate foo;
+                extern crate foo1;
+                extern crate foo2;
+                extern crate foo3;
+                extern crate foo4;
+
+                pub fn foo() {
+                    foo::foo();
+                    foo1::foo1();
+                    foo2::foo2();
+                    foo3::foo3();
+                    foo4::foo4();
+                }
+            ",
+        ).file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "pub fn foo4() {}")
+        .build();
+
+    p.cargo("build -v").masquerade_as_nightly_cargo().run();
+}
+
+#[test]
+fn rename_and_patch() {
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                bar = { version = "0.1", package = "foo" }
+
+                [patch.crates-io]
+                foo = { path = "foo" }
+            "#,
+        ).file(
+            "src/lib.rs",
+            "extern crate bar; pub fn foo() { bar::foo(); }",
+        ).file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn rename_twice() {
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                bar = { version = "0.1", package = "foo" }
+                [build-dependencies]
+                foo = { version = "0.1" }
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v0.1.0 (registry [..])
+error: multiple dependencies listed for the same crate must all have the same \
+name, but the dependency on `foo v0.1.0` is listed as having different names
+",
+        ).run();
+}
+
+#[test]
+fn rename_affects_fingerprint() {
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                foo = { version = "0.1", package = "foo" }
+            "#,
+        ).file("src/lib.rs", "extern crate foo;")
+        .build();
+
+    p.cargo("build -v").run();
+
+    p.change_file(
+        "Cargo.toml",
+        r#"
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                bar = { version = "0.1", package = "foo" }
+        "#,
+    );
+
+    p.cargo("build -v")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn can_run_doc_tests() {
+    Package::new("bar", "0.1.0").publish();
+    Package::new("bar", "0.2.0").publish();
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { version = "0.1.0" }
+            baz = { version = "0.2.0", package = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            "
+            extern crate bar;
+            extern crate baz;
+        ",
+        ).build();
+
+    foo.cargo("test -v")
+        .with_stderr_contains(
+            "\
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [CWD]/src/lib.rs \
+        [..] \
+        --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \
+        --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib \
+        [..]`
+",
+        ).run();
+}
+
+#[test]
+fn features_still_work() {
+    Package::new("foo", "0.1.0").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                p1 = { path = 'a', features = ['b'] }
+                p2 = { path = 'b' }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+                [package]
+                name = "p1"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                b = { version = "0.1", package = "foo", optional = true }
+            "#,
+        ).file("a/src/lib.rs", "extern crate b;")
+        .file(
+            "b/Cargo.toml",
+            r#"
+                [package]
+                name = "p2"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                b = { version = "0.1", package = "bar", optional = true }
+
+                [features]
+                default = ['b']
+            "#,
+        ).file("b/src/lib.rs", "extern crate b;")
+        .build();
+
+    p.cargo("build -v").run();
+}
+
+#[test]
+fn features_not_working() {
+    Package::new("foo", "0.1.0").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "test"
+                version = "0.1.0"
+                authors = []
+
+                [dependencies]
+                a = { path = 'a', package = 'p1', optional = true }
+
+                [features]
+                default = ['p1']
+            "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("p1", "0.1.0"))
+        .build();
+
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  Feature `default` includes `p1` which is neither a dependency nor another feature
+",
+        ).run();
+}
+
+#[test]
+fn rename_with_dash() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "qwerty"
+                version = "0.1.0"
+
+                [dependencies]
+                foo-bar = { path = 'a', package = 'a' }
+            "#,
+        )
+        .file("src/lib.rs", "extern crate foo_bar;")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .run();
+}
diff --git a/tests/testsuite/required_features.rs b/tests/testsuite/required_features.rs
new file mode 100644 (file)
index 0000000..6f174e1
--- /dev/null
@@ -0,0 +1,1098 @@
+use support::install::{cargo_home, assert_has_installed_exe, assert_has_not_installed_exe};
+use support::is_nightly;
+use support::project;
+
+#[test]
+fn build_bin_default_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+
+            [[bin]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            extern crate foo;
+
+            #[cfg(feature = "a")]
+            fn test() {
+                foo::foo();
+            }
+
+            fn main() {}
+        "#,
+        ).file("src/lib.rs", r#"#[cfg(feature = "a")] pub fn foo() {}"#)
+        .build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.cargo("build --no-default-features").run();
+
+    p.cargo("build --bin=foo").run();
+    assert!(p.bin("foo").is_file());
+
+    p.cargo("build --bin=foo --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+}
+
+#[test]
+fn build_bin_arg_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+
+            [[bin]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --features a").run();
+    assert!(p.bin("foo").is_file());
+}
+
+#[test]
+fn build_bin_multiple_required_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a", "b"]
+            a = []
+            b = ["a"]
+            c = []
+
+            [[bin]]
+            name = "foo_1"
+            path = "src/foo_1.rs"
+            required-features = ["b", "c"]
+
+            [[bin]]
+            name = "foo_2"
+            path = "src/foo_2.rs"
+            required-features = ["a"]
+        "#,
+        ).file("src/foo_1.rs", "fn main() {}")
+        .file("src/foo_2.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build").run();
+
+    assert!(!p.bin("foo_1").is_file());
+    assert!(p.bin("foo_2").is_file());
+
+    p.cargo("build --features c").run();
+
+    assert!(p.bin("foo_1").is_file());
+    assert!(p.bin("foo_2").is_file());
+
+    p.cargo("build --no-default-features").run();
+}
+
+#[test]
+fn build_example_default_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+
+            [[example]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("examples/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --example=foo").run();
+    assert!(p.bin("examples/foo").is_file());
+
+    p.cargo("build --example=foo --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+}
+
+#[test]
+fn build_example_arg_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+
+            [[example]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("examples/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --example=foo --features a").run();
+    assert!(p.bin("examples/foo").is_file());
+}
+
+#[test]
+fn build_example_multiple_required_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a", "b"]
+            a = []
+            b = ["a"]
+            c = []
+
+            [[example]]
+            name = "foo_1"
+            required-features = ["b", "c"]
+
+            [[example]]
+            name = "foo_2"
+            required-features = ["a"]
+        "#,
+        ).file("examples/foo_1.rs", "fn main() {}")
+        .file("examples/foo_2.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build --example=foo_1")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo_1` in package `foo` requires the features: `b`, `c`
+Consider enabling them by passing e.g. `--features=\"b c\"`
+",
+        ).run();
+    p.cargo("build --example=foo_2").run();
+
+    assert!(!p.bin("examples/foo_1").is_file());
+    assert!(p.bin("examples/foo_2").is_file());
+
+    p.cargo("build --example=foo_1 --features c").run();
+    p.cargo("build --example=foo_2 --features c").run();
+
+    assert!(p.bin("examples/foo_1").is_file());
+    assert!(p.bin("examples/foo_2").is_file());
+
+    p.cargo("build --example=foo_1 --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo_1` in package `foo` requires the features: `b`, `c`
+Consider enabling them by passing e.g. `--features=\"b c\"`
+",
+        ).run();
+    p.cargo("build --example=foo_2 --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo_2` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+}
+
+#[test]
+fn test_default_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+
+            [[test]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("tests/foo.rs", "#[test]\nfn test() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+
+    p.cargo("test --no-default-features")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .with_stdout("")
+        .run();
+
+    p.cargo("test --test=foo")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+
+    p.cargo("test --test=foo --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+}
+
+#[test]
+fn test_arg_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+
+            [[test]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("tests/foo.rs", "#[test]\nfn test() {}")
+        .build();
+
+    p.cargo("test --features a")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+}
+
+#[test]
+fn test_multiple_required_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a", "b"]
+            a = []
+            b = ["a"]
+            c = []
+
+            [[test]]
+            name = "foo_1"
+            required-features = ["b", "c"]
+
+            [[test]]
+            name = "foo_2"
+            required-features = ["a"]
+        "#,
+        ).file("tests/foo_1.rs", "#[test]\nfn test() {}")
+        .file("tests/foo_2.rs", "#[test]\nfn test() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo_2-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+
+    p.cargo("test --features c")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo_1-[..][EXE]
+[RUNNING] target/debug/deps/foo_2-[..][EXE]",
+        ).with_stdout_contains_n("test test ... ok", 2)
+        .run();
+
+    p.cargo("test --no-default-features")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn bench_default_features() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+
+            [[bench]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file(
+            "benches/foo.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            #[bench]
+            fn bench(_: &mut test::Bencher) {
+            }"#,
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test bench ... bench: [..]")
+        .run();
+
+    p.cargo("bench --no-default-features")
+        .with_stderr("[FINISHED] release [optimized] target(s) in [..]".to_string())
+        .with_stdout("")
+        .run();
+
+    p.cargo("bench --bench=foo")
+        .with_stderr(
+            "\
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test bench ... bench: [..]")
+        .run();
+
+    p.cargo("bench --bench=foo --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+}
+
+#[test]
+fn bench_arg_features() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+
+            [[bench]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file(
+            "benches/foo.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            #[bench]
+            fn bench(_: &mut test::Bencher) {
+            }"#,
+        ).build();
+
+    p.cargo("bench --features a")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test bench ... bench: [..]")
+        .run();
+}
+
+#[test]
+fn bench_multiple_required_features() {
+    if !is_nightly() {
+        return;
+    }
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a", "b"]
+            a = []
+            b = ["a"]
+            c = []
+
+            [[bench]]
+            name = "foo_1"
+            required-features = ["b", "c"]
+
+            [[bench]]
+            name = "foo_2"
+            required-features = ["a"]
+        "#,
+        ).file(
+            "benches/foo_1.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            #[bench]
+            fn bench(_: &mut test::Bencher) {
+            }"#,
+        ).file(
+            "benches/foo_2.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            #[bench]
+            fn bench(_: &mut test::Bencher) {
+            }"#,
+        ).build();
+
+    p.cargo("bench")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo_2-[..][EXE]",
+        ).with_stdout_contains("test bench ... bench: [..]")
+        .run();
+
+    p.cargo("bench --features c")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo_1-[..][EXE]
+[RUNNING] target/release/deps/foo_2-[..][EXE]",
+        ).with_stdout_contains_n("test bench ... bench: [..]", 2)
+        .run();
+
+    p.cargo("bench --no-default-features")
+        .with_stderr("[FINISHED] release [optimized] target(s) in [..]")
+        .with_stdout("")
+        .run();
+}
+
+#[test]
+fn install_default_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a"]
+            a = []
+
+            [[bin]]
+            name = "foo"
+            required-features = ["a"]
+
+            [[example]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("examples/foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("install --path .").run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    p.cargo("uninstall foo").run();
+
+    p.cargo("install --path . --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[ERROR] no binaries are available for install using the selected features
+",
+        ).run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+
+    p.cargo("install --path . --bin=foo").run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    p.cargo("uninstall foo").run();
+
+    p.cargo("install --path . --bin=foo --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.0.1 ([..])
+[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
+    `[..]target`
+
+Caused by:
+  target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+
+    p.cargo("install --path . --example=foo").run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    p.cargo("uninstall foo").run();
+
+    p.cargo("install --path . --example=foo --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.0.1 ([..])
+[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \
+    `[..]target`
+
+Caused by:
+  target `foo` in package `foo` requires the features: `a`
+Consider enabling them by passing e.g. `--features=\"a\"`
+",
+        ).run();
+    assert_has_not_installed_exe(cargo_home(), "foo");
+}
+
+#[test]
+fn install_arg_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+
+            [[bin]]
+            name = "foo"
+            required-features = ["a"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("install --features a").run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    p.cargo("uninstall foo").run();
+}
+
+#[test]
+fn install_multiple_required_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            default = ["a", "b"]
+            a = []
+            b = ["a"]
+            c = []
+
+            [[bin]]
+            name = "foo_1"
+            path = "src/foo_1.rs"
+            required-features = ["b", "c"]
+
+            [[bin]]
+            name = "foo_2"
+            path = "src/foo_2.rs"
+            required-features = ["a"]
+        "#,
+        ).file("src/foo_1.rs", "fn main() {}")
+        .file("src/foo_2.rs", "fn main() {}")
+        .build();
+
+    p.cargo("install --path .").run();
+    assert_has_not_installed_exe(cargo_home(), "foo_1");
+    assert_has_installed_exe(cargo_home(), "foo_2");
+    p.cargo("uninstall foo").run();
+
+    p.cargo("install --path . --features c").run();
+    assert_has_installed_exe(cargo_home(), "foo_1");
+    assert_has_installed_exe(cargo_home(), "foo_2");
+    p.cargo("uninstall foo").run();
+
+    p.cargo("install --path . --no-default-features")
+        .with_status(101)
+        .with_stderr(
+            "\
+[INSTALLING] foo v0.0.1 ([..])
+[FINISHED] release [optimized] target(s) in [..]
+[ERROR] no binaries are available for install using the selected features
+",
+        ).run();
+    assert_has_not_installed_exe(cargo_home(), "foo_1");
+    assert_has_not_installed_exe(cargo_home(), "foo_2");
+}
+
+#[test]
+fn dep_feature_in_toml() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar", features = ["a"] }
+
+            [[bin]]
+            name = "foo"
+            required-features = ["bar/a"]
+
+            [[example]]
+            name = "foo"
+            required-features = ["bar/a"]
+
+            [[test]]
+            name = "foo"
+            required-features = ["bar/a"]
+
+            [[bench]]
+            name = "foo"
+            required-features = ["bar/a"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("examples/foo.rs", "fn main() {}")
+        .file("tests/foo.rs", "#[test]\nfn test() {}")
+        .file(
+            "benches/foo.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            #[bench]
+            fn bench(_: &mut test::Bencher) {
+            }"#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            a = []
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    // bin
+    p.cargo("build --bin=foo").run();
+    assert!(p.bin("foo").is_file());
+
+    // example
+    p.cargo("build --example=foo").run();
+    assert!(p.bin("examples/foo").is_file());
+
+    // test
+    p.cargo("test --test=foo")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+
+    // bench
+    if is_nightly() {
+        p.cargo("bench --bench=foo")
+            .with_stderr(
+                "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] target/release/deps/foo-[..][EXE]",
+            ).with_stdout_contains("test bench ... bench: [..]")
+            .run();
+    }
+
+    // install
+    p.cargo("install").run();
+    assert_has_installed_exe(cargo_home(), "foo");
+    p.cargo("uninstall foo").run();
+}
+
#[test]
fn dep_feature_in_cmd_line() {
    // Targets can be gated on a *dependency's* feature via
    // `required-features = ["bar/a"]`.  Explicitly requesting such a target
    // without the feature is an error; implicit selection (plain
    // `cargo test` / `cargo bench`) silently skips it.
    let p = project()
        .file(
            "Cargo.toml",
            r#"
            [project]
            name = "foo"
            version = "0.0.1"
            authors = []

            [dependencies]
            bar = { path = "bar" }

            [[bin]]
            name = "foo"
            required-features = ["bar/a"]

            [[example]]
            name = "foo"
            required-features = ["bar/a"]

            [[test]]
            name = "foo"
            required-features = ["bar/a"]

            [[bench]]
            name = "foo"
            required-features = ["bar/a"]
        "#,
        ).file("src/main.rs", "fn main() {}")
        .file("examples/foo.rs", "fn main() {}")
        .file("tests/foo.rs", "#[test]\nfn test() {}")
        .file(
            "benches/foo.rs",
            r#"
            #![feature(test)]
            extern crate test;

            #[bench]
            fn bench(_: &mut test::Bencher) {
            }"#,
        ).file(
            "bar/Cargo.toml",
            r#"
            [project]
            name = "bar"
            version = "0.0.1"
            authors = []

            [features]
            a = []
        "#,
        ).file("bar/src/lib.rs", "")
        .build();

    // A plain build succeeds: no gated target was explicitly requested.
    p.cargo("build").run();

    // bin: explicit request without the feature is an error...
    p.cargo("build --bin=foo")
        .with_status(101)
        .with_stderr(
            "\
error: target `foo` in package `foo` requires the features: `bar/a`
Consider enabling them by passing e.g. `--features=\"bar/a\"`
",
        ).run();

    // ...and succeeds once `bar/a` is enabled.
    p.cargo("build --bin=foo --features bar/a").run();
    assert!(p.bin("foo").is_file());

    // example: same pattern as the bin above.
    p.cargo("build --example=foo")
        .with_status(101)
        .with_stderr(
            "\
error: target `foo` in package `foo` requires the features: `bar/a`
Consider enabling them by passing e.g. `--features=\"bar/a\"`
",
        ).run();

    p.cargo("build --example=foo --features bar/a").run();
    assert!(p.bin("examples/foo").is_file());

    // test: implicit selection skips the gated target (nothing is run)...
    p.cargo("test")
        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
        .with_stdout("")
        .run();

    // ...but explicit request plus the feature compiles and runs it.
    p.cargo("test --test=foo --features bar/a")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/debug/deps/foo-[..][EXE]",
        ).with_stdout_contains("test test ... ok")
        .run();

    // bench: gated to nightly because the fixture uses `#![feature(test)]`.
    if is_nightly() {
        p.cargo("bench")
            .with_stderr("[FINISHED] release [optimized] target(s) in [..]")
            .with_stdout("")
            .run();

        p.cargo("bench --bench=foo --features bar/a")
            .with_stderr(
                "\
[COMPILING] bar v0.0.1 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]",
            ).with_stdout_contains("test bench ... bench: [..]")
            .run();
    }

    // install: with no features enabled there is no installable binary.
    p.cargo("install --path .")
        .with_status(101)
        .with_stderr(
            "\
[INSTALLING] foo v0.0.1 ([..])
[FINISHED] release [optimized] target(s) in [..]
[ERROR] no binaries are available for install using the selected features
",
        ).run();
    assert_has_not_installed_exe(cargo_home(), "foo");

    p.cargo("install --features bar/a").run();
    assert_has_installed_exe(cargo_home(), "foo");
    p.cargo("uninstall foo").run();
}
+
#[test]
fn test_skips_compiling_bin_with_missing_required_features() {
    // The bin target is gated on feature `a` and its source references the
    // nonexistent crate `bar`.  Without `a`, `cargo test`/`bench` must skip
    // compiling the bin entirely; with `a`, compilation is attempted and
    // fails with E0463.
    let p = project()
        .file(
            "Cargo.toml",
            r#"
            [project]
            name = "foo"
            version = "0.0.1"
            authors = []

            [features]
            a = []

            [[bin]]
            name = "bin_foo"
            path = "src/bin/foo.rs"
            required-features = ["a"]
        "#,
        ).file("src/bin/foo.rs", "extern crate bar; fn main() {}")
        .file("tests/foo.rs", "")
        .file("benches/foo.rs", "")
        .build();

    // Feature `a` off: the bin is skipped and the empty test target passes.
    p.cargo("test")
        .with_stderr(
            "\
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/debug/deps/foo-[..][EXE]",
        ).with_stdout_contains("running 0 tests")
        .run();

    // Feature `a` on: the bin is compiled and `extern crate bar` fails.
    // NOTE(review): `-j 1` presumably keeps output ordering stable — confirm.
    p.cargo("test --features a -j 1")
        .with_status(101)
        .with_stderr_contains(
            "\
[COMPILING] foo v0.0.1 ([CWD])
error[E0463]: can't find crate for `bar`",
        ).run();

    // Same pair of checks for `cargo bench` (nightly-only).
    if is_nightly() {
        p.cargo("bench")
            .with_stderr(
                "\
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]",
            ).with_stdout_contains("running 0 tests")
            .run();

        p.cargo("bench --features a -j 1")
            .with_status(101)
            .with_stderr_contains(
                "\
[COMPILING] foo v0.0.1 ([CWD])
error[E0463]: can't find crate for `bar`",
            ).run();
    }
}
+
#[test]
fn run_default() {
    // `cargo run` with only default features refuses to run a bin gated on
    // a non-default feature, and names the flag that would enable it.
    let p = project()
        .file(
            "Cargo.toml",
            r#"
            [project]
            name = "foo"
            version = "0.0.1"
            authors = []

            [features]
            default = []
            a = []

            [[bin]]
            name = "foo"
            required-features = ["a"]
        "#,
        ).file("src/lib.rs", "")
        .file("src/main.rs", "extern crate foo; fn main() {}")
        .build();

    p.cargo("run")
        .with_status(101)
        .with_stderr(
            "\
error: target `foo` in package `foo` requires the features: `a`
Consider enabling them by passing e.g. `--features=\"a\"`
",
        ).run();

    // Enabling the feature makes the bin runnable.
    p.cargo("run --features a").run();
}
+
#[test]
fn run_default_multiple_required_features() {
    // With two [[bin]] targets, `cargo run` without `--bin` is ambiguous;
    // the error message lists the available binaries.
    let p = project()
        .file(
            "Cargo.toml",
            r#"
            [project]
            name = "foo"
            version = "0.0.1"
            authors = []

            [features]
            default = ["a"]
            a = []
            b = []

            [[bin]]
            name = "foo1"
            path = "src/foo1.rs"
            required-features = ["a"]

            [[bin]]
            name = "foo2"
            path = "src/foo2.rs"
            required-features = ["b"]
        "#,
        ).file("src/lib.rs", "")
        .file("src/foo1.rs", "extern crate foo; fn main() {}")
        .file("src/foo2.rs", "extern crate foo; fn main() {}")
        .build();

    p.cargo("run")
        .with_status(101)
        .with_stderr(
            "\
             error: `cargo run` requires that a package only have one executable; \
             use the `--bin` option to specify which one to run\navailable binaries: foo1, foo2",
        ).run();
}
diff --git a/tests/testsuite/resolve.rs b/tests/testsuite/resolve.rs
new file mode 100644 (file)
index 0000000..afe06f5
--- /dev/null
@@ -0,0 +1,1115 @@
+use std::env;
+
+use cargo::core::dependency::Kind::Development;
+use cargo::core::{enable_nightly_features, Dependency};
+use cargo::util::Config;
+
+use support::project;
+use support::registry::Package;
+use support::resolver::{
+    assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, loc_names, names, pkg, pkg_dep,
+    pkg_id, pkg_loc, registry, registry_strategy, resolve, resolve_and_validated,
+    resolve_with_config, PrettyPrintRegistry, ToDep, ToPkgId,
+};
+
+use proptest::collection::vec;
+use proptest::prelude::*;
+
proptest! {
    #![proptest_config(ProptestConfig {
        // Note that this is a little low in terms of cases we'd like to test,
        // but this number affects how long this function takes. It can be
        // increased locally to execute more tests and try to find more bugs,
        // but for now it's semi-low to run in a small-ish amount of time on CI
        // and locally.
        cases: 256,
        max_shrink_iters:
            if env::var("CI").is_ok() {
                // This attempts to make sure that CI will fail fast,
                0
            } else {
                // but that local builds will give a small clear test case.
                ProptestConfig::default().max_shrink_iters
            },
        .. ProptestConfig::default()
    })]
    // Property: every resolution produced from a generated registry must
    // pass the resolver's own validation (`resolve_and_validated`).
    #[test]
    fn passes_validation(
        PrettyPrintRegistry(input) in registry_strategy(50, 20, 60)
    )  {
        let reg = registry(input.clone());
        // there is only a small chance that any one
        // crate will be interesting.
        // So we try some of the most complicated.
        for this in input.iter().rev().take(20) {
            let _ = resolve_and_validated(
                &pkg_id("root"),
                vec![dep_req(&this.name(), &format!("={}", this.version()))],
                &reg,
            );
        }
    }
    // Property: unpublishing crates that were NOT part of a successful
    // resolution keeps it successful, and a failing resolution stays failing
    // no matter what is unpublished.
    #[test]
    fn limited_independence_of_irrelevant_alternatives(
        PrettyPrintRegistry(input) in registry_strategy(50, 20, 60),
        indexs_to_unpublish in vec(any::<prop::sample::Index>(), 10)
    )  {
        let reg = registry(input.clone());
        // there is only a small chance that any one
        // crate will be interesting.
        // So we try some of the most complicated.
        for this in input.iter().rev().take(10) {
            let res = resolve(
                &pkg_id("root"),
                vec![dep_req(&this.name(), &format!("={}", this.version()))],
                &reg,
            );

            match res {
                Ok(r) => {
                    // If resolution was successful, then unpublishing a version of a crate
                    // that was not selected should not change that.
                    let not_selected: Vec<_> = input
                        .iter()
                        .cloned()
                        .filter(|x| !r.contains(x.package_id()))
                        .collect();
                    if !not_selected.is_empty() {
                        let indexs_to_unpublish: Vec<_> = indexs_to_unpublish.iter().map(|x| x.get(&not_selected)).collect();

                        let new_reg = registry(
                            input
                                .iter()
                                .cloned()
                                .filter(|x| !indexs_to_unpublish.contains(&x))
                                .collect(),
                        );

                        let res = resolve(
                            &pkg_id("root"),
                            vec![dep_req(&this.name(), &format!("={}", this.version()))],
                            &new_reg,
                        );

                        // Note: that we can not assert that the two `res` are identical
                        // as the resolver does depend on irrelevant alternatives.
                        // It uses how constrained a dependency requirement is
                        // to determine what order to evaluate requirements.

                        prop_assert!(
                            res.is_ok(),
                            "unpublishing {:?} stopped `{} = \"={}\"` from working",
                            indexs_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
                            this.name(),
                            this.version()
                        )
                    }
                }

                Err(_) => {
                    // If resolution was unsuccessful, then it should stay unsuccessful
                    // even if any version of a crate is unpublished.
                    let indexs_to_unpublish: Vec<_> = indexs_to_unpublish.iter().map(|x| x.get(&input)).collect();

                    let new_reg = registry(
                        input
                            .iter()
                            .cloned()
                            .filter(|x| !indexs_to_unpublish.contains(&x))
                            .collect(),
                    );

                    let res = resolve(
                        &pkg_id("root"),
                        vec![dep_req(&this.name(), &format!("={}", this.version()))],
                        &new_reg,
                    );

                    prop_assert!(
                        res.is_err(),
                        "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!",
                        this.name(),
                        this.version(),
                        indexs_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
                    )
                }
            }
        }
    }
}
+
#[test]
#[should_panic(expected = "assertion failed: !name.is_empty()")]
fn test_dependency_with_empty_name() {
    // Bug 5229, dependency-names must not be empty; constructing such a
    // dependency must trip the debug assertion named above.
    "".to_dep();
}
+
#[test]
fn test_resolving_empty_dependency_list() {
    // With nothing requested, only the root package itself is resolved.
    let empty_registry = registry(vec![]);
    let resolved = resolve(&pkg_id("root"), Vec::new(), &empty_registry).unwrap();
    assert_eq!(resolved, names(&["root"]));
}
+
#[test]
fn test_resolving_only_package() {
    // A single requested crate resolves to the root plus that crate.
    let available = registry(vec![pkg!("foo")]);
    let resolved = resolve(&pkg_id("root"), vec![dep("foo")], &available).unwrap();
    assert_same(&resolved, &names(&["root", "foo"]));
}
+
#[test]
fn test_resolving_one_dep() {
    // Only the requested `foo` is activated; the unrelated `bar` is ignored.
    let available = registry(vec![pkg!("foo"), pkg!("bar")]);
    let resolved = resolve(&pkg_id("root"), vec![dep("foo")], &available).unwrap();
    assert_same(&resolved, &names(&["root", "foo"]));
}
+
#[test]
fn test_resolving_multiple_deps() {
    // Both requested crates are resolved; the unrequested `bar` is not.
    let available = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]);
    let resolved = resolve(&pkg_id("root"), vec![dep("foo"), dep("baz")], &available).unwrap();
    assert_same(&resolved, &names(&["root", "foo", "baz"]));
}
+
#[test]
fn test_resolving_transitive_deps() {
    // Asking for `bar` drags in its own dependency `foo` as well.
    let available = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]);
    let resolved = resolve(&pkg_id("root"), vec![dep("bar")], &available).unwrap();
    assert_same(&resolved, &names(&["root", "foo", "bar"]));
}
+
#[test]
fn test_resolving_common_transitive_deps() {
    // `bar` is both requested directly and required by `foo`;
    // it is activated only once.
    let available = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]);
    let resolved = resolve(&pkg_id("root"), vec![dep("foo"), dep("bar")], &available).unwrap();
    assert_same(&resolved, &names(&["root", "foo", "bar"]));
}
+
#[test]
fn test_resolving_with_same_name() {
    // Crates coming from two different source locations can coexist
    // within one resolution.
    let reg = registry(vec![
        pkg_loc("foo", "http://first.example.com"),
        pkg_loc("bar", "http://second.example.com"),
    ]);

    let resolved = resolve(
        &pkg_id("root"),
        vec![
            dep_loc("foo", "http://first.example.com"),
            dep_loc("bar", "http://second.example.com"),
        ],
        &reg,
    )
    .unwrap();

    // The expected set is the two located packages plus the root itself.
    let mut expected = loc_names(&[
        ("foo", "http://first.example.com"),
        ("bar", "http://second.example.com"),
    ]);
    expected.push(pkg_id("root"));

    assert_same(&resolved, &expected);
}
+
#[test]
fn test_resolving_with_dev_deps() {
    // Dev-dependencies of the root (`baz`) are resolved, while
    // dev-dependencies of other packages (`bam`, which is not even
    // published) are ignored.
    let available = registry(vec![
        pkg!("foo" => ["bar", dep_kind("baz", Development)]),
        pkg!("baz" => ["bat", dep_kind("bam", Development)]),
        pkg!("bar"),
        pkg!("bat"),
    ]);

    let resolved = resolve(
        &pkg_id("root"),
        vec![dep("foo"), dep_kind("baz", Development)],
        &available,
    )
    .unwrap();

    assert_same(&resolved, &names(&["root", "foo", "bar", "baz", "bat"]));
}
+
#[test]
fn resolving_with_many_versions() {
    // With two compatible versions published, the newest (1.0.2) wins.
    let available = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
    let resolved = resolve(&pkg_id("root"), vec![dep("foo")], &available).unwrap();
    assert_same(&resolved, &names(&[("root", "1.0.0"), ("foo", "1.0.2")]));
}
+
#[test]
fn resolving_with_specific_version() {
    // An exact requirement `=1.0.1` overrides the newest-wins default.
    let available = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]);
    let resolved = resolve(&pkg_id("root"), vec![dep_req("foo", "=1.0.1")], &available).unwrap();
    assert_same(&resolved, &names(&[("root", "1.0.0"), ("foo", "1.0.1")]));
}
+
#[test]
fn test_resolving_maximum_version_with_transitive_deps() {
    // `foo` wants `util ^1.0.0` and `bar` wants `util >=1.0.1`; the resolver
    // must pick the maximum version satisfying both (1.2.2) and activate no
    // other copy of `util`.
    let reg = registry(vec![
        pkg!(("util", "1.2.2")),
        pkg!(("util", "1.0.0")),
        pkg!(("util", "1.1.1")),
        pkg!("foo" => [dep_req("util", "1.0.0")]),
        pkg!("bar" => [dep_req("util", ">=1.0.1")]),
    ]);

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
        &reg,
    )
    .unwrap();

    assert_contains(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("foo", "1.0.0"),
            ("bar", "1.0.0"),
            ("util", "1.2.2"),
        ]),
    );
    // The non-maximal *published* versions must not be selected.  (This used
    // to check "util 1.0.1", which is not in the registry at all, making the
    // assertion vacuously true; "1.0.0" is the version actually published.)
    assert!(!res.contains(&("util", "1.0.0").to_pkgid()));
    assert!(!res.contains(&("util", "1.1.1").to_pkgid()));
}
+
#[test]
fn test_resolving_minimum_version_with_transitive_deps() {
    enable_nightly_features(); // -Z minimal-versions
    // When the minimal-versions config option is specified then the lowest
    // possible version of a package should be selected. "util 1.0.0" can't be
    // selected because of the requirements of "bar", so the minimum version
    // must be 1.1.1.
    let reg = registry(vec![
        pkg!(("util", "1.2.2")),
        pkg!(("util", "1.0.0")),
        pkg!(("util", "1.1.1")),
        pkg!("foo" => [dep_req("util", "1.0.0")]),
        pkg!("bar" => [dep_req("util", ">=1.0.1")]),
    ]);

    // Build a Config with the `minimal-versions` unstable flag enabled.
    let mut config = Config::default().unwrap();
    config
        .configure(
            1,
            None,
            &None,
            false,
            false,
            &None,
            &["minimal-versions".to_string()],
        )
        .unwrap();

    let res = resolve_with_config(
        &pkg_id("root"),
        vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")],
        &reg,
        Some(&config),
    )
    .unwrap();

    assert_contains(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("foo", "1.0.0"),
            ("bar", "1.0.0"),
            ("util", "1.1.1"),
        ]),
    );
    // Neither the maximum version nor the bar-incompatible one may appear.
    assert!(!res.contains(&("util", "1.2.2").to_pkgid()));
    assert!(!res.contains(&("util", "1.0.0").to_pkgid()));
}
+
// Ensure that the "-Z minimal-versions" CLI option works and the minimal
// version of a dependency ends up in the lock file.
#[test]
fn minimal_version_cli() {
    Package::new("dep", "1.0.0").publish();
    Package::new("dep", "1.1.0").publish();

    let p = project()
        .file(
            "Cargo.toml",
            r#"
            [package]
            name = "foo"
            authors = []
            version = "0.0.1"

            [dependencies]
            dep = "1.0"
        "#,
        )
        .file("src/main.rs", "fn main() {}")
        .build();

    // `-Z` flags require (masquerading as) a nightly cargo.
    p.cargo("generate-lockfile -Zminimal-versions")
        .masquerade_as_nightly_cargo()
        .run();

    let lock = p.read_lockfile();

    // 1.0.0, not the newest 1.1.0, must have been locked.
    assert!(lock.contains("dep 1.0.0"));
}
+
#[test]
fn resolving_incompat_versions() {
    // The root pins `foo = "=1.0.1"`, while `bar` insists on `foo = "=1.0.2"`;
    // only one 1.x `foo` may be activated, so resolution must fail.
    let available = registry(vec![
        pkg!(("foo", "1.0.1")),
        pkg!(("foo", "1.0.2")),
        pkg!("bar" => [dep_req("foo", "=1.0.2")]),
    ]);

    let result = resolve(
        &pkg_id("root"),
        vec![dep_req("foo", "=1.0.1"), dep("bar")],
        &available,
    );
    assert!(result.is_err());
}
+
#[test]
fn resolving_wrong_case_from_registry() {
    // In the future we may #5678 allow this to happen.
    // For backwards-compatibility reasons, we probably won't.
    // But we may want to future-proof ourselves by understanding it.
    // This test documents the current behavior:
    // a dependency spelled `Foo` does not match the registry's `foo`.
    let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]);

    assert!(resolve(&pkg_id("root"), vec![dep("bar")], &reg).is_err());
}
+
#[test]
fn resolving_mis_hyphenated_from_registry() {
    // In the future we may #2775 allow this to happen.
    // For backwards-compatibility reasons, we probably won't.
    // But we may want to future-proof ourselves by understanding it.
    // This test documents the current behavior:
    // a dependency on `fo_o` does not match the registry's `fo-o`.
    let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]);

    assert!(resolve(&pkg_id("root"), vec![dep("bar")], &reg).is_err());
}
+
#[test]
fn resolving_backtrack() {
    // `foo 1.0.2` leads to `bar`, whose `foo = "=2.0.2"` requirement is
    // unsatisfiable, so the resolver must back up and settle on
    // `foo 1.0.1` -> `baz` instead.
    let available = registry(vec![
        pkg!(("foo", "1.0.2") => [dep("bar")]),
        pkg!(("foo", "1.0.1") => [dep("baz")]),
        pkg!("bar" => [dep_req("foo", "=2.0.2")]),
        pkg!("baz"),
    ]);

    let resolved = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], &available).unwrap();

    assert_contains(
        &resolved,
        &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]),
    );
}
+
#[test]
fn resolving_backtrack_features() {
    // Regression test for cargo/issues/4347: requesting a feature `bar`
    // does not define makes `foo 1.0.2` unusable, so resolution must fall
    // back to `foo 1.0.1`.
    let mut bar_with_bad_feature = dep("bar");
    bar_with_bad_feature.set_features(vec!["bad"]);

    let available = registry(vec![
        pkg!(("foo", "1.0.2") => [bar_with_bad_feature]),
        pkg!(("foo", "1.0.1") => [dep("bar")]),
        pkg!("bar"),
    ]);

    let resolved = resolve(&pkg_id("root"), vec![dep_req("foo", "^1")], &available).unwrap();

    assert_contains(
        &resolved,
        &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("bar", "1.0.0")]),
    );
}
+
#[test]
fn resolving_allows_multiple_compatible_versions() {
    // Semver-incompatible requirements (1.x, 2.x, 0.1.x, 0.2.x) may each
    // activate their own copy of `foo` within one resolution.
    let reg = registry(vec![
        pkg!(("foo", "1.0.0")),
        pkg!(("foo", "2.0.0")),
        pkg!(("foo", "0.1.0")),
        pkg!(("foo", "0.2.0")),
        pkg!("bar" => ["d1", "d2", "d3", "d4"]),
        pkg!("d1" => [dep_req("foo", "1")]),
        pkg!("d2" => [dep_req("foo", "2")]),
        pkg!("d3" => [dep_req("foo", "0.1")]),
        pkg!("d4" => [dep_req("foo", "0.2")]),
    ]);

    let res = resolve(&pkg_id("root"), vec![dep("bar")], &reg).unwrap();

    // All four `foo` copies coexist in the final resolution.
    assert_same(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("foo", "1.0.0"),
            ("foo", "2.0.0"),
            ("foo", "0.1.0"),
            ("foo", "0.2.0"),
            ("d1", "1.0.0"),
            ("d2", "1.0.0"),
            ("d3", "1.0.0"),
            ("d4", "1.0.0"),
            ("bar", "1.0.0"),
        ]),
    );
}
+
#[test]
fn resolving_with_deep_backtracking() {
    // `foo 1.0.1 -> bar 1.0.0` dead-ends because `other` is not in the
    // registry at all, forcing backtracking down to
    // `foo 1.0.0 -> bar 2.0.0 -> baz 1.0.1`.
    // The packages literally named "dep_req" are unreferenced decoys —
    // nothing depends on them.
    let reg = registry(vec![
        pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
        pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
        pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
                                  dep_req("other", "1")]),
        pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
        pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
        pkg!(("baz", "1.0.1")),
        pkg!(("dep_req", "1.0.0")),
        pkg!(("dep_req", "2.0.0")),
    ]);

    let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();

    assert_same(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("foo", "1.0.0"),
            ("bar", "2.0.0"),
            ("baz", "1.0.1"),
        ]),
    );
}
+
#[test]
fn resolving_with_sys_crates() {
    // This is based on issues/4902
    // With `l`, a normal library, we get 2 copies so everyone gets the newest compatible.
    // But with `l-sys`, a library with a links attribute, we make sure there is only one.
    let reg = registry(vec![
        pkg!(("l-sys", "0.9.1")),
        pkg!(("l-sys", "0.10.0")),
        pkg!(("l", "0.9.1")),
        pkg!(("l", "0.10.0")),
        pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]),
        pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]),
    ]);

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("d", "1"), dep_req("r", "1")],
        &reg,
    )
    .unwrap();

    // Note: one `l-sys` (0.9.1, satisfying both requirements) but two `l`s.
    assert_same(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("d", "1.0.0"),
            ("r", "1.0.0"),
            ("l-sys", "0.9.1"),
            ("l", "0.9.1"),
            ("l", "0.10.0"),
        ]),
    );
}
+
#[test]
fn resolving_with_constrained_sibling_backtrack_parent() {
    // There is no point in considering all of the backtrack_trap{1,2}
    // candidates since they can't change the result of failing to
    // resolve 'constrained'. Cargo should (ideally) skip past them and resume
    // resolution once the activation of the parent, 'bar', is rolled back.
    // Note that the traps are slightly more constrained to make sure they
    // get picked first.
    let mut reglist = vec![
        pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"),
                                  dep_req("constrained", "=1.0.0")]),
        pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"),
                                  dep_req("backtrack_trap2", "1.0.2"),
                                  dep_req("constrained", "1.0.0")]),
        pkg!(("constrained", "1.0.0")),
        pkg!(("backtrack_trap1", "1.0.0")),
        pkg!(("backtrack_trap2", "1.0.0")),
    ];
    // Bump this to make the test harder - it adds more versions of bar that will
    // fail to resolve, and more versions of the traps to consider.
    const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2
    for i in 1..NUM_BARS_AND_TRAPS {
        let vsn = format!("1.0.{}", i);
        // Every bar >= 1.0.1 requires `constrained ^1.0.1`, which conflicts
        // with foo's `constrained =1.0.0`; only bar 1.0.0 can succeed.
        reglist.push(
            pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"),
                                                   dep_req("backtrack_trap2", "1.0.2"),
                                                   dep_req("constrained", "1.0.1")]),
        );
        reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
        reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
        reglist.push(pkg!(("constrained", vsn.clone())));
    }
    let reg = registry(reglist);

    let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();

    assert_contains(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("foo", "1.0.0"),
            ("bar", "1.0.0"),
            ("constrained", "1.0.0"),
        ]),
    );
}
+
#[test]
fn resolving_with_many_equivalent_backtracking() {
    let mut reglist = Vec::new();

    const DEPTH: usize = 200;
    const BRANCHING_FACTOR: usize = 100;

    // Each level depends on the next but the last level does not exist.
    // Without caching we need to test every path to the last level O(BRANCHING_FACTOR ^ DEPTH)
    // and this test will time out. With caching we need to discover that none of these
    // can be activated O(BRANCHING_FACTOR * DEPTH)
    for l in 0..DEPTH {
        let name = format!("level{}", l);
        let next = format!("level{}", l + 1);
        for i in 1..BRANCHING_FACTOR {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")]));
        }
    }

    let reg = registry(reglist.clone());

    let res = resolve(&pkg_id("root"), vec![dep_req("level0", "*")], &reg);

    assert!(res.is_err());

    // It is easy to write code that quickly returns an error.
    // Let's make sure we can find a good answer if it is there.
    reglist.push(pkg!(("level0", "1.0.0")));

    let reg = registry(reglist.clone());

    let res = resolve(&pkg_id("root"), vec![dep_req("level0", "*")], &reg).unwrap();

    assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")]));

    // Make sure we have not special-cased the situation of having no candidates.
    reglist.push(pkg!(("constrained", "1.1.0")));
    reglist.push(pkg!(("constrained", "1.0.0")));
    reglist.push(
        pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]),
    );

    let reg = registry(reglist.clone());

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("level0", "*"), dep_req("constrained", "*")],
        &reg,
    )
    .unwrap();

    assert_contains(
        &res,
        &names(&[
            ("root", "1.0.0"),
            ("level0", "1.0.0"),
            ("constrained", "1.1.0"),
        ]),
    );

    let reg = registry(reglist.clone());

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("level0", "1.0.1"), dep_req("constrained", "*")],
        &reg,
    )
    .unwrap();

    assert_contains(
        &res,
        &names(&[
            ("root", "1.0.0"),
            (format!("level{}", DEPTH).as_str(), "1.0.0"),
            ("constrained", "1.0.0"),
        ]),
    );

    let reg = registry(reglist.clone());

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")],
        &reg,
    );

    assert!(res.is_err());
}
+
#[test]
fn resolving_with_deep_traps() {
    let mut reglist = Vec::new();

    const DEPTH: usize = 200;
    const BRANCHING_FACTOR: usize = 100;

    // Each backtrack_trap depends on the next, and adds a backtrack frame.
    // None of which is going to help with `bad`.
    for l in 0..DEPTH {
        let name = format!("backtrack_trap{}", l);
        let next = format!("backtrack_trap{}", l + 1);
        for i in 1..BRANCHING_FACTOR {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")]));
        }
    }
    {
        let name = format!("backtrack_trap{}", DEPTH);
        for i in 1..BRANCHING_FACTOR {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!((name.as_str(), vsn.as_str())));
        }
    }
    {
        // slightly less constrained to make sure `cloaking` gets picked last.
        // `bad` is never published, so every `cloaking` candidate is a dead end.
        for i in 1..(BRANCHING_FACTOR + 10) {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")]));
        }
    }

    let reg = registry(reglist.clone());

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("backtrack_trap0", "*"), dep_req("cloaking", "*")],
        &reg,
    );

    assert!(res.is_err());
}
+
#[test]
fn resolving_with_constrained_cousins_backtrack() {
    // A conflict between "cousin" dependencies (neither an ancestor of the
    // other) must still be discovered without exploring every trap version.
    let mut reglist = Vec::new();

    const DEPTH: usize = 100;
    const BRANCHING_FACTOR: usize = 50;

    // Each backtrack_trap depends on the next.
    // The last depends on a specific ver of constrained.
    for l in 0..DEPTH {
        let name = format!("backtrack_trap{}", l);
        let next = format!("backtrack_trap{}", l + 1);
        for i in 1..BRANCHING_FACTOR {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")]));
        }
    }
    {
        let name = format!("backtrack_trap{}", DEPTH);
        for i in 1..BRANCHING_FACTOR {
            let vsn = format!("1.0.{}", i);
            reglist.push(
                pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]),
            );
        }
    }
    {
        // slightly less constrained to make sure `constrained` gets picked last.
        for i in 0..(BRANCHING_FACTOR + 10) {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!(("constrained", vsn.as_str())));
        }
        reglist.push(pkg!(("constrained", "1.1.0")));
        reglist.push(pkg!(("constrained", "2.0.0")));
        reglist.push(pkg!(("constrained", "2.0.1")));
    }
    reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")]));

    let reg = registry(reglist.clone());

    // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"`
    // but `constrained= "2.0.1"` is already picked.
    // Only then to try and solve `constrained= "~1.0.0"` which is incompatible.
    let res = resolve(
        &pkg_id("root"),
        vec![
            dep_req("backtrack_trap0", "*"),
            dep_req("constrained", "2.0.1"),
            dep_req("cloaking", "*"),
        ],
        &reg,
    );

    assert!(res.is_err());

    // Each level depends on the next but the last depends on incompatible deps.
    // Let's make sure that we can cache that a dep has incompatible deps.
    for l in 0..DEPTH {
        let name = format!("level{}", l);
        let next = format!("level{}", l + 1);
        for i in 1..BRANCHING_FACTOR {
            let vsn = format!("1.0.{}", i);
            reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep_req(next.as_str(), "*")]));
        }
    }
    reglist.push(
        pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("backtrack_trap0", "*"),
            dep_req("cloaking", "*")
        ]),
    );

    let reg = registry(reglist.clone());

    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("level0", "*"), dep_req("constrained", "2.0.1")],
        &reg,
    );

    assert!(res.is_err());

    // With `constrained 2.0.0`, both the trap chain and `cloaking` can be
    // satisfied, so this one must succeed.
    let res = resolve(
        &pkg_id("root"),
        vec![dep_req("level0", "*"), dep_req("constrained", "2.0.0")],
        &reg,
    )
    .unwrap();

    assert_contains(
        &res,
        &names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")]),
    );
}
+
+#[test]
+fn resolving_with_constrained_sibling_backtrack_activation() {
+    // It makes sense to resolve most-constrained deps first, but
+    // with that logic the backtrack traps here come between the two
+    // attempted resolutions of 'constrained'. When backtracking,
+    // cargo should skip past them and resume resolution once the
+    // number of activations for 'constrained' changes.
+    let mut reglist = vec![
+        pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"),
+                                  dep_req("backtrack_trap1", "1.0"),
+                                  dep_req("backtrack_trap2", "1.0"),
+                                  dep_req("constrained", "<=1.0.60")]),
+        pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]),
+    ];
+    // Bump these to make the test harder, but you'll also need to
+    // change the version constraints on `constrained` above. To correctly
+    // exercise Cargo, the relationship between the values is:
+    // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn
+    // to make sure the traps are resolved between `constrained`.
+    const NUM_TRAPS: usize = 45; // min 1
+    const NUM_CONSTRAINED: usize = 100; // min 1
+    for i in 0..NUM_TRAPS {
+        let vsn = format!("1.0.{}", i);
+        reglist.push(pkg!(("backtrack_trap1", vsn.clone())));
+        reglist.push(pkg!(("backtrack_trap2", vsn.clone())));
+    }
+    for i in 0..NUM_CONSTRAINED {
+        let vsn = format!("1.0.{}", i);
+        reglist.push(pkg!(("constrained", vsn.clone())));
+    }
+    let reg = registry(reglist);
+
+    let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg).unwrap();
+
+    // `foo` (<=1.0.60) and `bar` (>=1.0.60) only agree on exactly 1.0.60,
+    // so a correct resolution must have backtracked through the traps.
+    assert_contains(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("foo", "1.0.0"),
+            ("bar", "1.0.0"),
+            ("constrained", "1.0.60"),
+        ]),
+    );
+}
+
+#[test]
+fn resolving_with_constrained_sibling_transitive_dep_effects() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives. E.g.
+    // in the right-to-left resolution of the graph below, B may
+    // affect whether D is successfully resolved.
+    //
+    //    A
+    //  / | \
+    // B  C  D
+    // |  |
+    // C  D
+    let reg = registry(vec![
+        pkg!(("A", "1.0.0") => [dep_req("B", "1.0"),
+                                dep_req("C", "1.0"),
+                                dep_req("D", "1.0.100")]),
+        pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]),
+        pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]),
+        // Only C 1.0.0 is compatible with A's `D = 1.0.100` requirement;
+        // the newer Cs pin D below 1.0.100 and must be rejected.
+        pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]),
+        pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]),
+        pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]),
+        pkg!(("D", "1.0.0")),
+        pkg!(("D", "1.0.1")),
+        pkg!(("D", "1.0.2")),
+        pkg!(("D", "1.0.100")),
+        pkg!(("D", "1.0.101")),
+        pkg!(("D", "1.0.102")),
+        pkg!(("D", "1.0.103")),
+        pkg!(("D", "1.0.104")),
+        pkg!(("D", "1.0.105")),
+    ]);
+
+    let res = resolve(&pkg_id("root"), vec![dep_req("A", "1")], &reg).unwrap();
+
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("A", "1.0.0"),
+            ("B", "1.0.0"),
+            ("C", "1.0.0"),
+            ("D", "1.0.105"),
+        ]),
+    );
+}
+
+#[test]
+fn incomplete_information_skiping() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives.
+    // Fuzzing discovered that for some reason cargo was skipping based
+    // on incomplete information in the following case:
+    // minimized bug found in:
+    // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+    let input = vec![
+        pkg!(("a", "1.0.0")),
+        pkg!(("a", "1.1.0")),
+        pkg!("b" => [dep("a")]),
+        pkg!(("c", "1.0.0")),
+        pkg!(("c", "1.1.0")),
+        pkg!("d" => [dep_req("c", "=1.0")]),
+        pkg!(("e", "1.0.0")),
+        pkg!(("e", "1.1.0") => [dep_req("c", "1.1")]),
+        pkg!("to_yank"),
+        pkg!(("f", "1.0.0") => [
+            dep("to_yank"),
+            dep("d"),
+        ]),
+        pkg!(("f", "1.1.0") => [dep("d")]),
+        pkg!("g" => [
+            dep("b"),
+            dep("e"),
+            dep("f"),
+        ]),
+    ];
+    let reg = registry(input.clone());
+
+    let res = resolve(&pkg_id("root"), vec![dep("g")], &reg).unwrap();
+    let package_to_yank = "to_yank".to_pkgid();
+    // this package is not used in the resolution.
+    assert!(!res.contains(&package_to_yank));
+    // so when we yank it (rebuild the registry without it)
+    let new_reg = registry(
+        input
+            .iter()
+            .cloned()
+            .filter(|x| &package_to_yank != x.package_id())
+            .collect(),
+    );
+    // sanity check: exactly one package was removed
+    assert_eq!(input.len(), new_reg.len() + 1);
+    // it should still build
+    assert!(resolve(&pkg_id("root"), vec![dep("g")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skiping_2() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives.
+    // Fuzzing discovered that for some reason cargo was skipping based
+    // on incomplete information in the following case:
+    // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+    // NOTE: the "bad" package is deliberately absent from the registry, so
+    // any path that needs it must fail and force backtracking.
+    let input = vec![
+        pkg!(("b", "3.8.10")),
+        pkg!(("b", "8.7.4")),
+        pkg!(("b", "9.4.6")),
+        pkg!(("c", "1.8.8")),
+        pkg!(("c", "10.2.5")),
+        pkg!(("d", "4.1.2") => [
+            dep_req("bad", "=6.10.9"),
+        ]),
+        pkg!(("d", "5.5.6")),
+        pkg!(("d", "5.6.10")),
+        pkg!(("to_yank", "8.0.1")),
+        pkg!(("to_yank", "8.8.1")),
+        pkg!(("e", "4.7.8") => [
+            dep_req("d", ">=5.5.6, <=5.6.10"),
+            dep_req("to_yank", "=8.0.1"),
+        ]),
+        pkg!(("e", "7.4.9") => [
+            dep_req("bad", "=4.7.5"),
+        ]),
+        pkg!("f" => [
+            dep_req("d", ">=4.1.2, <=5.5.6"),
+        ]),
+        pkg!("g" => [
+            dep("bad"),
+        ]),
+        pkg!(("h", "3.8.3") => [
+            dep_req("g", "*"),
+        ]),
+        pkg!(("h", "6.8.3") => [
+            dep("f"),
+        ]),
+        pkg!(("h", "8.1.9") => [
+            dep_req("to_yank", "=8.8.1"),
+        ]),
+        pkg!("i" => [
+            dep_req("b", "*"),
+            dep_req("c", "*"),
+            dep_req("e", "*"),
+            dep_req("h", "*"),
+        ]),
+    ];
+    let reg = registry(input.clone());
+
+    let res = resolve(&pkg_id("root"), vec![dep("i")], &reg).unwrap();
+    let package_to_yank = ("to_yank", "8.8.1").to_pkgid();
+    // this package is not used in the resolution.
+    assert!(!res.contains(&package_to_yank));
+    // so when we yank it (rebuild the registry without it)
+    let new_reg = registry(
+        input
+            .iter()
+            .cloned()
+            .filter(|x| &package_to_yank != x.package_id())
+            .collect(),
+    );
+    // sanity check: exactly one package was removed
+    assert_eq!(input.len(), new_reg.len() + 1);
+    // it should still build
+    assert!(resolve(&pkg_id("root"), vec![dep("i")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skiping_3() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to not miss the transitive effects of alternatives.
+    // Fuzzing discovered that for some reason cargo was skipping based
+    // on incomplete information in the following case:
+    // minimized bug found in:
+    // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+    let input = vec![
+        pkg!{("to_yank", "3.0.3")},
+        pkg!{("to_yank", "3.3.0")},
+        pkg!{("to_yank", "3.3.1")},
+        pkg!{("a", "3.3.0") => [
+            dep_req("to_yank", "=3.0.3"),
+        ] },
+        pkg!{("a", "3.3.2") => [
+            dep_req("to_yank", "<=3.3.0"),
+        ] },
+        pkg!{("b", "0.1.3") => [
+            dep_req("a", "=3.3.0"),
+        ] },
+        pkg!{("b", "2.0.2") => [
+            dep_req("to_yank", "3.3.0"),
+            dep_req("a", "*"),
+        ] },
+        pkg!{("b", "2.3.3") => [
+            dep_req("to_yank", "3.3.0"),
+            dep_req("a", "=3.3.0"),
+        ] },
+    ];
+    let reg = registry(input.clone());
+
+    let res = resolve(&pkg_id("root"), vec![dep_req("b", "*")], &reg).unwrap();
+    let package_to_yank = ("to_yank", "3.0.3").to_pkgid();
+    // this package is not used in the resolution.
+    assert!(!res.contains(&package_to_yank));
+    // so when we yank it (rebuild the registry without it)
+    let new_reg = registry(
+        input
+            .iter()
+            .cloned()
+            .filter(|x| &package_to_yank != x.package_id())
+            .collect(),
+    );
+    // sanity check: exactly one package was removed
+    assert_eq!(input.len(), new_reg.len() + 1);
+    // it should still build
+    assert!(resolve(&pkg_id("root"), vec![dep_req("b", "*")], &new_reg).is_ok());
+}
+
+#[test]
+fn resolving_but_no_exists() {
+    // Resolving a dependency against an empty registry must fail, and the
+    // error message must name the missing package, the location searched,
+    // and the package that required it.
+    let reg = registry(vec![]);
+
+    let res = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg);
+    assert!(res.is_err());
+
+    assert_eq!(
+        res.err().unwrap().to_string(),
+        "\
+         no matching package named `foo` found\n\
+         location searched: registry `http://example.com/`\n\
+         required by package `root v1.0.0 (registry `http://example.com/`)`\
+         "
+    );
+}
+
+#[test]
+fn resolving_cycle() {
+    let reg = registry(vec![pkg!("foo" => ["foo"])]);
+
+    let _ = resolve(&pkg_id("root"), vec![dep_req("foo", "1")], &reg);
+}
+
+#[test]
+fn hard_equality() {
+    // Both the root and `bar` constrain `foo`; the exact-version requirement
+    // `=1.0.0` must win over the newer 1.0.1 that `bar`'s caret requirement
+    // alone would have picked.
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1")),
+        pkg!(("foo", "1.0.0")),
+        pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
+    ]);
+
+    let res = resolve(
+        &pkg_id("root"),
+        vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")],
+        &reg,
+    )
+    .unwrap();
+
+    assert_same(
+        &res,
+        &names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]),
+    );
+}
diff --git a/tests/testsuite/run.rs b/tests/testsuite/run.rs
new file mode 100644 (file)
index 0000000..d9d15e2
--- /dev/null
@@ -0,0 +1,1031 @@
+use cargo::util::paths::dylib_path_envvar;
+use support;
+use support::{basic_bin_manifest, basic_lib_manifest, project, Project};
+
+#[test]
+fn simple() {
+    // `cargo run` on a minimal binary: compiles, runs, forwards the
+    // program's stdout, and leaves the built binary on disk.
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    p.cargo("run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/foo[EXE]`",
+        ).with_stdout("hello")
+        .run();
+    assert!(p.bin("foo").is_file());
+}
+
+#[test]
+fn simple_quiet() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    p.cargo("run -q").with_stdout("hello").run();
+
+    p.cargo("run --quiet").with_stdout("hello").run();
+}
+
+#[test]
+fn simple_quiet_and_verbose() {
+    // `-q` and `-v` are mutually exclusive; cargo must reject the
+    // combination with exit status 101 and a clear error.
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    p.cargo("run -q -v")
+        .with_status(101)
+        .with_stderr("[ERROR] cannot set both --verbose and --quiet")
+        .run();
+}
+
+#[test]
+fn quiet_and_verbose_config() {
+    // A `term.verbose = true` setting in .cargo/config must not conflict
+    // with an explicit `-q` on the command line; the flag wins silently.
+    let p = project()
+        .file(
+            ".cargo/config",
+            r#"
+            [term]
+            verbose = true
+        "#,
+        ).file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    p.cargo("run -q").run();
+}
+
+#[test]
+fn simple_with_args() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                assert_eq!(std::env::args().nth(1).unwrap(), "hello");
+                assert_eq!(std::env::args().nth(2).unwrap(), "world");
+            }
+        "#,
+        ).build();
+
+    p.cargo("run hello world").run();
+}
+
+#[test]
+fn exit_code() {
+    // The binary's exit code (2) must be propagated as cargo's own status.
+    let p = project()
+        .file("src/main.rs", "fn main() { std::process::exit(2); }")
+        .build();
+
+    let mut output = String::from(
+        "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target[..]`
+",
+    );
+    // On non-Unix platforms cargo additionally reports the failed process;
+    // on Unix the status is passed through without the extra error line.
+    if !cfg!(unix) {
+        output.push_str(
+            "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)",
+        );
+    }
+    p.cargo("run").with_status(2).with_stderr(output).run();
+}
+
+#[test]
+fn exit_code_verbose() {
+    // Same as `exit_code`, but with `-v` the expected stderr also includes
+    // the `rustc` invocation line.
+    let p = project()
+        .file("src/main.rs", "fn main() { std::process::exit(2); }")
+        .build();
+
+    let mut output = String::from(
+        "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target[..]`
+",
+    );
+    // Non-Unix platforms get an extra error line about the failed process.
+    if !cfg!(unix) {
+        output.push_str(
+            "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)",
+        );
+    }
+
+    p.cargo("run -v").with_status(2).with_stderr(output).run();
+}
+
+#[test]
+fn no_main_file() {
+    // A library-only package has nothing runnable; `cargo run` must fail
+    // with a message saying a bin target is required.
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("run")
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] a bin target must be available \
+             for `cargo run`\n",
+        ).run();
+}
+
+#[test]
+fn too_many_bins() {
+    // With two bin targets and no `--bin`, `cargo run` cannot pick one.
+    // Stable cargo and (masqueraded) nightly cargo word the error
+    // differently — nightly also mentions the `default-run` manifest key.
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "")
+        .file("src/bin/b.rs", "")
+        .build();
+
+    // Using [..] here because the order is not stable
+    p.cargo("run")
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] `cargo run` requires that a package only \
+             have one executable; use the `--bin` option \
+             to specify which one to run\navailable binaries: [..]\n",
+        ).run();
+
+    // Using [..] here because the order is not stable
+    p.cargo("run")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] `cargo run` could not determine which binary to run. \
+             Use the `--bin` option to specify a binary, or (on \
+             nightly) the `default-run` manifest key.\
+             \navailable binaries: [..]\n",
+        ).run();
+}
+
+#[test]
+fn specify_name() {
+    // `--bin <name>` selects which of two binaries to build and run. The
+    // second invocation reuses the already-built library, so its expected
+    // stderr has no `src/lib.rs` rustc line.
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "src/bin/a.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            fn main() { println!("hello a.rs"); }
+        "#,
+        ).file(
+            "src/bin/b.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            fn main() { println!("hello b.rs"); }
+        "#,
+        ).build();
+
+    p.cargo("run --bin a -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[RUNNING] `rustc [..] src/bin/a.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/a[EXE]`",
+        ).with_stdout("hello a.rs")
+        .run();
+
+    p.cargo("run --bin b -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] src/bin/b.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/b[EXE]`",
+        ).with_stdout("hello b.rs")
+        .run();
+}
+
+#[test]
+fn specify_default_run() {
+    // The (nightly-gated) `default-run` manifest key picks the binary for a
+    // bare `cargo run`; an explicit `--bin` still overrides it.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["default-run"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            default-run = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#)
+        .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#)
+        .build();
+
+    p.cargo("run")
+        .masquerade_as_nightly_cargo()
+        .with_stdout("hello A")
+        .run();
+    p.cargo("run --bin a")
+        .masquerade_as_nightly_cargo()
+        .with_stdout("hello A")
+        .run();
+    p.cargo("run --bin b")
+        .masquerade_as_nightly_cargo()
+        .with_stdout("hello B")
+        .run();
+}
+
+#[test]
+fn bogus_default_run() {
+    // `default-run` naming a nonexistent binary must fail with a
+    // suggestion of the closest existing target name.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            cargo-features = ["default-run"]
+
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            default-run = "b"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#)
+        .build();
+
+    p.cargo("run")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr("error: no bin target named `b`\n\nDid you mean [..]?")
+        .run();
+}
+
+#[test]
+fn default_run_unstable() {
+    // Using `default-run` without declaring the `default-run` cargo-feature
+    // must be rejected. The hint differs: stable cargo says to switch to
+    // nightly, while (masqueraded) nightly says to add the feature line.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            default-run = "a"
+        "#,
+        ).file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#)
+        .build();
+
+    p.cargo("run")
+        .with_status(101)
+        .with_stderr(
+            r#"error: failed to parse manifest at [..]
+
+Caused by:
+  the `default-run` manifest key is unstable
+
+Caused by:
+  feature `default-run` is required
+
+this Cargo does not support nightly features, but if you
+switch to nightly channel you can add
+`cargo-features = ["default-run"]` to enable this feature
+"#,
+        ).run();
+
+    p.cargo("run")
+        .masquerade_as_nightly_cargo()
+        .with_status(101)
+        .with_stderr(
+            r#"error: failed to parse manifest at [..]
+
+Caused by:
+  the `default-run` manifest key is unstable
+
+Caused by:
+  feature `default-run` is required
+
+consider adding `cargo-features = ["default-run"]` to the manifest
+"#,
+        ).run();
+}
+
+#[test]
+fn run_example() {
+    // `--example a` must run examples/a.rs, not the identically named
+    // binary in src/bin/a.rs.
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+        .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#)
+        .build();
+
+    p.cargo("run --example a")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`",
+        ).with_stdout("example")
+        .run();
+}
+
+#[test]
+fn run_library_example() {
+    // An example with `crate_type = ["lib"]` produces no executable, so
+    // `cargo run --example` must refuse to run it.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            [[example]]
+            name = "bar"
+            crate_type = ["lib"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/bar.rs", "fn foo() {}")
+        .build();
+
+    p.cargo("run --example bar")
+        .with_status(101)
+        .with_stderr("[ERROR] example target `bar` is a library and cannot be executed")
+        .run();
+}
+
+fn autodiscover_examples_project(rust_edition: &str, autoexamples: Option<bool>) -> Project {
+    let autoexamples = match autoexamples {
+        None => "".to_string(),
+        Some(bool) => format!("autoexamples = {}", bool),
+    };
+    project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+                    [project]
+                    name = "foo"
+                    version = "0.0.1"
+                    authors = []
+                    edition = "{rust_edition}"
+                    {autoexamples}
+
+                    [features]
+                    magic = []
+
+                    [[example]]
+                    name = "do_magic"
+                    required-features = ["magic"]
+                "#,
+                rust_edition = rust_edition,
+                autoexamples = autoexamples
+            ),
+        ).file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+        .file(
+            "examples/do_magic.rs",
+            r#"
+                fn main() { println!("magic example"); }
+            "#,
+        ).build()
+}
+
+#[test]
+fn run_example_autodiscover_2015() {
+    // On the 2015 edition with an explicit [[example]] section and no
+    // `autoexamples` key, undeclared example files are NOT inferred; cargo
+    // warns about the upcoming 2018 behavior change and then fails.
+    if !support::is_nightly() {
+        return;
+    }
+
+    let p = autodiscover_examples_project("2015", None);
+    p.cargo("run --example a")
+        .with_status(101)
+        .with_stderr(
+            "warning: \
+An explicit [[example]] section is specified in Cargo.toml which currently
+disables Cargo from automatically inferring other example targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a example target:
+
+* [..]a.rs
+
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a example target today. You can future-proof yourself
+and disable this warning by adding `autoexamples = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330
+error: no example target named `a`
+",
+        ).run();
+}
+
+#[test]
+fn run_example_autodiscover_2015_with_autoexamples_enabled() {
+    // `autoexamples = true` opts in to discovery even on the 2015 edition,
+    // so the undeclared examples/a.rs runs without any warning.
+    if !support::is_nightly() {
+        return;
+    }
+
+    let p = autodiscover_examples_project("2015", Some(true));
+    p.cargo("run --example a")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`",
+        ).with_stdout("example")
+        .run();
+}
+
+#[test]
+fn run_example_autodiscover_2015_with_autoexamples_disabled() {
+    // `autoexamples = false` explicitly disables discovery: the undeclared
+    // example is simply unknown, with no transition warning.
+    if !support::is_nightly() {
+        return;
+    }
+
+    let p = autodiscover_examples_project("2015", Some(false));
+    p.cargo("run --example a")
+        .with_status(101)
+        .with_stderr("error: no example target named `a`\n")
+        .run();
+}
+
+#[test]
+fn run_example_autodiscover_2018() {
+    // On the 2018 edition, example autodiscovery is the default even with
+    // an explicit [[example]] section present.
+    if !support::is_nightly() {
+        return;
+    }
+
+    let p = autodiscover_examples_project("2018", None);
+    p.cargo("run --example a")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`",
+        ).with_stdout("example")
+        .run();
+}
+
+#[test]
+fn run_bins() {
+    // `--bins` (plural) is not a valid flag for `cargo run`; clap rejects
+    // it before cargo's own logic runs, hence exit status 1 rather than
+    // cargo's usual 101.
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+        .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#)
+        .build();
+
+    p.cargo("run --bins")
+        .with_status(1)
+        .with_stderr_contains(
+            "error: Found argument '--bins' which wasn't expected, or isn't valid in this context",
+        ).run();
+}
+
+#[test]
+fn run_with_filename() {
+    // Targets are addressed by name, not file name. Passing `a.rs` must
+    // fail, and when a target `a` exists the error suggests it.
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "src/bin/a.rs",
+            r#"
+            extern crate foo;
+            fn main() { println!("hello a.rs"); }
+        "#,
+        ).file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+        .build();
+
+    p.cargo("run --bin bin.rs")
+        .with_status(101)
+        .with_stderr("[ERROR] no bin target named `bin.rs`")
+        .run();
+
+    p.cargo("run --bin a.rs")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] no bin target named `a.rs`
+
+Did you mean `a`?",
+        ).run();
+
+    p.cargo("run --example example.rs")
+        .with_status(101)
+        .with_stderr("[ERROR] no example target named `example.rs`")
+        .run();
+
+    p.cargo("run --example a.rs")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] no example target named `a.rs`
+
+Did you mean `a`?",
+        ).run();
+}
+
+#[test]
+fn either_name_or_example() {
+    // `--bin` and `--example` together would select two executables;
+    // `cargo run` can only run one and must reject the combination.
+    let p = project()
+        .file("src/bin/a.rs", r#"fn main() { println!("hello a.rs"); }"#)
+        .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#)
+        .build();
+
+    p.cargo("run --bin a --example b")
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] `cargo run` can run at most one \
+             executable, but multiple were \
+             specified",
+        ).run();
+}
+
+#[test]
+fn one_bin_multiple_examples() {
+    // Examples do not count toward the "exactly one executable" rule: with
+    // a single bin target, a bare `cargo run` runs it even though several
+    // examples exist.
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "src/bin/main.rs",
+            r#"fn main() { println!("hello main.rs"); }"#,
+        ).file("examples/a.rs", r#"fn main() { println!("hello a.rs"); }"#)
+        .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#)
+        .build();
+
+    p.cargo("run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/main[EXE]`",
+        ).with_stdout("hello main.rs")
+        .run();
+}
+
+#[test]
+fn example_with_release_flag() {
+    // `--release` must apply to both the example and its path dependency:
+    // with the flag, both crates compile with `-C opt-level=3` into
+    // target/release and report `debug_assertions` off ("fast"); without
+    // it, both compile with debuginfo into target/debug ("slow").
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            version = "*"
+            path = "bar"
+        "#,
+        ).file(
+            "examples/a.rs",
+            r#"
+            extern crate bar;
+
+            fn main() {
+                if cfg!(debug_assertions) {
+                    println!("slow1")
+                } else {
+                    println!("fast1")
+                }
+                bar::baz();
+            }
+        "#,
+        ).file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "bar/src/bar.rs",
+            r#"
+            pub fn baz() {
+                if cfg!(debug_assertions) {
+                    println!("slow2")
+                } else {
+                    println!("fast2")
+                }
+            }
+        "#,
+        ).build();
+
+    p.cargo("run -v --release --example a")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/bar.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C opt-level=3 \
+        -C metadata=[..] \
+        --out-dir [CWD]/target/release/deps \
+        -L dependency=[CWD]/target/release/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name a examples/a.rs --color never --crate-type bin \
+        --emit=dep-info,link \
+        -C opt-level=3 \
+        -C metadata=[..] \
+        --out-dir [CWD]/target/release/examples \
+        -L dependency=[CWD]/target/release/deps \
+         --extern bar=[CWD]/target/release/deps/libbar-[..].rlib`
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `target/release/examples/a[EXE]`
+",
+        ).with_stdout(
+            "\
+fast1
+fast2",
+        ).run();
+
+    p.cargo("run -v --example a")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.5.0 ([CWD]/bar)
+[RUNNING] `rustc --crate-name bar bar/src/bar.rs --color never --crate-type lib \
+        --emit=dep-info,link \
+        -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [CWD]/target/debug/deps \
+        -L dependency=[CWD]/target/debug/deps`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name a examples/a.rs --color never --crate-type bin \
+        --emit=dep-info,link \
+        -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [CWD]/target/debug/examples \
+        -L dependency=[CWD]/target/debug/deps \
+         --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`
+",
+        ).with_stdout(
+            "\
+slow1
+slow2",
+        ).run();
+}
+
+#[test]
+fn run_dylib_dep() {
+    // A dependency built as a dylib must be locatable at run time, i.e.
+    // `cargo run` has to set the dynamic-library search path correctly.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"extern crate bar; fn main() { bar::bar(); }"#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            crate-type = ["dylib"]
+        "#,
+        ).file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("run hello world").run();
+}
+
+#[test]
+fn release_works() {
+    // `cargo run --release` must build without debug assertions (the
+    // binary panics if they are on) and place the artifact in
+    // target/release.
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() { if cfg!(debug_assertions) { panic!() } }
+        "#,
+        ).build();
+
+    p.cargo("run --release")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `target/release/foo[EXE]`
+",
+        ).run();
+    assert!(p.release_bin("foo").is_file());
+}
+
+#[test]
+fn run_bin_different_name() {
+    // A [[bin]] whose name differs from the package name ("bar" vs "foo")
+    // must still be found and run by a bare `cargo run`.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+        "#,
+        ).file("src/bar.rs", "fn main() {}")
+        .build();
+
+    p.cargo("run").run();
+}
+
+#[test]
+fn dashes_are_forwarded() {
+    // Only the first `--` separates cargo's own arguments from the
+    // program's; any later `--` must be forwarded verbatim, as the binary
+    // itself asserts.
+    let p = project()
+        .file(
+            "src/bin/bar.rs",
+            r#"
+            fn main() {
+                let s: Vec<String> = std::env::args().collect();
+                assert_eq!(s[1], "--");
+                assert_eq!(s[2], "a");
+                assert_eq!(s[3], "--");
+                assert_eq!(s[4], "b");
+            }
+        "#,
+        ).build();
+
+    p.cargo("run -- -- a -- b").run();
+}
+
+#[test]
+fn run_from_executable_folder() {
+    // `cargo run` invoked from inside target/debug must still work; the
+    // already-built binary is reported with a relative `./foo` path and
+    // no recompilation happens.
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    let cwd = p.root().join("target").join("debug");
+    p.cargo("build").run();
+
+    p.cargo("run")
+        .cwd(cwd)
+        .with_stderr(
+            "\
+             [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\
+             [RUNNING] `./foo[EXE]`",
+        ).with_stdout("hello")
+        .run();
+}
+
+#[test]
+// The directory names deliberately contain a backslash and `=` characters
+// to check they survive the round trip through the dynamic-library
+// search-path environment variable.
+fn run_with_library_paths() {
+    let p = project();
+
+    // Only link search directories within the target output directory are
+    // propagated through to dylib_path_envvar() (see #3366).
+    let mut dir1 = p.target_debug_dir();
+    dir1.push("foo\\backslash");
+
+    let mut dir2 = p.target_debug_dir();
+    dir2.push("dir=containing=equal=signs");
+
+    let p = p
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            &format!(
+                r##"
+            fn main() {{
+                println!(r#"cargo:rustc-link-search=native={}"#);
+                println!(r#"cargo:rustc-link-search={}"#);
+            }}
+        "##,
+                dir1.display(),
+                dir2.display()
+            ),
+        ).file(
+            "src/main.rs",
+            &format!(
+                r##"
+            fn main() {{
+                let search_path = std::env::var_os("{}").unwrap();
+                let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                assert!(paths.contains(&r#"{}"#.into()));
+                assert!(paths.contains(&r#"{}"#.into()));
+            }}
+        "##,
+                dylib_path_envvar(),
+                dir1.display(),
+                dir2.display()
+            ),
+        ).build();
+
+    p.cargo("run").run();
+}
+
+#[test]
+// Link-search directories appear in the dylib search-path variable in ASCII
+// case-sensitive order, so uppercase names sort before lowercase ones.
+fn library_paths_sorted_alphabetically() {
+    let p = project();
+
+    let mut dir1 = p.target_debug_dir();
+    dir1.push("zzzzzzz");
+
+    let mut dir2 = p.target_debug_dir();
+    dir2.push("BBBBBBB");
+
+    let mut dir3 = p.target_debug_dir();
+    dir3.push("aaaaaaa");
+
+    let p = p
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            &format!(
+                r##"
+            fn main() {{
+                println!(r#"cargo:rustc-link-search=native={}"#);
+                println!(r#"cargo:rustc-link-search=native={}"#);
+                println!(r#"cargo:rustc-link-search=native={}"#);
+            }}
+        "##,
+                dir1.display(),
+                dir2.display(),
+                dir3.display()
+            ),
+        ).file(
+            "src/main.rs",
+            &format!(
+                r##"
+            fn main() {{
+                let search_path = std::env::var_os("{}").unwrap();
+                let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                // ASCII case-sensitive sort
+                assert_eq!("BBBBBBB", paths[0].file_name().unwrap().to_string_lossy());
+                assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy());
+                assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy());
+            }}
+        "##,
+                dylib_path_envvar()
+            ),
+        ).build();
+
+    p.cargo("run").run();
+}
+
+#[test]
+// With `-q`, cargo prints nothing on either stream even when the program
+// exits nonzero; cargo's exit status mirrors the program's (1).
+fn fail_no_extra_verbose() {
+    let p = project()
+        .file("src/main.rs", "fn main() { std::process::exit(1); }")
+        .build();
+
+    p.cargo("run -q")
+        .with_status(1)
+        .with_stdout("")
+        .with_stderr("")
+        .run();
+}
+
+#[test]
+fn run_multiple_packages() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            d1 = { path = "d1" }
+            d2 = { path = "d2" }
+            d3 = { path = "../d3" } # outside of the workspace
+
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }")
+        .file("foo/d1/Cargo.toml", &basic_bin_manifest("d1"))
+        .file("foo/d1/src/lib.rs", "")
+        .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("foo/d2/Cargo.toml", &basic_bin_manifest("d2"))
+        .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+        .file("d3/Cargo.toml", &basic_bin_manifest("d3"))
+        .file("d3/src/main.rs", "fn main() { println!(\"d2\"); }")
+        .build();
+
+    let cargo = || {
+        let mut process_builder = p.cargo("run");
+        process_builder.cwd(p.root().join("foo"));
+        process_builder
+    };
+
+    cargo().arg("-p").arg("d1").with_stdout("d1").run();
+
+    cargo()
+        .arg("-p")
+        .arg("d2")
+        .arg("--bin")
+        .arg("d2")
+        .with_stdout("d2")
+        .run();
+
+    cargo().with_stdout("foo").run();
+
+    cargo().arg("-p").arg("d1").arg("-p").arg("d2")
+                    .with_status(1)
+                    .with_stderr_contains("error: The argument '--package <SPEC>' was provided more than once, but cannot be used multiple times").run();
+
+    cargo()
+        .arg("-p")
+        .arg("d3")
+        .with_status(101)
+        .with_stderr_contains("[ERROR] package `d3` is not a member of the workspace")
+        .run();
+}
+
+#[test]
+// Arguments following `--bin foo` are passed through to the program.
+fn explicit_bin_with_args() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                assert_eq!(std::env::args().nth(1).unwrap(), "hello");
+                assert_eq!(std::env::args().nth(2).unwrap(), "world");
+            }
+        "#,
+        ).build();
+
+    p.cargo("run --bin foo hello world").run();
+}
+
+#[test]
+// A bare `cargo run` in a workspace with two binaries is ambiguous: it
+// errors out listing the candidates, and `--bin` disambiguates.
+fn run_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_bin_manifest("a"))
+        .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#)
+        .file("b/Cargo.toml", &basic_bin_manifest("b"))
+        .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#)
+        .build();
+
+    p.cargo("run")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] `cargo run` requires that a package only have one executable[..]
+available binaries: a, b",
+        ).run();
+    p.cargo("run --bin a")
+        .with_status(0)
+        .with_stdout("run-a")
+        .run();
+}
+
+#[test]
+// The nightly-only `default-run` manifest key resolves the multi-binary
+// ambiguity; it needs the "default-run" cargo-feature and a nightly cargo
+// (masqueraded below).
+fn default_run_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            cargo-features = ["default-run"]
+
+            [project]
+            name = "a"
+            version = "0.0.1"
+            default-run = "a"
+        "#,
+        ).file("a/src/main.rs", r#"fn main() {println!("run-a");}"#)
+        .file("b/Cargo.toml", &basic_bin_manifest("b"))
+        .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#)
+        .build();
+
+    p.cargo("run")
+        .masquerade_as_nightly_cargo()
+        .with_status(0)
+        .with_stdout("run-a")
+        .run();
+}
diff --git a/tests/testsuite/rustc.rs b/tests/testsuite/rustc.rs
new file mode 100644 (file)
index 0000000..76ff321
--- /dev/null
@@ -0,0 +1,411 @@
+use support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project};
+
+// Expected error when extra `rustc` args are supplied but target selection
+// is ambiguous (more than one target would receive them).
+const CARGO_RUSTC_ERROR: &str =
+    "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering
+the package by passing e.g. `--lib` or `--bin NAME` to specify a single target";
+
+#[test]
+// `cargo rustc --lib` compiles only the library target, not the binary.
+fn build_lib_for_foo() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc --lib -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// Args after `--` are appended to the rustc invocation of the selected
+// (--lib) target.
+fn lib() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc --lib -v -- -C debug-assertions=off")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C debug-assertions=off \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// With `--bin foo`, the extra `-C debug-assertions` is applied only to the
+// bin target; the library it depends on is compiled without it.
+fn build_main_and_allow_unstable_options() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc -v --bin foo -- -C debug-assertions")
+        .with_stderr(format!(
+            "\
+[COMPILING] {name} v{version} ([CWD])
+[RUNNING] `rustc --crate-name {name} src/lib.rs --color never --crate-type lib \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps`
+[RUNNING] `rustc --crate-name {name} src/main.rs --color never --crate-type bin \
+        --emit=dep-info,link -C debuginfo=2 \
+        -C debug-assertions \
+        -C metadata=[..] \
+        --out-dir [..] \
+        -L dependency=[CWD]/target/debug/deps \
+        --extern {name}=[CWD]/target/debug/deps/lib{name}-[..].rlib`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            name = "foo",
+            version = "0.0.1"
+        )).run();
+}
+
+#[test]
+// Extra rustc args with both a lib and a bin target selected is an error.
+fn fails_when_trying_to_build_main_and_lib_with_args() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc -v -- -C debug-assertions")
+        .with_status(101)
+        .with_stderr(CARGO_RUSTC_ERROR)
+        .run();
+}
+
+#[test]
+// `--bin bar` restricts the extra args to that one binary; the library is
+// built without them and the other binaries are not mentioned.
+fn build_with_args_to_one_of_multiple_binaries() {
+    let p = project()
+        .file("src/bin/foo.rs", "fn main() {}")
+        .file("src/bin/bar.rs", "fn main() {}")
+        .file("src/bin/baz.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc -v --bin bar -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib --emit=dep-info,link \
+        -C debuginfo=2 -C metadata=[..] \
+        --out-dir [..]`
+[RUNNING] `rustc --crate-name bar src/bin/bar.rs --color never --crate-type bin --emit=dep-info,link \
+        -C debuginfo=2 -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// Extra rustc args with several binaries and no target filter is an error.
+fn fails_with_args_to_all_binaries() {
+    let p = project()
+        .file("src/bin/foo.rs", "fn main() {}")
+        .file("src/bin/bar.rs", "fn main() {}")
+        .file("src/bin/baz.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc -v -- -C debug-assertions")
+        .with_status(101)
+        .with_stderr(CARGO_RUSTC_ERROR)
+        .run();
+}
+
+#[test]
+// `--test bar` applies the extra args only to that one test target.
+fn build_with_args_to_one_of_multiple_tests() {
+    let p = project()
+        .file("tests/foo.rs", r#" "#)
+        .file("tests/bar.rs", r#" "#)
+        .file("tests/baz.rs", r#" "#)
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustc -v --test bar -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib --emit=dep-info,link \
+        -C debuginfo=2 -C metadata=[..] \
+        --out-dir [..]`
+[RUNNING] `rustc --crate-name bar tests/bar.rs --color never --emit=dep-info,link -C debuginfo=2 \
+        -C debug-assertions [..]--test[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// Extra rustc args apply to the current package only; the path dependency
+// `bar` is compiled without them.
+fn build_foo_with_bar_dependency() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc -v -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `[..] -C debuginfo=2 [..]`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// `cargo rustc -p bar` compiles just the dependency, with the extra args
+// applied to it; the `foo` package itself is not built.
+fn build_only_bar_dependency() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc -v -p bar -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc --crate-name bar [..] --color never --crate-type lib [..] -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// By default `cargo rustc` builds only the bin target — not the bench
+// (opt-level=3 --test) or unit-test (debuginfo=2 --test) variants.
+fn targets_selected_default() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("rustc -v")
+        // bin
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+            --emit=dep-info,link[..]")
+        // bench
+        .with_stderr_does_not_contain("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C opt-level=3 --test [..]")
+        // unit test
+        .with_stderr_does_not_contain("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C debuginfo=2 --test [..]").run();
+}
+
+#[test]
+// `--all-targets` additionally builds the bench and unit-test variants of
+// the same source file.
+fn targets_selected_all() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("rustc -v --all-targets")
+        // bin
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+            --emit=dep-info,link[..]")
+        // bench
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C opt-level=3 --test [..]")
+        // unit test
+        .with_stderr_contains("\
+            [RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
+            -C debuginfo=2 --test [..]").run();
+}
+
+#[test]
+// `-p` may be given only once to `cargo rustc`; a duplicate is rejected by
+// argument parsing (exit status 1, not cargo's 101).
+fn fail_with_multiple_packages() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+                path = "../bar"
+
+            [dependencies.baz]
+                path = "../baz"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .build();
+
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if cfg!(flag = "1") { println!("Yeah from bar!"); }
+            }
+        "#,
+        ).build();
+
+    let _baz = project()
+        .at("baz")
+        .file("Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if cfg!(flag = "1") { println!("Yeah from baz!"); }
+            }
+        "#,
+        ).build();
+
+    foo.cargo("rustc -v -p bar -p baz")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: The argument '--package <SPEC>' was provided more than once, \
+       but cannot be used multiple times
+",
+        ).run();
+}
+
+#[test]
+// `cargo rustc --profile test` compiles the test profile, which pulls in
+// dev-dependencies (the `a` crate used only under #[cfg(test)]).
+fn rustc_with_other_profile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies]
+            a = { path = "a" }
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            #[cfg(test)] extern crate a;
+
+            #[test]
+            fn foo() {}
+        "#,
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("rustc --profile test").run();
+}
+
+#[test]
+fn rustc_fingerprint() {
+    // Verify that the fingerprint includes the rustc args.
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .build();
+
+    // First build with extra args: full compile.
+    p.cargo("rustc -v -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..]-C debug-assertions [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    // Same args again: fingerprint matches, nothing is rebuilt.
+    p.cargo("rustc -v -- -C debug-assertions")
+        .with_stderr(
+            "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    // Dropping the extra args changes the fingerprint, forcing a rebuild
+    // without them.
+    p.cargo("rustc -v")
+        .with_stderr_does_not_contain("-C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] foo [..]
+[RUNNING] `rustc [..]
+[FINISHED] [..]
+",
+        ).run();
+
+    // Repeating the no-args invocation is fresh again.
+    p.cargo("rustc -v")
+        .with_stderr(
+            "\
+[FRESH] foo [..]
+[FINISHED] [..]
+",
+        ).run();
+}
+
+#[test]
+// `--test test1 -- --cfg foo` applies the cfg only to the test target; the
+// implicitly-built bin must NOT see it — each file contains a cfg-gated
+// call to a nonexistent macro that would break compilation if the cfg were
+// wrong for that target.
+fn rustc_test_with_implicit_bin() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            #[cfg(foo)]
+            fn f() { compile_fail!("Foo shouldn't be set."); }
+            fn main() {}
+        "#,
+        ).file(
+            "tests/test1.rs",
+            r#"
+            #[cfg(not(foo))]
+            fn f() { compile_fail!("Foo should be set."); } "#,
+        ).build();
+
+    p.cargo("rustc --test test1 -v -- --cfg foo")
+        .with_stderr_contains(
+            "\
+[RUNNING] `rustc --crate-name test1 tests/test1.rs [..] --cfg foo [..]
+",
+        ).with_stderr_contains(
+            "\
+[RUNNING] `rustc --crate-name foo src/main.rs [..]
+",
+        ).run();
+}
diff --git a/tests/testsuite/rustc_info_cache.rs b/tests/testsuite/rustc_info_cache.rs
new file mode 100644 (file)
index 0000000..defd26d
--- /dev/null
@@ -0,0 +1,101 @@
+use std::env;
+use support::paths::CargoPathExt;
+use support::{basic_manifest, project};
+
+#[test]
+// Exercise the rustc info cache lifecycle: cold miss, warm hit, explicit
+// disable, switching to a different compiler, and mtime-based invalidation.
+// Cache activity is observed via cargo's own log lines (RUST_LOG).
+fn rustc_info_cache() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    let miss = "[..] rustc info cache miss[..]";
+    let hit = "[..]rustc info cache hit[..]";
+    let update = "[..]updated rustc info cache[..]";
+
+    // Cold start: no cache file exists yet, so every query misses and the
+    // cache is written out.
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .with_stderr_contains("[..]failed to read rustc info cache[..]")
+        .with_stderr_contains(miss)
+        .with_stderr_does_not_contain(hit)
+        .with_stderr_contains(update)
+        .run();
+
+    // Second build: the cache file is loaded and every query hits.
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+        .with_stderr_contains(hit)
+        .with_stderr_does_not_contain(miss)
+        .with_stderr_does_not_contain(update)
+        .run();
+
+    // CARGO_CACHE_RUSTC_INFO=0 turns the cache off entirely.
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .env("CARGO_CACHE_RUSTC_INFO", "0")
+        .with_stderr_contains("[..]rustc info cache disabled[..]")
+        .with_stderr_does_not_contain(update)
+        .run();
+
+    // Build a wrapper binary that forwards all its args to the real rustc,
+    // to act as a distinct compiler executable.
+    let other_rustc = {
+        let p = project()
+            .at("compiler")
+            .file("Cargo.toml", &basic_manifest("compiler", "0.1.0"))
+            .file(
+                "src/main.rs",
+                r#"
+            use std::process::Command;
+            use std::env;
+
+            fn main() {
+                let mut cmd = Command::new("rustc");
+                for arg in env::args_os().skip(1) {
+                    cmd.arg(arg);
+                }
+                std::process::exit(cmd.status().unwrap().code().unwrap());
+            }
+        "#,
+            ).build();
+        p.cargo("build").run();
+
+        p.root()
+            .join("target/debug/compiler")
+            .with_extension(env::consts::EXE_EXTENSION)
+    };
+
+    // A different RUSTC invalidates the cache: fresh misses and a rewrite.
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .env("RUSTC", other_rustc.display().to_string())
+        .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]")
+        .with_stderr_contains(miss)
+        .with_stderr_does_not_contain(hit)
+        .with_stderr_contains(update)
+        .run();
+
+    // Same alternate compiler again: hits.
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .env("RUSTC", other_rustc.display().to_string())
+        .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+        .with_stderr_contains(hit)
+        .with_stderr_does_not_contain(miss)
+        .with_stderr_does_not_contain(update)
+        .run();
+
+    // Bumping the compiler binary's mtime invalidates the cache again.
+    other_rustc.move_into_the_future();
+
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .env("RUSTC", other_rustc.display().to_string())
+        .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]")
+        .with_stderr_contains(miss)
+        .with_stderr_does_not_contain(hit)
+        .with_stderr_contains(update)
+        .run();
+
+    // ...and the rewritten cache is reusable afterwards.
+    p.cargo("build")
+        .env("RUST_LOG", "cargo::util::rustc=info")
+        .env("RUSTC", other_rustc.display().to_string())
+        .with_stderr_contains("[..]reusing existing rustc info cache[..]")
+        .with_stderr_contains(hit)
+        .with_stderr_does_not_contain(miss)
+        .with_stderr_does_not_contain(update)
+        .run();
+}
diff --git a/tests/testsuite/rustdoc.rs b/tests/testsuite/rustdoc.rs
new file mode 100644 (file)
index 0000000..3511034
--- /dev/null
@@ -0,0 +1,171 @@
+use support::{basic_manifest, project};
+
+#[test]
+// `cargo rustdoc -v` documents the library into target/doc.
+fn rustdoc_simple() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("rustdoc -v")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
+        -o [CWD]/target/doc \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// Args after `--` are forwarded to the rustdoc invocation.
+fn rustdoc_args() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("rustdoc -v -- --cfg=foo")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
+        -o [CWD]/target/doc \
+        --cfg=foo \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// The dependency is only checked (rustc), not documented; the extra flag
+// goes to foo's rustdoc invocation alone.
+fn rustdoc_foo_with_bar_dependency() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "extern crate bar; pub fn foo() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustdoc -v -- --cfg=foo")
+        .with_stderr(
+            "\
+[CHECKING] bar v0.0.1 ([..])
+[RUNNING] `rustc [..]bar/src/lib.rs [..]`
+[DOCUMENTING] foo v0.0.1 ([CWD])
+[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
+        -o [CWD]/target/doc \
+        --cfg=foo \
+        -L dependency=[CWD]/target/debug/deps \
+        --extern [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// `cargo rustdoc -p bar` documents only the dependency; foo itself is
+// neither built nor documented.
+fn rustdoc_only_bar_dependency() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustdoc -v -p bar -- --cfg=foo")
+        .with_stderr(
+            "\
+[DOCUMENTING] bar v0.0.1 ([..])
+[RUNNING] `rustdoc --crate-name bar [..]bar/src/lib.rs [..]\
+        -o [CWD]/target/doc \
+        --cfg=foo \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// When a lib and a bin share the crate name, a bare `cargo rustdoc`
+// documents the library (src/lib.rs).
+fn rustdoc_same_name_documents_lib() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("src/lib.rs", r#" "#)
+        .build();
+
+    p.cargo("rustdoc -v -- --cfg=foo")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
+        -o [CWD]/target/doc \
+        --cfg=foo \
+        -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// `--features quux` makes the feature cfg visible to the rustdoc
+// invocation.
+fn features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [features]
+            quux = []
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("rustdoc --verbose --features quux")
+        .with_stderr_contains("[..]feature=[..]quux[..]")
+        .run();
+}
+
+#[test]
+// `--target` routes docs under target/<triple>/doc and adds both the
+// target-specific and host deps directories to the search path. Pinned to
+// x86_64-linux-gnu hosts so the triple is installed.
+#[cfg(all(
+    target_arch = "x86_64",
+    target_os = "linux",
+    target_env = "gnu"
+))]
+fn rustdoc_target() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("rustdoc --verbose --target x86_64-unknown-linux-gnu")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\
+    --target x86_64-unknown-linux-gnu \
+    -o [CWD]/target/x86_64-unknown-linux-gnu/doc \
+    -L dependency=[CWD]/target/x86_64-unknown-linux-gnu/debug/deps \
+    -L dependency=[CWD]/target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+        ).run();
+}
diff --git a/tests/testsuite/rustdocflags.rs b/tests/testsuite/rustdocflags.rs
new file mode 100644 (file)
index 0000000..d99e805
--- /dev/null
@@ -0,0 +1,83 @@
+use support::project;
+
+#[test]
+// The RUSTDOCFLAGS environment variable reaches the rustdoc command line.
+fn parses_env() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("doc -v")
+        .env("RUSTDOCFLAGS", "--cfg=foo")
+        .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg=foo[..]`")
+        .run();
+}
+
+#[test]
+// `build.rustdocflags` from .cargo/config reaches the rustdoc command line.
+fn parses_config() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustdocflags = ["--cfg", "foo"]
+        "#,
+        ).build();
+
+    p.cargo("doc -v")
+        .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`")
+        .run();
+}
+
+#[test]
+// An invalid flag in RUSTDOCFLAGS makes rustdoc fail and cargo exit 101.
+fn bad_flags() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("doc")
+        .env("RUSTDOCFLAGS", "--bogus")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+// Unchanged RUSTDOCFLAGS leave the docs fresh; changed flags force
+// re-documenting.
+fn rerun() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").run();
+    p.cargo("doc")
+        .env("RUSTDOCFLAGS", "--cfg=foo")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+    p.cargo("doc")
+        .env("RUSTDOCFLAGS", "--cfg=bar")
+        .with_stderr(
+            "\
+[DOCUMENTING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+// RUSTDOCFLAGS is also applied when rustdoc runs doctests: the doctest
+// only passes if `--cfg do_not_choke` reached it.
+fn rustdocflags_passed_to_rustdoc_through_cargo_test() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            //! ```
+            //! assert!(cfg!(do_not_choke));
+            //! ```
+        "#,
+        ).build();
+
+    p.cargo("test --doc")
+        .env("RUSTDOCFLAGS", "--cfg do_not_choke")
+        .run();
+}
+
+#[test]
+// NOTE(review): per the name, this guards against RUSTDOCFLAGS being
+// appended twice for doctests — presumably `--markdown-no-toc` would make
+// rustdoc error if duplicated; confirm against rustdoc's flag handling.
+fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("test --doc")
+        .env("RUSTDOCFLAGS", "--markdown-no-toc")
+        .run();
+}
diff --git a/tests/testsuite/rustflags.rs b/tests/testsuite/rustflags.rs
new file mode 100644 (file)
index 0000000..1cf02da
--- /dev/null
@@ -0,0 +1,1177 @@
+use std::fs::{self, File};
+use std::io::Write;
+
+use support::rustc_host;
+use support::{basic_lib_manifest, basic_manifest, paths, project, project_in_home};
+
+// A bogus flag in the RUSTFLAGS environment variable breaks compilation of
+// every target kind: lib, bin, example, test, and bench.
+#[test]
+fn env_rustflags_normal_source() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).build();
+
+    // Use RUSTFLAGS to pass an argument that will generate an error
+    p.cargo("build --lib")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("build --bin=a")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("build --example=b")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("test")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("bench")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn env_rustflags_build_script() {
+    // RUSTFLAGS should be passed to rustc for build scripts
+    // when --target is not specified.
+    // In this test if --cfg foo is NOT passed the build will fail:
+    // the #[cfg(not(foo))] `main` would duplicate the unconditional one.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() { }
+            #[cfg(not(foo))]
+            fn main() { }
+        "#,
+        ).build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[test]
+fn env_rustflags_build_script_dep() {
+    // RUSTFLAGS should be passed to rustc for build scripts
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    // (`bar` is a build-dependency, so it is compiled for the host and must
+    // also see the flags.)
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+
+            [build-dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(not(foo))]
+            fn bar() { }
+        "#,
+        ).build();
+
+    foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[test]
+fn env_rustflags_plugin() {
+    // RUSTFLAGS should be passed to rustc for plugins
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    // (A `plugin = true` crate is compiled for the host.)
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            fn main() { }
+            #[cfg(not(foo))]
+            fn main() { }
+        "#,
+        ).build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[test]
+fn env_rustflags_plugin_dep() {
+    // RUSTFLAGS should be passed to rustc for plugins
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    // (`bar` is a dependency of a plugin crate, so it is built for the host.)
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "fn foo() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(not(foo))]
+            fn bar() { }
+        "#,
+        ).build();
+
+    foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+// Same as env_rustflags_normal_source, but with an explicit --target (the
+// host triple): RUSTFLAGS still applies to every normal target kind.
+#[test]
+fn env_rustflags_normal_source_with_target() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).build();
+
+    let host = &rustc_host();
+
+    // Use RUSTFLAGS to pass an argument that will generate an error
+    p.cargo("build --lib --target")
+        .arg(host)
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("build --bin=a --target")
+        .arg(host)
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("build --example=b --target")
+        .arg(host)
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("test --target")
+        .arg(host)
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+    p.cargo("bench --target")
+        .arg(host)
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn env_rustflags_build_script_with_target() {
+    // RUSTFLAGS should not be passed to rustc for build scripts
+    // when --target is specified.
+    // In this test if --cfg foo is passed the build will fail:
+    // the #[cfg(foo)] `main` would duplicate the unconditional one.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() { }
+            #[cfg(foo)]
+            fn main() { }
+        "#,
+        ).build();
+
+    let host = rustc_host();
+    p.cargo("build --target")
+        .arg(host)
+        .env("RUSTFLAGS", "--cfg foo")
+        .run();
+}
+
+#[test]
+fn env_rustflags_build_script_dep_with_target() {
+    // RUSTFLAGS should not be passed to rustc for build scripts
+    // when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    // (`bar` is a build-dependency, compiled for the host, and must NOT see
+    // the flags when cross-compiling.)
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+
+            [build-dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(foo)]
+            fn bar() { }
+        "#,
+        ).build();
+
+    let host = rustc_host();
+    foo.cargo("build --target")
+        .arg(host)
+        .env("RUSTFLAGS", "--cfg foo")
+        .run();
+}
+
+#[test]
+fn env_rustflags_plugin_with_target() {
+    // RUSTFLAGS should not be passed to rustc for plugins
+    // when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    // (A `plugin = true` crate is compiled for the host.)
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            fn main() { }
+            #[cfg(foo)]
+            fn main() { }
+        "#,
+        ).build();
+
+    let host = rustc_host();
+    p.cargo("build --target")
+        .arg(host)
+        .env("RUSTFLAGS", "--cfg foo")
+        .run();
+}
+
+#[test]
+fn env_rustflags_plugin_dep_with_target() {
+    // RUSTFLAGS should not be passed to rustc for plugins
+    // when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    // (`bar` is a dependency of a plugin crate, so it is built for the host.)
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "fn foo() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(foo)]
+            fn bar() { }
+        "#,
+        ).build();
+
+    let host = rustc_host();
+    foo.cargo("build --target")
+        .arg(host)
+        .env("RUSTFLAGS", "--cfg foo")
+        .run();
+}
+
+// Introducing RUSTFLAGS after a clean build must invalidate the cache: the
+// second build actually re-runs rustc (and fails on the bogus flag) instead
+// of reusing the cached artifacts.
+#[test]
+fn env_rustflags_recompile() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").run();
+    // Setting RUSTFLAGS forces a recompile
+    p.cargo("build")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+}
+
+// Changing the value of RUSTFLAGS (not merely introducing it) must also
+// invalidate the cache and re-run rustc.
+#[test]
+fn env_rustflags_recompile2() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+    // Setting RUSTFLAGS forces a recompile
+    p.cargo("build")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .run();
+}
+
+// An unchanged RUSTFLAGS value must NOT trigger a rebuild: the second build
+// produces no output on stdout.
+#[test]
+fn env_rustflags_no_recompile() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+    p.cargo("build")
+        .env("RUSTFLAGS", "--cfg foo")
+        .with_stdout("")
+        .run();
+}
+
+// A bogus flag in `build.rustflags` from .cargo/config breaks compilation of
+// every target kind: lib, bin, example, test, and bench.
+#[test]
+fn build_rustflags_normal_source() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["-Z", "bogus"]
+            "#,
+        ).build();
+
+    p.cargo("build --lib").with_status(101).run();
+    p.cargo("build --bin=a").with_status(101).run();
+    p.cargo("build --example=b").with_status(101).run();
+    p.cargo("test").with_status(101).run();
+    p.cargo("bench").with_status(101).run();
+}
+
+#[test]
+fn build_rustflags_build_script() {
+    // `build.rustflags` from .cargo/config should be passed to rustc for
+    // build scripts when --target is not specified.
+    // In this test if --cfg foo is NOT passed the build will fail:
+    // the #[cfg(not(foo))] `main` would duplicate the unconditional one.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() { }
+            #[cfg(not(foo))]
+            fn main() { }
+        "#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn build_rustflags_build_script_dep() {
+    // `build.rustflags` from .cargo/config should be passed to rustc for
+    // build scripts when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+
+            [build-dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(not(foo))]
+            fn bar() { }
+        "#,
+        ).build();
+
+    foo.cargo("build").run();
+}
+
+#[test]
+fn build_rustflags_plugin() {
+    // `build.rustflags` from .cargo/config should be passed to rustc for
+    // plugins when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            fn main() { }
+            #[cfg(not(foo))]
+            fn main() { }
+        "#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn build_rustflags_plugin_dep() {
+    // `build.rustflags` from .cargo/config should be passed to rustc for
+    // plugins when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "fn foo() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(not(foo))]
+            fn bar() { }
+        "#,
+        ).build();
+
+    foo.cargo("build").run();
+}
+
+// `build.rustflags` from .cargo/config still applies to every normal target
+// kind when an explicit --target (the host triple) is given.
+#[test]
+fn build_rustflags_normal_source_with_target() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["-Z", "bogus"]
+            "#,
+        ).build();
+
+    let host = &rustc_host();
+
+    // Use build.rustflags to pass an argument that will generate an error
+    p.cargo("build --lib --target")
+        .arg(host)
+        .with_status(101)
+        .run();
+    p.cargo("build --bin=a --target")
+        .arg(host)
+        .with_status(101)
+        .run();
+    p.cargo("build --example=b --target")
+        .arg(host)
+        .with_status(101)
+        .run();
+    p.cargo("test --target").arg(host).with_status(101).run();
+    p.cargo("bench --target").arg(host).with_status(101).run();
+}
+
+#[test]
+fn build_rustflags_build_script_with_target() {
+    // `build.rustflags` from .cargo/config should not be passed to rustc for
+    // build scripts when --target is specified.
+    // In this test if --cfg foo is passed the build will fail:
+    // the #[cfg(foo)] `main` would duplicate the unconditional one.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() { }
+            #[cfg(foo)]
+            fn main() { }
+        "#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+
+    let host = rustc_host();
+    p.cargo("build --target").arg(host).run();
+}
+
+#[test]
+fn build_rustflags_build_script_dep_with_target() {
+    // `build.rustflags` from .cargo/config should not be passed to rustc for
+    // build scripts when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+
+            [build-dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(foo)]
+            fn bar() { }
+        "#,
+        ).build();
+
+    let host = rustc_host();
+    foo.cargo("build --target").arg(host).run();
+}
+
+#[test]
+fn build_rustflags_plugin_with_target() {
+    // `build.rustflags` from .cargo/config should not be passed to rustc for
+    // plugins when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            fn main() { }
+            #[cfg(foo)]
+            fn main() { }
+        "#,
+        ).file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+
+    let host = rustc_host();
+    p.cargo("build --target").arg(host).run();
+}
+
+#[test]
+fn build_rustflags_plugin_dep_with_target() {
+    // `build.rustflags` from .cargo/config should not be passed to rustc for
+    // plugins when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file("src/lib.rs", "fn foo() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(foo)]
+            fn bar() { }
+        "#,
+        ).build();
+
+    let host = rustc_host();
+    foo.cargo("build --target").arg(host).run();
+}
+
+// Introducing build.rustflags in .cargo/config after a clean build must
+// invalidate the cache and re-run rustc (which then fails on the bogus flag).
+#[test]
+fn build_rustflags_recompile() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").run();
+
+    // Adding build.rustflags to the config forces a recompile.
+    // The config file is written into the project dir (paths::root()/foo).
+    let config = r#"
+        [build]
+        rustflags = ["-Z", "bogus"]
+        "#;
+    let config_file = paths::root().join("foo/.cargo/config");
+    fs::create_dir_all(config_file.parent().unwrap()).unwrap();
+    let mut config_file = File::create(config_file).unwrap();
+    config_file.write_all(config.as_bytes()).unwrap();
+
+    p.cargo("build").with_status(101).run();
+}
+
+// A first build using env RUSTFLAGS followed by a second build driven by a
+// newly written config build.rustflags must recompile (and fail here).
+#[test]
+fn build_rustflags_recompile2() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+
+    // Adding build.rustflags to the config forces a recompile.
+    let config = r#"
+        [build]
+        rustflags = ["-Z", "bogus"]
+        "#;
+    let config_file = paths::root().join("foo/.cargo/config");
+    fs::create_dir_all(config_file.parent().unwrap()).unwrap();
+    let mut config_file = File::create(config_file).unwrap();
+    config_file.write_all(config.as_bytes()).unwrap();
+
+    p.cargo("build").with_status(101).run();
+}
+
+// Env RUSTFLAGS takes precedence over config build.rustflags; since the env
+// value is identical across both builds, the second build is a no-op
+// (empty stdout).
+#[test]
+fn build_rustflags_no_recompile() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+    p.cargo("build")
+        .env("RUSTFLAGS", "--cfg foo")
+        .with_stdout("")
+        .run();
+}
+
+// NOTE(review): with the project inside $HOME, the config-file walk can reach
+// the home .cargo/config via two paths; the build must still succeed —
+// presumably because the flags are not applied twice. Confirm against the
+// config-walking code.
+#[test]
+fn build_rustflags_with_home_config() {
+    // We need a config file inside the home directory
+    let home = paths::home();
+    let home_config = home.join(".cargo");
+    fs::create_dir(&home_config).unwrap();
+    File::create(&home_config.join("config"))
+        .unwrap()
+        .write_all(
+            br#"
+        [build]
+        rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"]
+    "#,
+        ).unwrap();
+
+    // And we need the project to be inside the home directory
+    // so the walking process finds the home project twice.
+    let p = project_in_home("foo").file("src/lib.rs", "").build();
+
+    p.cargo("build -v").run();
+}
+
+// `target.<triple>.rustflags` (keyed on the host triple) applies to every
+// target kind; the bogus flag makes each build fail.
+#[test]
+fn target_rustflags_normal_source() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        ).file(
+            ".cargo/config",
+            &format!(
+                "
+            [target.{}]
+            rustflags = [\"-Z\", \"bogus\"]
+            ",
+                rustc_host()
+            ),
+        ).build();
+
+    p.cargo("build --lib").with_status(101).run();
+    p.cargo("build --bin=a").with_status(101).run();
+    p.cargo("build --example=b").with_status(101).run();
+    p.cargo("test").with_status(101).run();
+    p.cargo("bench").with_status(101).run();
+}
+
+// target.{}.rustflags takes precedence over build.rustflags
+// NOTE(review): unlike target_rustflags_normal_source, this project defines
+// only src/lib.rs — the `--bin=a` and `--example=b` invocations have no
+// matching targets, so those fail with "no such target" rather than because
+// of the bogus flags. Only the --lib/test/bench runs actually exercise
+// precedence. Confirm whether the extra invocations are intentional.
+#[test]
+fn target_rustflags_precedence() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                "
+            [build]
+            rustflags = [\"--cfg\", \"foo\"]
+
+            [target.{}]
+            rustflags = [\"-Z\", \"bogus\"]
+            ",
+                rustc_host()
+            ),
+        ).build();
+
+    p.cargo("build --lib").with_status(101).run();
+    p.cargo("build --bin=a").with_status(101).run();
+    p.cargo("build --example=b").with_status(101).run();
+    p.cargo("test").with_status(101).run();
+    p.cargo("bench").with_status(101).run();
+}
+
+// `target.'cfg(...)'.rustflags` applies to every target kind. The cfg
+// expression is chosen so it always matches the host: `cfg(windows)` on
+// Windows hosts, `cfg(not(windows))` everywhere else.
+#[test]
+fn cfg_rustflags_normal_source() {
+    let p = project()
+        .file("src/lib.rs", "pub fn t() {}")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.'cfg({})']
+            rustflags = ["--cfg", "bar"]
+            "#,
+                if rustc_host().contains("-windows-") {
+                    "windows"
+                } else {
+                    "not(windows)"
+                }
+            ),
+        ).build();
+
+    p.cargo("build --lib -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build --bin=a -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build --example=b -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // test/bench compile three units (lib + lib-as-test + integration test),
+    // hence three rustc invocations, each carrying the flag.
+    p.cargo("test --no-run -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("bench --no-run -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+// target.'cfg(...)'.rustflags takes precedence over build.rustflags
+// (only `--cfg bar` from the matching cfg section shows up, never
+// `--cfg foo` from [build]).
+#[test]
+fn cfg_rustflags_precedence() {
+    let p = project()
+        .file("src/lib.rs", "pub fn t() {}")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+
+            [target.'cfg({})']
+            rustflags = ["--cfg", "bar"]
+            "#,
+                if rustc_host().contains("-windows-") {
+                    "windows"
+                } else {
+                    "not(windows)"
+                }
+            ),
+        ).build();
+
+    p.cargo("build --lib -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build --bin=a -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build --example=b -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("test --no-run -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("bench --no-run -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[RUNNING] `rustc [..] --cfg bar[..]`
+[FINISHED] release [optimized] target(s) in [..]
+",
+        ).run();
+}
+
+// `build.rustflags` accepts both the array form (["--cfg", "foo"]) and the
+// whitespace-separated string form ("--cfg foo"); both must yield the same
+// rustc invocation.
+#[test]
+fn target_rustflags_string_and_array_form1() {
+    let p1 = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["--cfg", "foo"]
+            "#,
+        ).build();
+
+    p1.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    let p2 = project()
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = "--cfg foo"
+            "#,
+        ).build();
+
+    p2.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// Same array-vs-string equivalence as form1, but for the per-triple
+// `target.<triple>.rustflags` key.
+#[test]
+fn target_rustflags_string_and_array_form2() {
+    let p1 = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}]
+            rustflags = ["--cfg", "foo"]
+        "#,
+                rustc_host()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p1.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    let p2 = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}]
+            rustflags = "--cfg foo"
+        "#,
+                rustc_host()
+            ),
+        ).file("src/lib.rs", "")
+        .build();
+
+    p2.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] --cfg foo[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// On any host, exactly one of cfg(unix)/cfg(windows) and one of the
+// pointer-width sections matches, so flags from TWO matching cfg sections are
+// applied together. The program must see one of the cfgs (it panics if
+// neither), and the second build must be fresh — the merged flags are stable
+// across runs.
+#[test]
+fn two_matching_in_config() {
+    let p1 = project()
+        .file(
+            ".cargo/config",
+            r#"
+            [target.'cfg(unix)']
+            rustflags = ["--cfg", 'foo="a"']
+            [target.'cfg(windows)']
+            rustflags = ["--cfg", 'foo="a"']
+            [target.'cfg(target_pointer_width = "32")']
+            rustflags = ["--cfg", 'foo="b"']
+            [target.'cfg(target_pointer_width = "64")']
+            rustflags = ["--cfg", 'foo="b"']
+        "#,
+        ).file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if cfg!(foo = "a") {
+                    println!("a");
+                } else if cfg!(foo = "b") {
+                    println!("b");
+                } else {
+                    panic!()
+                }
+            }
+        "#,
+        ).build();
+
+    p1.cargo("run").run();
+    p1.cargo("build").with_stderr("[FINISHED] [..]").run();
+}
diff --git a/tests/testsuite/search.rs b/tests/testsuite/search.rs
new file mode 100644 (file)
index 0000000..714913a
--- /dev/null
@@ -0,0 +1,212 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::Path;
+
+use support::cargo_process;
+use support::git::repo;
+use support::paths;
+use support::registry::{api_path, registry as registry_url, registry_path};
+use url::Url;
+
+fn api() -> Url {
+    Url::from_file_path(&*api_path()).ok().unwrap()
+}
+
+fn write_crates(dest: &Path) {
+    let content = r#"{
+        "crates": [{
+            "created_at": "2014-11-16T20:17:35Z",
+            "description": "Design by contract style assertions for Rust",
+            "documentation": null,
+            "downloads": 2,
+            "homepage": null,
+            "id": "hoare",
+            "keywords": [],
+            "license": null,
+            "links": {
+                "owners": "/api/v1/crates/hoare/owners",
+                "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies",
+                "version_downloads": "/api/v1/crates/hoare/downloads",
+                "versions": "/api/v1/crates/hoare/versions"
+            },
+            "max_version": "0.1.1",
+            "name": "hoare",
+            "repository": "https://github.com/nick29581/libhoare",
+            "updated_at": "2014-11-20T21:49:21Z",
+            "versions": null
+        }],
+        "meta": {
+            "total": 1
+        }
+    }"#;
+
+    // Older versions of curl don't peel off query parameters when looking for
+    // filenames, so just make both files.
+    //
+    // On windows, though, `?` is an invalid character, but we always build curl
+    // from source there anyway!
+    File::create(&dest)
+        .unwrap()
+        .write_all(content.as_bytes())
+        .unwrap();
+    if !cfg!(windows) {
+        File::create(&dest.with_file_name("crates?q=postgres&per_page=10"))
+            .unwrap()
+            .write_all(content.as_bytes())
+            .unwrap();
+    }
+}
+
+fn setup() {
+    let cargo_home = paths::root().join(".cargo");
+    fs::create_dir_all(cargo_home).unwrap();
+    fs::create_dir_all(&api_path().join("api/v1")).unwrap();
+
+    // Init a new registry
+    let _ = repo(&registry_path())
+        .file(
+            "config.json",
+            &format!(r#"{{"dl":"{0}","api":"{0}"}}"#, api()),
+        ).build();
+
+    let base = api_path().join("api/v1/crates");
+    write_crates(&base);
+}
+
+fn set_cargo_config() {
+    let config = paths::root().join(".cargo/config");
+
+    File::create(&config)
+        .unwrap()
+        .write_all(
+            format!(
+                r#"
+[source.crates-io]
+registry = 'https://wut'
+replace-with = 'dummy-registry'
+
+[source.dummy-registry]
+registry = '{reg}'
+"#,
+                reg = registry_url(),
+            ).as_bytes(),
+        ).unwrap();
+}
+
+#[test]
+fn not_update() {
+    setup();
+    set_cargo_config();
+
+    use cargo::core::{Shell, Source, SourceId};
+    use cargo::sources::RegistrySource;
+    use cargo::util::Config;
+
+    let sid = SourceId::for_registry(&registry_url()).unwrap();
+    let cfg = Config::new(Shell::new(), paths::root(), paths::home().join(".cargo"));
+    let mut regsrc = RegistrySource::remote(&sid, &cfg);
+    regsrc.update().unwrap();
+
+    cargo_process("search postgres")
+            .with_stdout_contains(
+                "hoare = \"0.1.1\"    # Design by contract style assertions for Rust",
+            )
+            .with_stderr("") // without "Updating ... index"
+    .run();
+}
+
+#[test]
+fn replace_default() {
+    setup();
+    set_cargo_config();
+
+    cargo_process("search postgres")
+        .with_stdout_contains("hoare = \"0.1.1\"    # Design by contract style assertions for Rust")
+        .with_stderr_contains("[..]Updating [..] index")
+        .run();
+}
+
+#[test]
+fn simple() {
+    setup();
+
+    cargo_process("search postgres --index")
+        .arg(registry_url().to_string())
+        .with_stdout_contains("hoare = \"0.1.1\"    # Design by contract style assertions for Rust")
+        .run();
+}
+
+// TODO: Deprecated
+// remove once it has been decided '--host' can be safely removed
+#[test]
+fn simple_with_host() {
+    setup();
+
+    cargo_process("search postgres --host").arg(registry_url().to_string())
+            .with_stderr(
+                "\
+[WARNING] The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+wants the location of the index. Please use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.
+[UPDATING] `[CWD]/registry` index
+",
+            )
+            .with_stdout_contains(
+                "hoare = \"0.1.1\"    # Design by contract style assertions for Rust",
+            )
+        .run();
+}
+
+// TODO: Deprecated
+// remove once it has been decided '--host' can be safely removed
+#[test]
+fn simple_with_index_and_host() {
+    setup();
+
+    cargo_process("search postgres --index").arg(registry_url().to_string()).arg("--host").arg(registry_url().to_string())
+            .with_stderr(
+                "\
+[WARNING] The flag '--host' is no longer valid.
+
+Previous versions of Cargo accepted this flag, but it is being
+deprecated. The flag is being renamed to 'index', as the flag
+wants the location of the index. Please use '--index' instead.
+
+This will soon become a hard error, so it's either recommended
+to update to a fixed version or contact the upstream maintainer
+about this warning.
+[UPDATING] `[CWD]/registry` index
+",
+            )
+            .with_stdout_contains(
+                "hoare = \"0.1.1\"    # Design by contract style assertions for Rust",
+            )
+        .run();
+}
+
+#[test]
+fn multiple_query_params() {
+    setup();
+
+    cargo_process("search postgres sql --index")
+        .arg(registry_url().to_string())
+        .with_stdout_contains("hoare = \"0.1.1\"    # Design by contract style assertions for Rust")
+        .run();
+}
+
+#[test]
+fn help() {
+    cargo_process("search -h").run();
+    cargo_process("help search").run();
+    // Ensure that help output goes to stdout, not stderr.
+    cargo_process("search --help").with_stderr("").run();
+    cargo_process("search --help")
+        .with_stdout_contains("[..] --frozen [..]")
+        .run();
+}
diff --git a/tests/testsuite/shell_quoting.rs b/tests/testsuite/shell_quoting.rs
new file mode 100644 (file)
index 0000000..dcbb0fc
--- /dev/null
@@ -0,0 +1,36 @@
+//! this file tests that when the commands being run are shown
+//! in the output, their arguments are quoted properly
+//! so that the command can be run in a terminal
+
+use support::project;
+
+#[test]
+fn features_are_quoted() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = ["mikeyhew@example.com"]
+
+            [features]
+            some_feature = []
+            default = ["some_feature"]
+            "#,
+        ).file("src/main.rs", "fn main() {error}")
+        .build();
+
+    p.cargo("check -v")
+            .env("MSYSTEM", "1")
+            .with_status(101)
+            .with_stderr_contains(
+                r#"[RUNNING] `rustc [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]`"#
+            ).with_stderr_contains(
+                r#"
+Caused by:
+  process didn't exit successfully: [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]"#
+            )
+    .run();
+}
diff --git a/tests/testsuite/small_fd_limits.rs b/tests/testsuite/small_fd_limits.rs
new file mode 100644 (file)
index 0000000..9e15f85
--- /dev/null
@@ -0,0 +1,116 @@
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+use std::process::Command;
+
+use git2;
+use support::git;
+use support::paths;
+use support::project;
+use support::registry::Package;
+
+use url::Url;
+
+fn find_index() -> PathBuf {
+    let dir = paths::home().join(".cargo/registry/index");
+    dir.read_dir().unwrap().next().unwrap().unwrap().path()
+}
+
+fn run_test(path_env: Option<&OsStr>) {
+    const N: usize = 50;
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+    Package::new("bar", "0.1.0").publish();
+
+    foo.cargo("build").run();
+
+    let index = find_index();
+    let path = paths::home().join("tmp");
+    let url = Url::from_file_path(&path).unwrap().to_string();
+    let repo = git2::Repository::init(&path).unwrap();
+    let index = git2::Repository::open(&index).unwrap();
+    let mut cfg = repo.config().unwrap();
+    cfg.set_str("user.email", "foo@bar.com").unwrap();
+    cfg.set_str("user.name", "Foo Bar").unwrap();
+    let mut cfg = index.config().unwrap();
+    cfg.set_str("user.email", "foo@bar.com").unwrap();
+    cfg.set_str("user.name", "Foo Bar").unwrap();
+
+    for _ in 0..N {
+        git::commit(&repo);
+        index
+            .remote_anonymous(&url)
+            .unwrap()
+            .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None)
+            .unwrap();
+    }
+    drop((repo, index));
+    Package::new("bar", "0.1.1").publish();
+
+    let before = find_index()
+        .join(".git/objects/pack")
+        .read_dir()
+        .unwrap()
+        .count();
+    assert!(before > N);
+
+    let mut cmd = foo.cargo("update");
+    cmd.env("__CARGO_PACKFILE_LIMIT", "10");
+    if let Some(path) = path_env {
+        cmd.env("PATH", path);
+    }
+    cmd.env("RUST_LOG", "trace");
+    cmd.run();
+    let after = find_index()
+        .join(".git/objects/pack")
+        .read_dir()
+        .unwrap()
+        .count();
+    assert!(
+        after < before,
+        "packfiles before: {}\n\
+         packfiles after:  {}",
+        before,
+        after
+    );
+}
+
+#[test]
+fn use_git_gc() {
+    if Command::new("git").arg("--version").output().is_err() {
+        return;
+    }
+    run_test(None);
+}
+
+#[test]
+// it looks like this test passes on some windows machines but not others,
+// notably not on AppVeyor's machines. Sounds like another bug for another day.
+#[cfg_attr(windows, ignore)]
+fn avoid_using_git() {
+    let path = env::var_os("PATH").unwrap_or_default();
+    let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+    let idx = paths
+        .iter()
+        .position(|p| p.join("git").exists() || p.join("git.exe").exists());
+    match idx {
+        Some(i) => {
+            paths.remove(i);
+        }
+        None => return,
+    }
+    run_test(Some(&env::join_paths(&paths).unwrap()));
+}
diff --git a/tests/testsuite/support/cross_compile.rs b/tests/testsuite/support/cross_compile.rs
new file mode 100644 (file)
index 0000000..675e77e
--- /dev/null
@@ -0,0 +1,138 @@
+use std::env;
+use std::process::Command;
+use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT};
+use std::sync::{Once, ONCE_INIT};
+
+use support::{basic_bin_manifest, main_file, project};
+
+pub fn disabled() -> bool {
+    // First, disable if ./configure requested so
+    match env::var("CFG_DISABLE_CROSS_TESTS") {
+        Ok(ref s) if *s == "1" => return true,
+        _ => {}
+    }
+
+    // Right now the windows bots cannot cross compile due to the mingw setup,
+    // so we disable ourselves on all but macos/linux setups where the rustc
+    // install script ensures we have both architectures
+    if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) {
+        return true;
+    }
+
+    // It's not particularly common to have a cross-compilation setup, so
+    // try to detect that before we fail a bunch of tests through no fault
+    // of the user.
+    static CAN_RUN_CROSS_TESTS: AtomicBool = ATOMIC_BOOL_INIT;
+    static CHECK: Once = ONCE_INIT;
+
+    let cross_target = alternate();
+
+    CHECK.call_once(|| {
+        let p = project()
+            .at("cross_test")
+            .file("Cargo.toml", &basic_bin_manifest("cross_test"))
+            .file("src/cross_test.rs", &main_file(r#""testing!""#, &[]))
+            .build();
+
+        let result = p
+            .cargo("build --target")
+            .arg(&cross_target)
+            .exec_with_output();
+
+        if result.is_ok() {
+            CAN_RUN_CROSS_TESTS.store(true, Ordering::SeqCst);
+        }
+    });
+
+    if CAN_RUN_CROSS_TESTS.load(Ordering::SeqCst) {
+        // We were able to compile a simple project, so the user has the
+        // necessary std:: bits installed.  Therefore, tests should not
+        // be disabled.
+        return false;
+    }
+
+    // We can't compile a simple cross project.  We want to warn the user
+    // by failing a single test and having the remainder of the cross tests
+    // pass.  We don't use std::sync::Once here because panicking inside its
+    // call_once method would poison the Once instance, which is not what
+    // we want.
+    static HAVE_WARNED: AtomicBool = ATOMIC_BOOL_INIT;
+
+    if HAVE_WARNED.swap(true, Ordering::SeqCst) {
+        // We are some other test and somebody else is handling the warning.
+        // Just disable the current test.
+        return true;
+    }
+
+    // We are responsible for warning the user, which we do by panicking.
+    let rustup_available = Command::new("rustup").output().is_ok();
+
+    let linux_help = if cfg!(target_os = "linux") {
+        "
+
+You may need to install runtime libraries for your Linux distribution as well."
+            .to_string()
+    } else {
+        "".to_string()
+    };
+
+    let rustup_help = if rustup_available {
+        format!(
+            "
+
+Alternatively, you can install the necessary libraries for cross-compilation with
+
+    rustup target add {}{}",
+            cross_target, linux_help
+        )
+    } else {
+        "".to_string()
+    };
+
+    panic!(
+        "Cannot cross compile to {}.
+
+This failure can be safely ignored. If you would prefer to not see this
+failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".{}
+",
+        cross_target, rustup_help
+    );
+}
+
+pub fn alternate() -> String {
+    let platform = match env::consts::OS {
+        "linux" => "unknown-linux-gnu",
+        "macos" => "apple-darwin",
+        "windows" => "pc-windows-msvc",
+        _ => unreachable!(),
+    };
+    let arch = match env::consts::ARCH {
+        "x86" => "x86_64",
+        "x86_64" => "i686",
+        _ => unreachable!(),
+    };
+    format!("{}-{}", arch, platform)
+}
+
+pub fn alternate_arch() -> &'static str {
+    match env::consts::ARCH {
+        "x86" => "x86_64",
+        "x86_64" => "x86",
+        _ => unreachable!(),
+    }
+}
+
+pub fn host() -> String {
+    let platform = match env::consts::OS {
+        "linux" => "unknown-linux-gnu",
+        "macos" => "apple-darwin",
+        "windows" => "pc-windows-msvc",
+        _ => unreachable!(),
+    };
+    let arch = match env::consts::ARCH {
+        "x86" => "i686",
+        "x86_64" => "x86_64",
+        _ => unreachable!(),
+    };
+    format!("{}-{}", arch, platform)
+}
diff --git a/tests/testsuite/support/git.rs b/tests/testsuite/support/git.rs
new file mode 100644 (file)
index 0000000..656727d
--- /dev/null
@@ -0,0 +1,224 @@
+/*
+# Git Testing Support
+
+## Creating a git dependency
+`git::new()` is an easy way to create a new git repository containing a
+project that you can then use as a dependency. It will automatically add all
+the files you specify in the project and commit them to the repository.
+Example:
+
+```
+let git_project = git::new("dep1", |project| {
+    project
+        .file("Cargo.toml", &basic_manifest("dep1"))
+        .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#)
+}).unwrap();
+
+// Use the `url()` method to get the file url to the new repository.
+let p = project()
+    .file("Cargo.toml", &format!(r#"
+        [package]
+        name = "a"
+        version = "1.0.0"
+
+        [dependencies]
+        dep1 = {{ git = '{}' }}
+    "#, git_project.url()))
+    .file("src/lib.rs", "extern crate dep1;")
+    .build();
+```
+
+## Manually creating repositories
+`git::repo()` can be used to create a `RepoBuilder` which provides a way of
+adding files to a blank repository and committing them.
+
+If you want to then manipulate the repository (such as adding new files or
+tags), you can use `git2::Repository::open()` to open the repository and then
+use some of the helper functions in this file to interact with the repository.
+
+*/
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use cargo::util::ProcessError;
+use git2;
+use url::Url;
+
+use support::{path2url, project, Project, ProjectBuilder};
+
+#[must_use]
+pub struct RepoBuilder {
+    repo: git2::Repository,
+    files: Vec<PathBuf>,
+}
+
+pub struct Repository(git2::Repository);
+
+/// Create a `RepoBuilder` to build a new git repository.
+///
+/// Call `build()` to finalize and create the repository.
+pub fn repo(p: &Path) -> RepoBuilder {
+    RepoBuilder::init(p)
+}
+
+impl RepoBuilder {
+    pub fn init(p: &Path) -> RepoBuilder {
+        t!(fs::create_dir_all(p.parent().unwrap()));
+        let repo = t!(git2::Repository::init(p));
+        {
+            let mut config = t!(repo.config());
+            t!(config.set_str("user.name", "name"));
+            t!(config.set_str("user.email", "email"));
+        }
+        RepoBuilder {
+            repo,
+            files: Vec::new(),
+        }
+    }
+
+    /// Add a file to the repository.
+    pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
+        let mut me = self.nocommit_file(path, contents);
+        me.files.push(PathBuf::from(path));
+        me
+    }
+
+    /// Add a file that will be left in the working directory, but not added
+    /// to the repository.
+    pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder {
+        let dst = self.repo.workdir().unwrap().join(path);
+        t!(fs::create_dir_all(dst.parent().unwrap()));
+        t!(t!(File::create(&dst)).write_all(contents.as_bytes()));
+        self
+    }
+
+    /// Create the repository and commit the new files.
+    pub fn build(self) -> Repository {
+        {
+            let mut index = t!(self.repo.index());
+            for file in self.files.iter() {
+                t!(index.add_path(file));
+            }
+            t!(index.write());
+            let id = t!(index.write_tree());
+            let tree = t!(self.repo.find_tree(id));
+            let sig = t!(self.repo.signature());
+            t!(self
+                .repo
+                .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]));
+        }
+        let RepoBuilder { repo, .. } = self;
+        Repository(repo)
+    }
+}
+
+impl Repository {
+    pub fn root(&self) -> &Path {
+        self.0.workdir().unwrap()
+    }
+
+    pub fn url(&self) -> Url {
+        path2url(self.0.workdir().unwrap().to_path_buf())
+    }
+
+    pub fn revparse_head(&self) -> String {
+        self.0
+            .revparse_single("HEAD")
+            .expect("revparse HEAD")
+            .id()
+            .to_string()
+    }
+}
+
+/// Create a new git repository with a project.
+pub fn new<F>(name: &str, callback: F) -> Result<Project, ProcessError>
+where
+    F: FnOnce(ProjectBuilder) -> ProjectBuilder,
+{
+    let mut git_project = project().at(name);
+    git_project = callback(git_project);
+    let git_project = git_project.build();
+
+    let repo = t!(git2::Repository::init(&git_project.root()));
+    let mut cfg = t!(repo.config());
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    add(&repo);
+    commit(&repo);
+    Ok(git_project)
+}
+
+/// Add all files in the working directory to the git index.
+pub fn add(repo: &git2::Repository) {
+    // FIXME(libgit2/libgit2#2514): apparently add_all will add all submodules
+    // as well, and then fail b/c they're a directory. As a stopgap, we just
+    // ignore all submodules.
+    let mut s = t!(repo.submodules());
+    for submodule in s.iter_mut() {
+        t!(submodule.add_to_index(false));
+    }
+    let mut index = t!(repo.index());
+    t!(index.add_all(
+        ["*"].iter(),
+        git2::IndexAddOption::DEFAULT,
+        Some(
+            &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {
+                1
+            } else {
+                0
+            })
+        )
+    ));
+    t!(index.write());
+}
+
+/// Add a git submodule to the repository.
+pub fn add_submodule<'a>(
+    repo: &'a git2::Repository,
+    url: &str,
+    path: &Path,
+) -> git2::Submodule<'a> {
+    let path = path.to_str().unwrap().replace(r"\", "/");
+    let mut s = t!(repo.submodule(url, Path::new(&path), false));
+    let subrepo = t!(s.open());
+    t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*"));
+    let mut origin = t!(subrepo.find_remote("origin"));
+    t!(origin.fetch(&[], None, None));
+    t!(subrepo.checkout_head(None));
+    t!(s.add_finalize());
+    s
+}
+
+/// Commit changes to the git repository.
+pub fn commit(repo: &git2::Repository) -> git2::Oid {
+    let tree_id = t!(t!(repo.index()).write_tree());
+    let sig = t!(repo.signature());
+    let mut parents = Vec::new();
+    if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {
+        parents.push(t!(repo.find_commit(parent)))
+    }
+    let parents = parents.iter().collect::<Vec<_>>();
+    t!(repo.commit(
+        Some("HEAD"),
+        &sig,
+        &sig,
+        "test",
+        &t!(repo.find_tree(tree_id)),
+        &parents
+    ))
+}
+
+/// Create a new tag in the git repository.
+pub fn tag(repo: &git2::Repository, name: &str) {
+    let head = repo.head().unwrap().target().unwrap();
+    t!(repo.tag(
+        name,
+        &t!(repo.find_object(head, None)),
+        &t!(repo.signature()),
+        "make a new tag",
+        false
+    ));
+}
diff --git a/tests/testsuite/support/install.rs b/tests/testsuite/support/install.rs
new file mode 100644 (file)
index 0000000..9267b57
--- /dev/null
@@ -0,0 +1,31 @@
+use std::path::{Path, PathBuf};
+
+use support::paths;
+
+/// Used by `cargo install` tests to assert an executable binary
+/// has been installed.  Example usage:
+///
+///     assert_has_installed_exe(cargo_home(), "foo");
+pub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+    assert!(check_has_installed_exe(path, name));
+}
+
+pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+    assert!(!check_has_installed_exe(path, name));
+}
+
+fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {
+    path.as_ref().join("bin").join(exe(name)).is_file()
+}
+
+pub fn cargo_home() -> PathBuf {
+    paths::home().join(".cargo")
+}
+
+pub fn exe(name: &str) -> String {
+    if cfg!(windows) {
+        format!("{}.exe", name)
+    } else {
+        name.to_string()
+    }
+}
diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs
new file mode 100644 (file)
index 0000000..f9ddb53
--- /dev/null
@@ -0,0 +1,1512 @@
+/*
+# Introduction To `support`
+
+Cargo has a wide variety of integration tests that execute the `cargo` binary
+and verify its behavior.  The `support` module contains many helpers to make
+this process easy.
+
+The general form of a test involves creating a "project", running cargo, and
+checking the result.  Projects are created with the `ProjectBuilder` where you
+specify some files to create.  The general form looks like this:
+
+```
+let p = project()
+    .file("src/main.rs", r#"fn main() { println!("hi!"); }"#)
+    .build();
+```
+
+If you do not specify a `Cargo.toml` manifest using `file()`, one is
+automatically created with a project name of `foo` using `basic_manifest()`.
+
+To run cargo, call the `cargo` method and make assertions on the execution:
+
+```
+p.cargo("run --bin foo")
+    .with_stderr(
+        "\
+[COMPILING] foo [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo`
+",
+    )
+    .with_stdout("hi!")
+    .run();
+```
+
+The project creates a mini sandbox under the "cargo integration test"
+directory with each test getting a separate directory such as
+`/path/to/cargo/target/cit/t123/`.  Each project appears as a separate
+directory.  There is also an empty `home` directory created that will be used
+as a home directory instead of your normal home directory.
+
+See `support::lines_match` for an explanation of the string pattern matching.
+
+Browse the `pub` functions in the `support` module for a variety of other
+helpful utilities.
+
+## Testing Nightly Features
+
+If you are testing a Cargo feature that only works on "nightly" cargo, then
+you need to call `masquerade_as_nightly_cargo` on the process builder like
+this:
+
+```
+p.cargo("build").masquerade_as_nightly_cargo()
+```
+
+If you are testing a feature that only works on *nightly rustc* (such as
+benchmarks), then you should exit the test if it is not running with nightly
+rust, like this:
+
+```
+if !is_nightly() {
+    return;
+}
+```
+
+## Platform-specific Notes
+
+When checking output, use `/` for paths even on Windows: the actual output
+of `\` on Windows will be replaced with `/`.
+
+Be careful when executing binaries on Windows.  You should not rename, delete,
+or overwrite a binary immediately after running it.  Under some conditions
+Windows will fail with errors like "directory not empty" or "failed to remove"
+or "access is denied".
+
+## Specifying Dependencies
+
+You should not write any tests that use the network such as contacting
+crates.io. Typically, simple path dependencies are the easiest way to add a
+dependency. Example:
+
+```
+let p = project()
+    .file("Cargo.toml", r#"
+        [package]
+        name = "foo"
+        version = "1.0.0"
+
+        [dependencies]
+        bar = {path = "bar"}
+    "#)
+    .file("src/lib.rs", "extern crate bar;")
+    .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0"))
+    .file("bar/src/lib.rs", "")
+    .build();
+```
+
+If you need to test with registry dependencies, see
+`support::registry::Package` for creating packages you can depend on.
+
+If you need to test git dependencies, see `support::git` to create a git
+dependency.
+
+*/
+
+use std::env;
+use std::ffi::OsStr;
+use std::fmt;
+use std::fs;
+use std::io::prelude::*;
+use std::os;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+use std::str;
+use std::time::Duration;
+use std::usize;
+
+use cargo;
+use cargo::util::{CargoResult, ProcessBuilder, ProcessError, Rustc};
+use serde_json::{self, Value};
+use url::Url;
+
+use self::paths::CargoPathExt;
+
+macro_rules! t {
+    ($e:expr) => {
+        match $e {
+            Ok(e) => e,
+            Err(e) => panic!("{} failed with {}", stringify!($e), e),
+        }
+    };
+}
+
+pub mod cross_compile;
+pub mod git;
+pub mod paths;
+pub mod publish;
+pub mod registry;
+#[macro_use]
+pub mod resolver;
+
+/*
+ *
+ * ===== Builders =====
+ *
+ */
+
+#[derive(PartialEq, Clone)]
+struct FileBuilder {
+    path: PathBuf,
+    body: String,
+}
+
+impl FileBuilder {
+    pub fn new(path: PathBuf, body: &str) -> FileBuilder {
+        FileBuilder {
+            path,
+            body: body.to_string(),
+        }
+    }
+
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+
+        let mut file = fs::File::create(&self.path)
+            .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
+
+        t!(file.write_all(self.body.as_bytes()));
+    }
+
+    fn dirname(&self) -> &Path {
+        self.path.parent().unwrap()
+    }
+}
+
+#[derive(PartialEq, Clone)]
+struct SymlinkBuilder {
+    dst: PathBuf,
+    src: PathBuf,
+}
+
+impl SymlinkBuilder {
+    pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
+        SymlinkBuilder { dst, src }
+    }
+
+    #[cfg(unix)]
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+        t!(os::unix::fs::symlink(&self.dst, &self.src));
+    }
+
+    #[cfg(windows)]
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+        t!(os::windows::fs::symlink_file(&self.dst, &self.src));
+    }
+
+    fn dirname(&self) -> &Path {
+        self.src.parent().unwrap()
+    }
+}
+
+pub struct Project {
+    root: PathBuf,
+}
+
+#[must_use]
+pub struct ProjectBuilder {
+    root: Project,
+    files: Vec<FileBuilder>,
+    symlinks: Vec<SymlinkBuilder>,
+    no_manifest: bool,
+}
+
+impl ProjectBuilder {
+    /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+    pub fn root(&self) -> PathBuf {
+        self.root.root()
+    }
+
+    /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+    pub fn target_debug_dir(&self) -> PathBuf {
+        self.root.target_debug_dir()
+    }
+
+    pub fn new(root: PathBuf) -> ProjectBuilder {
+        ProjectBuilder {
+            root: Project { root },
+            files: vec![],
+            symlinks: vec![],
+            no_manifest: false,
+        }
+    }
+
+    pub fn at<P: AsRef<Path>>(mut self, path: P) -> Self {
+        self.root = Project {
+            root: paths::root().join(path),
+        };
+        self
+    }
+
+    /// Add a file to the project.
+    pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
+        self._file(path.as_ref(), body);
+        self
+    }
+
+    fn _file(&mut self, path: &Path, body: &str) {
+        self.files
+            .push(FileBuilder::new(self.root.root().join(path), body));
+    }
+
+    /// Add a symlink to the project.
+    pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+        self.symlinks.push(SymlinkBuilder::new(
+            self.root.root().join(dst),
+            self.root.root().join(src),
+        ));
+        self
+    }
+
+    pub fn no_manifest(mut self) -> Self {
+        self.no_manifest = true;
+        self
+    }
+
+    /// Create the project.
+    pub fn build(mut self) -> Project {
+        // First, clean the directory if it already exists
+        self.rm_root();
+
+        // Create the empty directory
+        self.root.root().mkdir_p();
+
+        let manifest_path = self.root.root().join("Cargo.toml");
+        if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) {
+            self._file(Path::new("Cargo.toml"), &basic_manifest("foo", "0.0.1"))
+        }
+
+        for file in self.files.iter() {
+            file.mk();
+        }
+
+        for symlink in self.symlinks.iter() {
+            symlink.mk();
+        }
+
+        let ProjectBuilder { root, .. } = self;
+        root
+    }
+
+    fn rm_root(&self) {
+        self.root.root().rm_rf()
+    }
+}
+
+impl Project {
+    /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+    pub fn root(&self) -> PathBuf {
+        self.root.clone()
+    }
+
+    /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target`
+    pub fn build_dir(&self) -> PathBuf {
+        self.root().join("target")
+    }
+
+    /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+    pub fn target_debug_dir(&self) -> PathBuf {
+        self.build_dir().join("debug")
+    }
+
+    /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo`
+    pub fn url(&self) -> Url {
+        path2url(self.root())
+    }
+
+    /// Path to an example built as a library.
+    /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro"
+    /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib`
+    pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf {
+        let prefix = Project::get_lib_prefix(kind);
+
+        let extension = Project::get_lib_extension(kind);
+
+        let lib_file_name = format!("{}{}.{}", prefix, name, extension);
+
+        self.target_debug_dir()
+            .join("examples")
+            .join(&lib_file_name)
+    }
+
+    /// Path to a debug binary.
+    /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo`
+    pub fn bin(&self, b: &str) -> PathBuf {
+        self.build_dir()
+            .join("debug")
+            .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+    }
+
+    /// Path to a release binary.
+    /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo`
+    pub fn release_bin(&self, b: &str) -> PathBuf {
+        self.build_dir()
+            .join("release")
+            .join(&format!("{}{}", b, env::consts::EXE_SUFFIX))
+    }
+
+    /// Path to a debug binary for a specific target triple.
+    /// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo`
+    pub fn target_bin(&self, target: &str, b: &str) -> PathBuf {
+        self.build_dir().join(target).join("debug").join(&format!(
+            "{}{}",
+            b,
+            env::consts::EXE_SUFFIX
+        ))
+    }
+
+    /// Change the contents of an existing file.
+    pub fn change_file(&self, path: &str, body: &str) {
+        FileBuilder::new(self.root().join(path), body).mk()
+    }
+
+    /// Create a `ProcessBuilder` to run a program in the project
+    /// and wrap it in an Execs to assert on the execution.
+    /// Example:
+    ///         p.process(&p.bin("foo"))
+    ///             .with_stdout("bar\n")
+    ///             .run();
+    pub fn process<T: AsRef<OsStr>>(&self, program: T) -> Execs {
+        let mut p = ::support::process(program);
+        p.cwd(self.root());
+        execs().with_process_builder(p)
+    }
+
+    /// Create a `ProcessBuilder` to run cargo.
+    /// Arguments can be separated by spaces.
+    /// Example:
+    ///     p.cargo("build --bin foo").run();
+    pub fn cargo(&self, cmd: &str) -> Execs {
+        let mut execs = self.process(&cargo_exe());
+        if let Some(ref mut p) = execs.process_builder {
+            split_and_add_args(p, cmd);
+        }
+        execs
+    }
+
+    /// Returns the contents of `Cargo.lock`.
+    pub fn read_lockfile(&self) -> String {
+        self.read_file("Cargo.lock")
+    }
+
+    /// Returns the contents of a path in the project root
+    pub fn read_file(&self, path: &str) -> String {
+        let mut buffer = String::new();
+        fs::File::open(self.root().join(path))
+            .unwrap()
+            .read_to_string(&mut buffer)
+            .unwrap();
+        buffer
+    }
+
+    /// Modifies `Cargo.toml` to remove all commented lines.
+    pub fn uncomment_root_manifest(&self) {
+        let mut contents = String::new();
+        fs::File::open(self.root().join("Cargo.toml"))
+            .unwrap()
+            .read_to_string(&mut contents)
+            .unwrap();
+        fs::File::create(self.root().join("Cargo.toml"))
+            .unwrap()
+            .write_all(contents.replace("#", "").as_bytes())
+            .unwrap();
+    }
+
+    fn get_lib_prefix(kind: &str) -> &str {
+        match kind {
+            "lib" | "rlib" => "lib",
+            "staticlib" | "dylib" | "proc-macro" => {
+                if cfg!(windows) {
+                    ""
+                } else {
+                    "lib"
+                }
+            }
+            _ => unreachable!(),
+        }
+    }
+
+    fn get_lib_extension(kind: &str) -> &str {
+        match kind {
+            "lib" | "rlib" => "rlib",
+            "staticlib" => {
+                if cfg!(windows) {
+                    "lib"
+                } else {
+                    "a"
+                }
+            }
+            "dylib" | "proc-macro" => {
+                if cfg!(windows) {
+                    "dll"
+                } else if cfg!(target_os = "macos") {
+                    "dylib"
+                } else {
+                    "so"
+                }
+            }
+            _ => unreachable!(),
+        }
+    }
+}
+
// Generates a project layout named "foo" under the standard test root.
pub fn project() -> ProjectBuilder {
    ProjectBuilder::new(paths::root().join("foo"))
}
+
// Generates a project layout inside our fake home dir, under `name`.
pub fn project_in_home(name: &str) -> ProjectBuilder {
    ProjectBuilder::new(paths::home().join(name))
}
+
+// === Helpers ===
+
/// Build the source of a `main.rs` that declares `extern crate` for each
/// entry in `deps` and then prints via `println!`.
///
/// `println` is spliced verbatim into the `println!` call, so it must be a
/// valid argument list, e.g. `r#""hello""#`.
pub fn main_file(println: &str, deps: &[&str]) -> String {
    let mut buf = String::new();

    for dep in deps.iter() {
        buf.push_str(&format!("extern crate {};\n", dep));
    }

    buf.push_str("fn main() { println!(");
    buf.push_str(println);
    buf.push_str("); }\n");

    // `buf` is already a `String`; the old `buf.to_string()` made a
    // needless extra allocation.
    buf
}
+
/// Attach a contextual message to an error, keeping the original error's
/// text after it: `"<val>; original=<err>"`.
trait ErrMsg<T> {
    fn with_err_msg(self, val: String) -> Result<T, String>;
}

impl<T, E: fmt::Display> ErrMsg<T> for Result<T, E> {
    fn with_err_msg(self, val: String) -> Result<T, String> {
        // Ok passes straight through; only Err is re-rendered.
        self.map_err(|err| format!("{}; original={}", val, err))
    }
}
+
/// Directory containing the cargo executable under test.
///
/// Uses `CARGO_BIN_PATH` when set; otherwise falls back to the directory of
/// the current test executable (popping the `deps` component that `cargo
/// test` binaries live in). Panics when neither source yields a path.
pub fn cargo_dir() -> PathBuf {
    env::var_os("CARGO_BIN_PATH")
        .map(PathBuf::from)
        .or_else(|| {
            env::current_exe().ok().map(|mut path| {
                path.pop();
                if path.ends_with("deps") {
                    path.pop();
                }
                path
            })
        })
        // `expect` states the invariant directly instead of the old
        // `unwrap_or_else(|| panic!(..))` detour.
        .expect("CARGO_BIN_PATH wasn't set. Cannot continue running test")
}
+
/// Full path to the cargo executable under test (with `.exe` on Windows).
pub fn cargo_exe() -> PathBuf {
    cargo_dir().join(format!("cargo{}", env::consts::EXE_SUFFIX))
}
+
/*
 *
 * ===== Matchers =====
 *
 */

/// Result type shared by all output matchers; `Err` carries a
/// human-readable description of the mismatch.
pub type MatchResult = Result<(), String>;
+
/// Builder describing how a process should be run and what its output must
/// look like; expectations are checked by `run`/`run_output`.
#[must_use]
#[derive(Clone)]
pub struct Execs {
    // Set once the command has been executed; `Drop` panics if it never is.
    ran: bool,
    process_builder: Option<ProcessBuilder>,
    // Exact-match expectations (`None` = don't check that stream).
    expect_stdout: Option<String>,
    // NOTE(review): set in `execs()` but never read in the visible code —
    // confirm whether this field is dead.
    expect_stdin: Option<String>,
    expect_stderr: Option<String>,
    // Expected exit code; `Some(0)` by default, `None` disables the check.
    expect_exit_code: Option<i32>,
    // Contiguous-lines / counted / negative / unordered expectations,
    // all consumed by `match_stdout`.
    expect_stdout_contains: Vec<String>,
    expect_stderr_contains: Vec<String>,
    expect_either_contains: Vec<String>,
    expect_stdout_contains_n: Vec<(String, usize)>,
    expect_stdout_not_contains: Vec<String>,
    expect_stderr_not_contains: Vec<String>,
    expect_stderr_unordered: Vec<String>,
    expect_neither_contains: Vec<String>,
    expect_json: Option<Vec<Value>>,
    // When true, forward child stdout/stderr to the terminal (local debug).
    stream_output: bool,
}
+
+impl Execs {
    /// Attach the process to run and assert on; used by `Project::process`.
    pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs {
        self.process_builder = Some(p);
        self
    }

    /// Verify that stdout is equal to the given lines.
    /// See `lines_match` for supported patterns.
    pub fn with_stdout<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stdout = Some(expected.to_string());
        self
    }

    /// Verify that stderr is equal to the given lines.
    /// See `lines_match` for supported patterns.
    pub fn with_stderr<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stderr = Some(expected.to_string());
        self
    }

    /// Verify the exit code from the process.
    ///
    /// This is not necessary if the expected exit code is `0`.
    pub fn with_status(&mut self, expected: i32) -> &mut Self {
        self.expect_exit_code = Some(expected);
        self
    }

    /// Remove exit code check for the process.
    ///
    /// By default, the expected exit code is `0`.
    pub fn without_status(&mut self) -> &mut Self {
        self.expect_exit_code = None;
        self
    }

    /// Verify that stdout contains the given contiguous lines somewhere in
    /// its output.
    /// See `lines_match` for supported patterns.
    pub fn with_stdout_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stdout_contains.push(expected.to_string());
        self
    }

    /// Verify that stderr contains the given contiguous lines somewhere in
    /// its output.
    /// See `lines_match` for supported patterns.
    pub fn with_stderr_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stderr_contains.push(expected.to_string());
        self
    }

    /// Verify that either stdout or stderr contains the given contiguous
    /// lines somewhere in its output.
    /// See `lines_match` for supported patterns.
    pub fn with_either_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_either_contains.push(expected.to_string());
        self
    }

    /// Verify that stdout contains the given contiguous lines somewhere in
    /// its output, and should be repeated `number` times.
    /// See `lines_match` for supported patterns.
    pub fn with_stdout_contains_n<S: ToString>(&mut self, expected: S, number: usize) -> &mut Self {
        self.expect_stdout_contains_n
            .push((expected.to_string(), number));
        self
    }

    /// Verify that stdout does not contain the given contiguous lines.
    /// See `lines_match` for supported patterns.
    /// See note on `with_stderr_does_not_contain`.
    pub fn with_stdout_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stdout_not_contains.push(expected.to_string());
        self
    }

    /// Verify that stderr does not contain the given contiguous lines.
    /// See `lines_match` for supported patterns.
    ///
    /// Care should be taken when using this method because there is a
    /// limitless number of possible things that *won't* appear.  A typo means
    /// your test will pass without verifying the correct behavior. If
    /// possible, write the test first so that it fails, and then implement
    /// your fix/feature to make it pass.
    pub fn with_stderr_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stderr_not_contains.push(expected.to_string());
        self
    }

    /// Verify that all of the stderr output is equal to the given lines,
    /// ignoring the order of the lines.
    /// See `lines_match` for supported patterns.
    /// This is useful when checking the output of `cargo build -v` since
    /// the order of the output is not always deterministic.
    /// Recommend use `with_stderr_contains` instead unless you really want to
    /// check *every* line of output.
    ///
    /// Be careful when using patterns such as `[..]`, because you may end up
    /// with multiple lines that might match, and this is not smart enough to
    /// do anything like longest-match.  For example, avoid something like:
    ///     [RUNNING] `rustc [..]
    ///     [RUNNING] `rustc --crate-name foo [..]
    /// This will randomly fail if the other crate name is `bar`, and the
    /// order changes.
    pub fn with_stderr_unordered<S: ToString>(&mut self, expected: S) -> &mut Self {
        self.expect_stderr_unordered.push(expected.to_string());
        self
    }

    /// Verify the JSON output matches the given JSON.
    /// Typically used when testing cargo commands that emit JSON.
    /// Each separate JSON object should be separated by a blank line.
    /// Example:
    ///     assert_that(
    ///         p.cargo("metadata"),
    ///         execs().with_json(r#"
    ///             {"example": "abc"}
    ///
    ///             {"example": "def"}
    ///         "#)
    ///      );
    /// Objects should match in the order given.
    /// The order of arrays is ignored.
    /// Strings support patterns described in `lines_match`.
    /// Use `{...}` to match any object.
    pub fn with_json(&mut self, expected: &str) -> &mut Self {
        self.expect_json = Some(
            expected
                .split("\n\n")
                .map(|obj| obj.parse().unwrap())
                .collect(),
        );
        self
    }

    /// Forward subordinate process stdout/stderr to the terminal.
    /// Useful for printf debugging of the tests.
    /// CAUTION: CI will fail if you leave this in your test!
    #[allow(unused)]
    pub fn stream(&mut self) -> &mut Self {
        self.stream_output = true;
        self
    }
+
    /// Append an argument to the command.
    /// Silently a no-op when no process builder is attached.
    pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {
        if let Some(ref mut p) = self.process_builder {
            p.arg(arg);
        }
        self
    }

    /// Set the working directory for the command.
    /// Silently a no-op when no process builder is attached.
    pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut Self {
        if let Some(ref mut p) = self.process_builder {
            p.cwd(path);
        }
        self
    }

    /// Set an environment variable for the command.
    /// Silently a no-op when no process builder is attached.
    pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut Self {
        if let Some(ref mut p) = self.process_builder {
            p.env(key, val);
        }
        self
    }

    /// Remove an environment variable from the command's environment.
    /// Silently a no-op when no process builder is attached.
    pub fn env_remove(&mut self, key: &str) -> &mut Self {
        if let Some(ref mut p) = self.process_builder {
            p.env_remove(key);
        }
        self
    }
+
+    pub fn exec_with_output(&mut self) -> CargoResult<Output> {
+        self.ran = true;
+        // TODO avoid unwrap
+        let p = (&self.process_builder).clone().unwrap();
+        p.exec_with_output()
+    }
+
+    pub fn build_command(&mut self) -> Command {
+        self.ran = true;
+        // TODO avoid unwrap
+        let p = (&self.process_builder).clone().unwrap();
+        p.build_command()
+    }
+
    /// Pretend the cargo under test is a nightly build (so the subprocess
    /// accepts unstable flags).
    /// Silently a no-op when no process builder is attached.
    pub fn masquerade_as_nightly_cargo(&mut self) -> &mut Self {
        if let Some(ref mut p) = self.process_builder {
            p.masquerade_as_nightly_cargo();
        }
        self
    }

    /// Execute the process and panic with a readable report if any
    /// expectation is not met. Panics if no process builder is attached.
    pub fn run(&mut self) {
        self.ran = true;
        let p = (&self.process_builder).clone().unwrap();
        if let Err(e) = self.match_process(&p) {
            panic!("\nExpected: {:?}\n    but: {}", self, e)
        }
    }

    /// Check the expectations against an already-captured `Output`
    /// (for processes that were run outside this helper).
    pub fn run_output(&mut self, output: &Output) {
        self.ran = true;
        if let Err(e) = self.match_output(output) {
            panic!("\nExpected: {:?}\n    but: {}", self, e)
        }
    }
+
    /// Run the process (optionally streaming its output for debugging) and
    /// match the captured output against the expectations.
    ///
    /// A process that failed to run but still captured output (surfaced as a
    /// `ProcessError` with `output: Some(..)`) is matched on that output, so
    /// tests can assert on failing commands too.
    fn match_process(&self, process: &ProcessBuilder) -> MatchResult {
        println!("running {}", process);
        let res = if self.stream_output {
            // `.stream()` is a local debugging aid only; see `stream` above.
            if env::var("CI").is_ok() {
                panic!("`.stream()` is for local debugging")
            }
            process.exec_with_streaming(
                &mut |out| Ok(println!("{}", out)),
                &mut |err| Ok(eprintln!("{}", err)),
                false,
            )
        } else {
            process.exec_with_output()
        };

        match res {
            Ok(out) => self.match_output(&out),
            Err(e) => {
                let err = e.downcast_ref::<ProcessError>();
                if let Some(&ProcessError {
                    output: Some(ref out),
                    ..
                }) = err
                {
                    return self.match_output(out);
                }
                // No captured output: report the exec failure with its causes.
                let mut s = format!("could not exec process {}: {}", process, e);
                for cause in e.iter_causes() {
                    s.push_str(&format!("\ncaused by: {}", cause));
                }
                Err(s)
            }
        }
    }
+
+    fn match_output(&self, actual: &Output) -> MatchResult {
+        self.match_status(actual)
+            .and(self.match_stdout(actual))
+            .and(self.match_stderr(actual))
+    }
+
+    fn match_status(&self, actual: &Output) -> MatchResult {
+        match self.expect_exit_code {
+            None => Ok(()),
+            Some(code) if actual.status.code() == Some(code) => Ok(()),
+            Some(_) => Err(format!(
+                "exited with {}\n--- stdout\n{}\n--- stderr\n{}",
+                actual.status,
+                String::from_utf8_lossy(&actual.stdout),
+                String::from_utf8_lossy(&actual.stderr)
+            )),
+        }
+    }
+
    /// Check every stdout-related expectation — and, despite the name, also
    /// the stderr `contains`/`not_contains`/`unordered` lists, the
    /// either/neither variants, and the JSON expectations.
    fn match_stdout(&self, actual: &Output) -> MatchResult {
        self.match_std(
            self.expect_stdout.as_ref(),
            &actual.stdout,
            "stdout",
            &actual.stderr,
            MatchKind::Exact,
        )?;
        for expect in self.expect_stdout_contains.iter() {
            self.match_std(
                Some(expect),
                &actual.stdout,
                "stdout",
                &actual.stderr,
                MatchKind::Partial,
            )?;
        }
        for expect in self.expect_stderr_contains.iter() {
            self.match_std(
                Some(expect),
                &actual.stderr,
                "stderr",
                &actual.stdout,
                MatchKind::Partial,
            )?;
        }
        for &(ref expect, number) in self.expect_stdout_contains_n.iter() {
            self.match_std(
                Some(&expect),
                &actual.stdout,
                "stdout",
                &actual.stderr,
                MatchKind::PartialN(number),
            )?;
        }
        for expect in self.expect_stdout_not_contains.iter() {
            self.match_std(
                Some(expect),
                &actual.stdout,
                "stdout",
                &actual.stderr,
                MatchKind::NotPresent,
            )?;
        }
        for expect in self.expect_stderr_not_contains.iter() {
            self.match_std(
                Some(expect),
                &actual.stderr,
                "stderr",
                &actual.stdout,
                MatchKind::NotPresent,
            )?;
        }
        for expect in self.expect_stderr_unordered.iter() {
            self.match_std(
                Some(expect),
                &actual.stderr,
                "stderr",
                &actual.stdout,
                MatchKind::Unordered,
            )?;
        }
        // "neither" = the pattern must be absent from BOTH streams.
        for expect in self.expect_neither_contains.iter() {
            self.match_std(
                Some(expect),
                &actual.stdout,
                "stdout",
                &actual.stdout,
                MatchKind::NotPresent,
            )?;

            self.match_std(
                Some(expect),
                &actual.stderr,
                "stderr",
                &actual.stderr,
                MatchKind::NotPresent,
            )?;
        }

        // "either" = a match in at least one stream satisfies the check.
        for expect in self.expect_either_contains.iter() {
            let match_std = self.match_std(
                Some(expect),
                &actual.stdout,
                "stdout",
                &actual.stdout,
                MatchKind::Partial,
            );
            let match_err = self.match_std(
                Some(expect),
                &actual.stderr,
                "stderr",
                &actual.stderr,
                MatchKind::Partial,
            );

            if let (Err(_), Err(_)) = (match_std, match_err) {
                Err(format!(
                    "expected to find:\n\
                     {}\n\n\
                     did not find in either output.",
                    expect
                ))?;
            }
        }

        // JSON expectations: one object per stdout line starting with '{'.
        if let Some(ref objects) = self.expect_json {
            let stdout = str::from_utf8(&actual.stdout)
                .map_err(|_| "stdout was not utf8 encoded".to_owned())?;
            let lines = stdout
                .lines()
                .filter(|line| line.starts_with('{'))
                .collect::<Vec<_>>();
            if lines.len() != objects.len() {
                return Err(format!(
                    "expected {} json lines, got {}, stdout:\n{}",
                    objects.len(),
                    lines.len(),
                    stdout
                ));
            }
            for (obj, line) in objects.iter().zip(lines) {
                self.match_json(obj, line)?;
            }
        }
        Ok(())
    }
+
    /// Check the exact-match stderr expectation (`with_stderr`); the other
    /// stderr expectations are handled in `match_stdout`.
    fn match_stderr(&self, actual: &Output) -> MatchResult {
        self.match_std(
            self.expect_stderr.as_ref(),
            &actual.stderr,
            "stderr",
            &actual.stdout,
            MatchKind::Exact,
        )
    }
+
    /// Core matcher: compare `expected` (if any) against the `actual` bytes
    /// according to `kind`.  `description` names the stream for error
    /// messages; `extra` is the other stream, shown for context on failure.
    ///
    /// Before matching, the `[CWD]` and `[ROOT]` templates in the
    /// expectation are substituted, `\r` is stripped from the actual output
    /// and tabs are made visible as `<tab>`.
    fn match_std(
        &self,
        expected: Option<&String>,
        actual: &[u8],
        description: &str,
        extra: &[u8],
        kind: MatchKind,
    ) -> MatchResult {
        let out = match expected {
            Some(out) => {
                // Do the template replacements on the expected string.
                let replaced = match self.process_builder {
                    None => out.to_string(),
                    Some(ref p) => match p.get_cwd() {
                        None => out.to_string(),
                        Some(cwd) => out
                            .replace( "[CWD]", &cwd.display().to_string())
                        ,
                    },
                };

                // On Windows, we need to use a wildcard for the drive,
                // because we don't actually know what it will be.
                let replaced = replaced
                    .replace("[ROOT]",
                             if cfg!(windows) { r#"[..]:\"# } else { "/" });

                replaced
            },
            None => return Ok(()),
        };

        let actual = match str::from_utf8(actual) {
            Err(..) => return Err(format!("{} was not utf8 encoded", description)),
            Ok(actual) => actual,
        };
        // Let's not deal with \r\n vs \n on windows...
        let actual = actual.replace("\r", "");
        let actual = actual.replace("\t", "<tab>");

        match kind {
            // Whole output must match, line for line.
            MatchKind::Exact => {
                let a = actual.lines();
                let e = out.lines();

                let diffs = self.diff_lines(a, e, false);
                if diffs.is_empty() {
                    Ok(())
                } else {
                    Err(format!(
                        "differences:\n\
                         {}\n\n\
                         other output:\n\
                         `{}`",
                        diffs.join("\n"),
                        String::from_utf8_lossy(extra)
                    ))
                }
            }
            // Slide a window over the output, keeping the closest diff.
            MatchKind::Partial => {
                let mut a = actual.lines();
                let e = out.lines();

                let mut diffs = self.diff_lines(a.clone(), e.clone(), true);
                while let Some(..) = a.next() {
                    let a = self.diff_lines(a.clone(), e.clone(), true);
                    if a.len() < diffs.len() {
                        diffs = a;
                    }
                }
                if diffs.is_empty() {
                    Ok(())
                } else {
                    Err(format!(
                        "expected to find:\n\
                         {}\n\n\
                         did not find in output:\n\
                         {}",
                        out, actual
                    ))
                }
            }
            // Count how many window positions match exactly `number` times.
            MatchKind::PartialN(number) => {
                let mut a = actual.lines();
                let e = out.lines();

                let mut matches = 0;

                while let Some(..) = {
                    if self.diff_lines(a.clone(), e.clone(), true).is_empty() {
                        matches += 1;
                    }
                    a.next()
                } {}

                if matches == number {
                    Ok(())
                } else {
                    Err(format!(
                        "expected to find {} occurrences:\n\
                         {}\n\n\
                         did not find in output:\n\
                         {}",
                        number, out, actual
                    ))
                }
            }
            // Inverse of Partial: any window with an empty diff is a failure.
            MatchKind::NotPresent => {
                let mut a = actual.lines();
                let e = out.lines();

                let mut diffs = self.diff_lines(a.clone(), e.clone(), true);
                while let Some(..) = a.next() {
                    let a = self.diff_lines(a.clone(), e.clone(), true);
                    if a.len() < diffs.len() {
                        diffs = a;
                    }
                }
                if diffs.is_empty() {
                    Err(format!(
                        "expected not to find:\n\
                         {}\n\n\
                         but found in output:\n\
                         {}",
                        out, actual
                    ))
                } else {
                    Ok(())
                }
            }
            // Every expected line must match some (still-unclaimed) actual
            // line; leftovers in either direction are errors.
            MatchKind::Unordered => {
                let mut a = actual.lines().collect::<Vec<_>>();
                let e = out.lines();

                for e_line in e {
                    match a.iter().position(|a_line| lines_match(e_line, a_line)) {
                        Some(index) => a.remove(index),
                        None => {
                            return Err(format!(
                                "Did not find expected line:\n\
                                 {}\n\
                                 Remaining available output:\n\
                                 {}\n",
                                e_line,
                                a.join("\n")
                            ))
                        }
                    };
                }
                if !a.is_empty() {
                    Err(format!(
                        "Output included extra lines:\n\
                         {}\n",
                        a.join("\n")
                    ))
                } else {
                    Ok(())
                }
            }
        }
    }
+
+    fn match_json(&self, expected: &Value, line: &str) -> MatchResult {
+        let actual = match line.parse() {
+            Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)),
+            Ok(actual) => actual,
+        };
+
+        match find_mismatch(expected, &actual) {
+            Some((expected_part, actual_part)) => Err(format!(
+                "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+                serde_json::to_string_pretty(expected).unwrap(),
+                serde_json::to_string_pretty(&actual).unwrap(),
+                serde_json::to_string_pretty(expected_part).unwrap(),
+                serde_json::to_string_pretty(actual_part).unwrap(),
+            )),
+            None => Ok(()),
+        }
+    }
+
    /// Line-by-line diff of `actual` against `expected`, returning one
    /// formatted entry per mismatching pair; an empty result means a match.
    /// When `partial`, `actual` is truncated to the expectation's length so
    /// callers can slide a window across the output.
    fn diff_lines<'a>(
        &self,
        actual: str::Lines<'a>,
        expected: str::Lines<'a>,
        partial: bool,
    ) -> Vec<String> {
        let actual = actual.take(if partial {
            expected.clone().count()
        } else {
            usize::MAX
        });
        // zip_all pads the shorter side with None so length differences
        // show up as diff entries too.
        zip_all(actual, expected)
            .enumerate()
            .filter_map(|(i, (a, e))| match (a, e) {
                (Some(a), Some(e)) => {
                    if lines_match(&e, &a) {
                        None
                    } else {
                        Some(format!("{:3} - |{}|\n    + |{}|\n", i, e, a))
                    }
                }
                (Some(a), None) => Some(format!("{:3} -\n    + |{}|\n", i, a)),
                (None, Some(e)) => Some(format!("{:3} - |{}|\n    +\n", i, e)),
                (None, None) => panic!("Cannot get here"),
            }).collect()
    }
+}
+
+impl Drop for Execs {
+    fn drop(&mut self) {
+        if !self.ran {
+            panic!("forgot to run this command");
+        }
+    }
+}
+
/// How `match_std` compares the expected lines against the actual output.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum MatchKind {
    /// Entire output must match, line for line.
    Exact,
    /// Expected lines must appear contiguously somewhere in the output.
    Partial,
    /// Expected lines must appear contiguously exactly N times.
    PartialN(usize),
    /// Expected lines must not appear anywhere in the output.
    NotPresent,
    /// Every line must match, but order is ignored.
    Unordered,
}
+
+/// Compare a line with an expected pattern.
+/// - Use `[..]` as a wildcard to match 0 or more characters on the same line
+///   (similar to `.*` in a regex).
+/// - Use `[EXE]` to optionally add `.exe` on Windows (empty string on other
+///   platforms).
+/// - There is a wide range of macros (such as `[COMPILING]` or `[WARNING]`)
+///   to match cargo's "status" output and allows you to ignore the alignment.
+///   See `substitute_macros` for a complete list of macros.
+pub fn lines_match(expected: &str, actual: &str) -> bool {
+    // Let's not deal with / vs \ (windows...)
+    let expected = expected.replace("\\", "/");
+    let mut actual: &str = &actual.replace("\\", "/");
+    let expected = substitute_macros(&expected);
+    for (i, part) in expected.split("[..]").enumerate() {
+        match actual.find(part) {
+            Some(j) => {
+                if i == 0 && j != 0 {
+                    return false;
+                }
+                actual = &actual[j + part.len()..];
+            }
+            None => return false,
+        }
+    }
+    actual.is_empty() || expected.ends_with("[..]")
+}
+
/// Sanity checks for the `[..]` wildcard behavior of `lines_match`.
#[test]
fn lines_match_works() {
    assert!(lines_match("a b", "a b"));
    assert!(lines_match("a[..]b", "a b"));
    assert!(lines_match("a[..]", "a b"));
    assert!(lines_match("[..]", "a b"));
    assert!(lines_match("[..]b", "a b"));

    // Anchoring: a non-wildcard prefix/whole pattern must match exactly.
    assert!(!lines_match("[..]b", "c"));
    assert!(!lines_match("b", "c"));
    assert!(!lines_match("b", "cb"));
}
+
+// Compares JSON object for approximate equality.
+// You can use `[..]` wildcard in strings (useful for OS dependent things such
+// as paths).  You can use a `"{...}"` string literal as a wildcard for
+// arbitrary nested JSON (useful for parts of object emitted by other programs
+// (e.g. rustc) rather than Cargo itself).  Arrays are sorted before comparison.
+fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
+    use serde_json::Value::*;
+    match (expected, actual) {
+        (&Number(ref l), &Number(ref r)) if l == r => None,
+        (&Bool(l), &Bool(r)) if l == r => None,
+        (&String(ref l), &String(ref r)) if lines_match(l, r) => None,
+        (&Array(ref l), &Array(ref r)) => {
+            if l.len() != r.len() {
+                return Some((expected, actual));
+            }
+
+            let mut l = l.iter().collect::<Vec<_>>();
+            let mut r = r.iter().collect::<Vec<_>>();
+
+            l.retain(
+                |l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+                    Some(i) => {
+                        r.remove(i);
+                        false
+                    }
+                    None => true,
+                },
+            );
+
+            if !l.is_empty() {
+                assert!(!r.is_empty());
+                Some((&l[0], &r[0]))
+            } else {
+                assert_eq!(r.len(), 0);
+                None
+            }
+        }
+        (&Object(ref l), &Object(ref r)) => {
+            let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+            if !same_keys {
+                return Some((expected, actual));
+            }
+
+            l.values()
+                .zip(r.values())
+                .filter_map(|(l, r)| find_mismatch(l, r))
+                .nth(0)
+        }
+        (&Null, &Null) => None,
+        // magic string literal "{...}" acts as wildcard for any sub-JSON
+        (&String(ref l), _) if l == "{...}" => None,
+        _ => Some((expected, actual)),
+    }
+}
+
/// Iterator that zips two iterators to the length of the LONGER one,
/// yielding `None` placeholders once the shorter side is exhausted.
struct ZipAll<I1: Iterator, I2: Iterator> {
    first: I1,
    second: I2,
}

impl<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>> Iterator for ZipAll<I1, I2> {
    type Item = (Option<T>, Option<T>);
    fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
        let pair = (self.first.next(), self.second.next());
        // Stop only when BOTH sides have run out.
        if let (None, None) = pair {
            None
        } else {
            Some(pair)
        }
    }
}
+
+fn zip_all<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+    ZipAll {
+        first: a,
+        second: b,
+    }
+}
+
+impl fmt::Debug for Execs {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "execs")
+    }
+}
+
/// Creates a fresh `Execs` with no output expectations set; the only default
/// expectation is a zero (successful) exit code.
pub fn execs() -> Execs {
    Execs {
        ran: false,
        process_builder: None,
        expect_stdout: None,
        expect_stderr: None,
        expect_stdin: None,
        // Commands are expected to succeed unless a test overrides this.
        expect_exit_code: Some(0),
        expect_stdout_contains: Vec::new(),
        expect_stderr_contains: Vec::new(),
        expect_either_contains: Vec::new(),
        expect_stdout_contains_n: Vec::new(),
        expect_stdout_not_contains: Vec::new(),
        expect_stderr_not_contains: Vec::new(),
        expect_stderr_unordered: Vec::new(),
        expect_neither_contains: Vec::new(),
        expect_json: None,
        stream_output: false,
    }
}
+
/// Extension trait: apply an in-place mutation to a value, then pass the
/// value along. Handy for one-expression builder-style tweaks.
pub trait Tap {
    fn tap<F: FnOnce(&mut Self)>(self, callback: F) -> Self;
}

impl<T> Tap for T {
    fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> Self {
        callback(&mut self);
        self
    }
}
+
/// Returns a minimal `Cargo.toml` manifest body for the given package name
/// and version (no dependencies, empty author list).
pub fn basic_manifest(name: &str, version: &str) -> String {
    format!(
        r#"
        [package]
        name = "{name}"
        version = "{version}"
        authors = []
    "#,
        name = name,
        version = version
    )
}
+
/// Returns a `Cargo.toml` manifest declaring a single `[[bin]]` target with
/// the same name as the package, pinned at version 0.5.0.
pub fn basic_bin_manifest(name: &str) -> String {
    format!(
        r#"
        [package]

        name = "{name}"
        version = "0.5.0"
        authors = ["wycats@example.com"]

        [[bin]]

        name = "{name}"
    "#,
        name = name
    )
}
+
/// Returns a `Cargo.toml` manifest declaring a `[lib]` target with the same
/// name as the package, pinned at version 0.5.0.
pub fn basic_lib_manifest(name: &str) -> String {
    format!(
        r#"
        [package]

        name = "{name}"
        version = "0.5.0"
        authors = ["wycats@example.com"]

        [lib]

        name = "{name}"
    "#,
        name = name
    )
}
+
/// Converts a filesystem path into a `file://` URL.
///
/// Panics if the path cannot be represented as a file URL (e.g. it is not
/// absolute).
pub fn path2url<P: AsRef<Path>>(p: P) -> Url {
    Url::from_file_path(p).ok().unwrap()
}
+
/// Expands the `[MACRO]` placeholders used in expected test output (e.g.
/// `[COMPILING]`, `[FINISHED]`) into the exact column-padded status strings
/// cargo prints, so tests can state expectations tersely.
fn substitute_macros(input: &str) -> String {
    let macros = [
        ("[RUNNING]", "     Running"),
        ("[COMPILING]", "   Compiling"),
        ("[CHECKING]", "    Checking"),
        ("[CREATED]", "     Created"),
        ("[FINISHED]", "    Finished"),
        ("[ERROR]", "error:"),
        ("[WARNING]", "warning:"),
        ("[DOCUMENTING]", " Documenting"),
        ("[FRESH]", "       Fresh"),
        ("[UPDATING]", "    Updating"),
        ("[ADDING]", "      Adding"),
        ("[REMOVING]", "    Removing"),
        ("[DOCTEST]", "   Doc-tests"),
        ("[PACKAGING]", "   Packaging"),
        ("[DOWNLOADING]", " Downloading"),
        ("[DOWNLOADED]", "  Downloaded"),
        ("[UPLOADING]", "   Uploading"),
        ("[VERIFYING]", "   Verifying"),
        ("[ARCHIVING]", "   Archiving"),
        ("[INSTALLING]", "  Installing"),
        ("[REPLACING]", "   Replacing"),
        ("[UNPACKING]", "   Unpacking"),
        ("[SUMMARY]", "     Summary"),
        ("[FIXING]", "      Fixing"),
        // Executable suffix differs per platform.
        ("[EXE]", if cfg!(windows) { ".exe" } else { "" }),
    ];
    // Apply every substitution in turn over the whole string.
    macros
        .iter()
        .fold(input.to_owned(), |expanded, &(pat, subst)| {
            expanded.replace(pat, subst)
        })
}
+
+pub mod install;
+
// One `Rustc` handle per test thread, resolving `rustc` from PATH. The
// rustup-wrapper path is a dummy: tests never exercise that code path.
thread_local!(
pub static RUSTC: Rustc = Rustc::new(
    PathBuf::from("rustc"),
    None,
    Path::new("should be path to rustup rustc, but we don't care in tests"),
    None,
).unwrap()
);
+
/// The rustc host triple such as `x86_64-unknown-linux-gnu`, as reported by
/// the `rustc` under test.
pub fn rustc_host() -> String {
    RUSTC.with(|r| r.host.clone())
}
+
+pub fn is_nightly() -> bool {
+    RUSTC.with(|r| r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev"))
+}
+
/// Creates a `ProcessBuilder` for `t` with a sandboxed, deterministic
/// environment (see `_process` for the exact scrubbing applied).
pub fn process<T: AsRef<OsStr>>(t: T) -> cargo::util::ProcessBuilder {
    _process(t.as_ref())
}
+
/// Builds a `ProcessBuilder` rooted in the test sandbox with a scrubbed
/// environment: a fresh `CARGO_HOME`/`HOME` inside the sandbox and all
/// user/CI configuration (rustc overrides, git identity, make flags, …)
/// removed so test output is deterministic.
fn _process(t: &OsStr) -> cargo::util::ProcessBuilder {
    let mut p = cargo::util::process(t);
    p.cwd(&paths::root())
     .env_remove("CARGO_HOME")
     .env("HOME", paths::home())
     .env("CARGO_HOME", paths::home().join(".cargo"))
     .env("__CARGO_TEST_ROOT", paths::root())

     // Force cargo to think it's on the stable channel for all tests, this
     // should hopefully not surprise us as we add cargo features over time and
     // cargo rides the trains.
     .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable")

     // For now disable incremental by default as support hasn't ridden to the
     // stable channel yet. Once incremental support hits the stable compiler we
     // can switch this to one and then fix the tests.
     .env("CARGO_INCREMENTAL", "0")

     // This env var can switch the git backend from libgit2 to git2-curl, which
     // can tweak error messages and cause some tests to fail, so let's forcibly
     // remove it.
     .env_remove("CARGO_HTTP_CHECK_REVOKE")

     .env_remove("__CARGO_DEFAULT_LIB_METADATA")
     .env_remove("RUSTC")
     .env_remove("RUSTDOC")
     .env_remove("RUSTC_WRAPPER")
     .env_remove("RUSTFLAGS")
     .env_remove("XDG_CONFIG_HOME")      // see #2345
     .env("GIT_CONFIG_NOSYSTEM", "1")    // keep trying to sandbox ourselves
     .env_remove("EMAIL")
     .env_remove("MFLAGS")
     .env_remove("MAKEFLAGS")
     .env_remove("CARGO_MAKEFLAGS")
     .env_remove("GIT_AUTHOR_NAME")
     .env_remove("GIT_AUTHOR_EMAIL")
     .env_remove("GIT_COMMITTER_NAME")
     .env_remove("GIT_COMMITTER_EMAIL")
     .env_remove("CARGO_TARGET_DIR")     // we assume 'target'
     .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows
    p
}
+
/// Helper trait for pretending the cargo binary under test is a nightly
/// build, which unlocks unstable (`-Z`) features in tests.
pub trait ChannelChanger: Sized {
    fn masquerade_as_nightly_cargo(&mut self) -> &mut Self;
}

impl ChannelChanger for cargo::util::ProcessBuilder {
    fn masquerade_as_nightly_cargo(&mut self) -> &mut Self {
        // Overrides the "stable" value set by `_process` for this one process.
        self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly")
    }
}
+
+fn split_and_add_args(p: &mut ProcessBuilder, s: &str) {
+    for arg in s.split_whitespace() {
+        if arg.contains('"') || arg.contains('\'') {
+            panic!("shell-style argument parsing is not supported")
+        }
+        p.arg(arg);
+    }
+}
+
/// Runs the cargo binary under test with the whitespace-separated arguments
/// in `s`, returning an `Execs` for asserting on its output.
pub fn cargo_process(s: &str) -> Execs {
    let mut p = process(&cargo_exe());
    split_and_add_args(&mut p, s);
    execs().with_process_builder(p)
}
+
/// Builds a sandboxed `git` invocation with the whitespace-separated
/// arguments in `s`.
pub fn git_process(s: &str) -> ProcessBuilder {
    let mut p = process("git");
    split_and_add_args(&mut p, s);
    p
}
+
/// Suspends the current thread for `ms` milliseconds.
pub fn sleep_ms(ms: u64) {
    let pause = ::std::time::Duration::from_millis(ms);
    ::std::thread::sleep(pause);
}
diff --git a/tests/testsuite/support/paths.rs b/tests/testsuite/support/paths.rs
new file mode 100644 (file)
index 0000000..a5544ba
--- /dev/null
@@ -0,0 +1,165 @@
+use std::cell::Cell;
+use std::env;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
+use std::sync::{Once, ONCE_INIT};
+
+use filetime::{self, FileTime};
+
// Name of the directory (under `target/`) that holds all test sandboxes.
static CARGO_INTEGRATION_TEST_DIR: &'static str = "cit";
// Monotonic counter handing out one unique id per test thread.
static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT;

// Each test thread claims an id on first use; it names that thread's sandbox.
thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst));
+
/// Lazily prepares the sandbox: creates the global root once per process,
/// and — once per thread/test — wipes this thread's root and creates a
/// fresh home directory inside it.
fn init() {
    static GLOBAL_INIT: Once = ONCE_INIT;
    thread_local!(static LOCAL_INIT: Cell<bool> = Cell::new(false));
    GLOBAL_INIT.call_once(|| {
        global_root().mkdir_p();
    });
    LOCAL_INIT.with(|i| {
        // Only reset the sandbox the first time this thread asks for it.
        if i.get() {
            return;
        }
        i.set(true);
        root().rm_rf();
        home().mkdir_p();
    })
}
+
/// The global root directory (`target/cit` or `target/$target/cit`) that
/// holds every per-test sandbox for this test binary.
fn global_root() -> PathBuf {
    let mut path = t!(env::current_exe());
    path.pop(); // chop off exe name
    path.pop(); // chop off 'debug'

    // If `cargo test` is run manually then our path looks like
    // `target/debug/foo`, in which case our `path` is already pointing at
    // `target`. If, however, `cargo test --target $target` is used then the
    // output is `target/$target/debug/foo`, so our path is pointing at
    // `target/$target`. Here we conditionally pop the `$target` name.
    if path.file_name().and_then(|s| s.to_str()) != Some("target") {
        path.pop();
    }

    path.join(CARGO_INTEGRATION_TEST_DIR)
}
+
/// This thread's private sandbox directory (`…/cit/t<N>`), initialized fresh
/// on first use.
pub fn root() -> PathBuf {
    init();
    global_root().join(&TASK_ID.with(|my_id| format!("t{}", my_id)))
}

/// The sandboxed `$HOME` directory inside `root()`.
pub fn home() -> PathBuf {
    root().join("home")
}
+
/// Filesystem helpers used throughout the test suite.
pub trait CargoPathExt {
    /// Recursively deletes the path; a no-op if it does not exist.
    fn rm_rf(&self);
    /// `mkdir -p`: creates the directory and any missing parents.
    fn mkdir_p(&self);

    /// Rewinds all mtimes under the path by one hour, to defeat
    /// freshness/fingerprint checks in tests.
    fn move_into_the_past(&self) {
        self.move_in_time(|sec, nsec| (sec - 3600, nsec))
    }

    /// Advances all mtimes under the path by one hour.
    fn move_into_the_future(&self) {
        self.move_in_time(|sec, nsec| (sec + 3600, nsec))
    }

    /// Applies `travel_amount` to the `(seconds, nanoseconds)` mtime of the
    /// path — or of every file beneath it, skipping `target/`.
    fn move_in_time<F>(&self, travel_amount: F)
    where
        F: Fn(i64, u32) -> (i64, u32);
}
+
+impl CargoPathExt for Path {
+    /* Technically there is a potential race condition, but we don't
+     * care all that much for our tests
+     */
+    fn rm_rf(&self) {
+        if !self.exists() {
+            return;
+        }
+
+        for file in t!(fs::read_dir(self)) {
+            let file = t!(file);
+            if file.file_type().map(|m| m.is_dir()).unwrap_or(false) {
+                file.path().rm_rf();
+            } else {
+                // On windows we can't remove a readonly file, and git will
+                // often clone files as readonly. As a result, we have some
+                // special logic to remove readonly files on windows.
+                do_op(&file.path(), "remove file", |p| fs::remove_file(p));
+            }
+        }
+        do_op(self, "remove dir", |p| fs::remove_dir(p));
+    }
+
+    fn mkdir_p(&self) {
+        fs::create_dir_all(self)
+            .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e))
+    }
+
+    fn move_in_time<F>(&self, travel_amount: F)
+    where
+        F: Fn(i64, u32) -> ((i64, u32)),
+    {
+        if self.is_file() {
+            time_travel(self, &travel_amount);
+        } else {
+            recurse(self, &self.join("target"), &travel_amount);
+        }
+
+        fn recurse<F>(p: &Path, bad: &Path, travel_amount: &F)
+        where
+            F: Fn(i64, u32) -> ((i64, u32)),
+        {
+            if p.is_file() {
+                time_travel(p, travel_amount)
+            } else if !p.starts_with(bad) {
+                for f in t!(fs::read_dir(p)) {
+                    let f = t!(f).path();
+                    recurse(&f, bad, travel_amount);
+                }
+            }
+        }
+
+        fn time_travel<F>(path: &Path, travel_amount: &F)
+        where
+            F: Fn(i64, u32) -> ((i64, u32)),
+        {
+            let stat = t!(path.metadata());
+
+            let mtime = FileTime::from_last_modification_time(&stat);
+
+            let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds());
+            let newtime = FileTime::from_unix_time(sec, nsec);
+
+            // Sadly change_file_times has a failure mode where a readonly file
+            // cannot have its times changed on windows.
+            do_op(path, "set file times", |path| {
+                filetime::set_file_times(path, newtime, newtime)
+            });
+        }
+    }
+}
+
+fn do_op<F>(path: &Path, desc: &str, mut f: F)
+where
+    F: FnMut(&Path) -> io::Result<()>,
+{
+    match f(path) {
+        Ok(()) => {}
+        Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => {
+            let mut p = t!(path.metadata()).permissions();
+            p.set_readonly(false);
+            t!(fs::set_permissions(path, p));
+            f(path).unwrap_or_else(|e| {
+                panic!("failed to {} {}: {}", desc, path.display(), e);
+            })
+        }
+        Err(e) => {
+            panic!("failed to {} {}: {}", desc, path.display(), e);
+        }
+    }
+}
diff --git a/tests/testsuite/support/publish.rs b/tests/testsuite/support/publish.rs
new file mode 100644 (file)
index 0000000..e155669
--- /dev/null
@@ -0,0 +1,61 @@
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::PathBuf;
+
+use support::git::{repo, Repository};
+use support::paths;
+
+use url::Url;
+
/// Prepares the sandbox for publish tests: writes a `.cargo/config` with a
/// registry token and an `alternative` registry, writes credentials for the
/// alternative registry, creates the upload API directories, and initializes
/// the registry git repo whose `config.json` points `dl`/`api` at the local
/// upload path.
pub fn setup() -> Repository {
    let config = paths::root().join(".cargo/config");
    t!(fs::create_dir_all(config.parent().unwrap()));
    t!(t!(File::create(&config)).write_all(
        format!(
            r#"
        [registry]
        token = "api-token"

        [registries.alternative]
        index = "{registry}"
    "#,
            registry = registry().to_string()
        ).as_bytes()
    ));

    let credentials = paths::root().join("home/.cargo/credentials");
    t!(fs::create_dir_all(credentials.parent().unwrap()));
    t!(t!(File::create(&credentials)).write_all(
        br#"
        [registries.alternative]
        token = "api-token"
    "#
    ));

    t!(fs::create_dir_all(&upload_path().join("api/v1/crates")));

    // The registry's config.json points both downloads and the API at the
    // local upload directory.
    repo(&registry_path())
        .file(
            "config.json",
            &format!(
                r#"{{
            "dl": "{0}",
            "api": "{0}"
        }}"#,
                upload()
            ),
        ).build()
}
+
/// Path of the local registry git repository.
pub fn registry_path() -> PathBuf {
    paths::root().join("registry")
}
/// `file://` URL of the local registry.
pub fn registry() -> Url {
    Url::from_file_path(&*registry_path()).ok().unwrap()
}
/// Directory that stands in for the registry's upload/API endpoint.
pub fn upload_path() -> PathBuf {
    paths::root().join("upload")
}
/// `file://` URL of the upload endpoint.
fn upload() -> Url {
    Url::from_file_path(&*upload_path()).ok().unwrap()
}
diff --git a/tests/testsuite/support/registry.rs b/tests/testsuite/support/registry.rs
new file mode 100644 (file)
index 0000000..9fda183
--- /dev/null
@@ -0,0 +1,547 @@
+use std::collections::HashMap;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use cargo::util::Sha256;
+use flate2::write::GzEncoder;
+use flate2::Compression;
+use git2;
+use hex;
+use tar::{Builder, Header};
+use url::Url;
+
+use support::git::repo;
+use support::paths;
+
/// Path of the primary (crates.io replacement) registry git repo.
pub fn registry_path() -> PathBuf {
    paths::root().join("registry")
}
/// `file://` URL of the primary registry.
pub fn registry() -> Url {
    Url::from_file_path(&*registry_path()).ok().unwrap()
}
/// Directory standing in for the primary registry's web API.
pub fn api_path() -> PathBuf {
    paths::root().join("api")
}
/// Directory that the primary registry's `.crate` downloads live in.
pub fn dl_path() -> PathBuf {
    paths::root().join("dl")
}
/// `file://` URL of the primary registry's download endpoint.
pub fn dl_url() -> Url {
    Url::from_file_path(&*dl_path()).ok().unwrap()
}
/// Path of the "alternative" registry git repo.
pub fn alt_registry_path() -> PathBuf {
    paths::root().join("alternative-registry")
}
/// `file://` URL of the alternative registry.
pub fn alt_registry() -> Url {
    Url::from_file_path(&*alt_registry_path()).ok().unwrap()
}
/// Download directory for the alternative registry.
pub fn alt_dl_path() -> PathBuf {
    paths::root().join("alt_dl")
}
/// Download URL template for the alternative registry, with literal
/// `{crate}`/`{version}` placeholders for cargo to substitute.
pub fn alt_dl_url() -> String {
    let base = Url::from_file_path(&*alt_dl_path()).ok().unwrap();
    format!("{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate", base)
}
/// Directory standing in for the alternative registry's web API.
pub fn alt_api_path() -> PathBuf {
    paths::root().join("alt_api")
}
/// `file://` URL of the alternative registry's API.
pub fn alt_api_url() -> Url {
    Url::from_file_path(&*alt_api_path()).ok().unwrap()
}
+
+/// A builder for creating a new package in a registry.
+///
+/// This uses "source replacement" using an automatically generated
+/// `.cargo/config` file to ensure that dependencies will use these packages
+/// instead of contacting crates.io. See `source-replacement.md` for more
+/// details on how source replacement works.
+///
+/// Call `publish` to finalize and create the package.
+///
+/// If no files are specified, an empty `lib.rs` file is automatically created.
+///
+/// The `Cargo.toml` file is automatically generated based on the methods
+/// called on `Package` (for example, calling `dep()` will add to the
+/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file
+/// to override the generated one.
+///
+/// This supports different registry types:
+/// - Regular source replacement that replaces `crates.io` (the default).
+/// - A "local registry" which is a subset for vendoring (see
+///   `Package::local`).
+/// - An "alternative registry" which requires specifying the registry name
+///   (see `Package::alternative`).
+///
+/// This does not support "directory sources". See `directory.rs` for
+/// `VendorPackage` which implements directory sources.
+///
+/// # Example
+/// ```
+/// // Publish package "a" depending on "b".
+/// Package::new("a", "1.0.0")
+///     .dep("b", "1.0.0")
+///     .file("src/lib.rs", r#"
+///         extern crate b;
+///         pub fn f() -> i32 { b::f() * 2 }
+///     "#)
+///     .publish();
+///
+/// // Publish package "b".
+/// Package::new("b", "1.0.0")
+///     .file("src/lib.rs", r#"
+///         pub fn f() -> i32 { 12 }
+///     "#)
+///     .publish();
+///
+/// // Create a project that uses package "a".
+/// let p = project()
+///     .file("Cargo.toml", r#"
+///         [package]
+///         name = "foo"
+///         version = "0.0.1"
+///
+///         [dependencies]
+///         a = "1.0"
+///     "#)
+///     .file("src/main.rs", r#"
+///         extern crate a;
+///         fn main() { println!("{}", a::f()); }
+///     "#)
+///     .build();
+///
+/// p.cargo("run").with_stdout("24").run();
+/// ```
pub struct Package {
    name: String,
    vers: String,
    deps: Vec<Dependency>,
    // Files placed inside the `$PACKAGE-$VERSION/` prefix of the archive.
    files: Vec<(String, String)>,
    // Files placed at the archive root, outside `$PACKAGE-$VERSION/`.
    extra_files: Vec<(String, String)>,
    yanked: bool,
    // `[features]` table: feature name -> enabled deps/features.
    features: HashMap<String, Vec<String>>,
    // Publish into a "local registry" layout instead of source replacement.
    local: bool,
    // Publish into the registry named "alternative".
    alternative: bool,
}
+
/// A dependency entry rendered into a test `Package`'s index JSON and its
/// generated `Cargo.toml`.
#[derive(Clone)]
pub struct Dependency {
    name: String,
    // Version requirement string, e.g. "1.0".
    vers: String,
    // One of "normal", "build", or "dev".
    kind: String,
    // Optional cfg/triple for `[target.'...'.dependencies]`.
    target: Option<String>,
    features: Vec<String>,
    registry: Option<String>,
    // `package = "..."` rename, if any.
    package: Option<String>,
    optional: bool,
}
+
/// Initializes the test registries (idempotent): writes a `.cargo/config`
/// that replaces crates.io with the local dummy registry and declares the
/// `alternative` registry, then creates both registry git repos with
/// `config.json` files pointing `dl`/`api` at local paths.
pub fn init() {
    let config = paths::home().join(".cargo/config");
    t!(fs::create_dir_all(config.parent().unwrap()));
    // Already initialized by an earlier Package in this sandbox.
    if fs::metadata(&config).is_ok() {
        return;
    }
    t!(t!(File::create(&config)).write_all(
        format!(
            r#"
        [registry]
        token = "api-token"

        [source.crates-io]
        registry = 'https://wut'
        replace-with = 'dummy-registry'

        [source.dummy-registry]
        registry = '{reg}'

        [registries.alternative]
        index = '{alt}'
    "#,
            reg = registry(),
            alt = alt_registry()
        ).as_bytes()
    ));

    // Init a new registry
    let _ = repo(&registry_path())
        .file(
            "config.json",
            &format!(
                r#"
            {{"dl":"{0}","api":"{0}"}}
        "#,
                dl_url()
            ),
        ).build();
    fs::create_dir_all(dl_path().join("api/v1/crates")).unwrap();

    // Init an alt registry
    repo(&alt_registry_path())
        .file(
            "config.json",
            &format!(
                r#"
            {{"dl":"{}","api":"{}"}}
        "#,
                alt_dl_url(),
                alt_api_url()
            ),
        ).build();
    fs::create_dir_all(alt_api_path().join("api/v1/crates")).unwrap();
}
+
impl Package {
    /// Create a new package builder.
    /// Call `publish()` to finalize and build the package.
    pub fn new(name: &str, vers: &str) -> Package {
        init();
        Package {
            name: name.to_string(),
            vers: vers.to_string(),
            deps: Vec::new(),
            files: Vec::new(),
            extra_files: Vec::new(),
            yanked: false,
            features: HashMap::new(),
            local: false,
            alternative: false,
        }
    }

    /// Call with `true` to publish in a "local registry".
    ///
    /// See `source-replacement.html#local-registry-sources` for more details
    /// on local registries. See `local_registry.rs` for the tests that use
    /// this.
    pub fn local(&mut self, local: bool) -> &mut Package {
        self.local = local;
        self
    }

    /// Call with `true` to publish in an "alternative registry".
    ///
    /// The name of the alternative registry is called "alternative".
    ///
    /// See `unstable.html#alternate-registries` for more details on
    /// alternative registries. See `alt_registry.rs` for the tests that use
    /// this.
    pub fn alternative(&mut self, alternative: bool) -> &mut Package {
        self.alternative = alternative;
        self
    }

    /// Add a file to the package.
    pub fn file(&mut self, name: &str, contents: &str) -> &mut Package {
        self.files.push((name.to_string(), contents.to_string()));
        self
    }

    /// Add an "extra" file that is not rooted within the package.
    ///
    /// Normal files are automatically placed within a directory named
    /// `$PACKAGE-$VERSION`. This allows you to override that behavior,
    /// typically for testing invalid behavior.
    pub fn extra_file(&mut self, name: &str, contents: &str) -> &mut Package {
        self.extra_files
            .push((name.to_string(), contents.to_string()));
        self
    }

    /// Add a normal dependency. Example:
    /// ```
    /// [dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(&Dependency::new(name, vers))
    }

    /// Add a dependency with the given feature. Example:
    /// ```
    /// [dependencies]
    /// foo = {version = "1.0", "features": ["feat1", "feat2"]}
    /// ```
    pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).enable_features(features))
    }

    /// Add a platform-specific dependency. Example:
    /// ```
    /// [target.'cfg(windows)'.dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).target(target))
    }

    /// Add a dependency to an alternative registry.
    /// The given registry should be a URI to the alternative registry.
    pub fn registry_dep(&mut self, name: &str, vers: &str, registry: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).registry(registry))
    }

    /// Add a dev-dependency. Example:
    /// ```
    /// [dev-dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).dev())
    }

    /// Add a build-dependency. Example:
    /// ```
    /// [build-dependencies]
    /// foo = {version = "1.0"}
    /// ```
    pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package {
        self.add_dep(Dependency::new(name, vers).build())
    }

    /// Add a fully-configured dependency (see `Dependency`'s builder methods).
    pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package {
        self.deps.push(dep.clone());
        self
    }

    /// Specify whether or not the package is "yanked".
    pub fn yanked(&mut self, yanked: bool) -> &mut Package {
        self.yanked = yanked;
        self
    }

    /// Add an entry in the `[features]` section
    pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package {
        let deps = deps.iter().map(|s| s.to_string()).collect();
        self.features.insert(name.to_string(), deps);
        self
    }

    /// Create the package and place it in the registry.
    ///
    /// This does not actually use Cargo's publishing system, but instead
    /// manually creates the entry in the registry on the filesystem.
    ///
    /// Returns the checksum for the package.
    pub fn publish(&self) -> String {
        self.make_archive();

        // Figure out what we're going to write into the index
        let deps = self
            .deps
            .iter()
            .map(|dep| {
                json!({
                    "name": dep.name,
                    "req": dep.vers,
                    "features": dep.features,
                    "default_features": true,
                    "target": dep.target,
                    "optional": dep.optional,
                    "kind": dep.kind,
                    "registry": dep.registry,
                    "package": dep.package,
                })
            }).collect::<Vec<_>>();
        let cksum = {
            let mut c = Vec::new();
            t!(t!(File::open(&self.archive_dst())).read_to_end(&mut c));
            cksum(&c)
        };
        let line = json!({
            "name": self.name,
            "vers": self.vers,
            "deps": deps,
            "cksum": cksum,
            "features": self.features,
            "yanked": self.yanked,
        }).to_string();

        // Index entries are sharded by crate-name length/prefix, mirroring
        // the layout of the crates.io index.
        let file = match self.name.len() {
            1 => format!("1/{}", self.name),
            2 => format!("2/{}", self.name),
            3 => format!("3/{}/{}", &self.name[..1], self.name),
            _ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name),
        };

        let registry_path = if self.alternative {
            alt_registry_path()
        } else {
            registry_path()
        };

        // Write file/line in the index
        let dst = if self.local {
            registry_path.join("index").join(&file)
        } else {
            registry_path.join(&file)
        };
        // Append to any index lines already published for other versions
        // of this crate (a missing file just leaves `prev` empty).
        let mut prev = String::new();
        let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev));
        t!(fs::create_dir_all(dst.parent().unwrap()));
        t!(t!(File::create(&dst)).write_all((prev + &line[..] + "\n").as_bytes()));

        // Add the new file to the index
        if !self.local {
            let repo = t!(git2::Repository::open(&registry_path));
            let mut index = t!(repo.index());
            t!(index.add_path(Path::new(&file)));
            t!(index.write());
            let id = t!(index.write_tree());

            // Commit this change
            let tree = t!(repo.find_tree(id));
            let sig = t!(repo.signature());
            let parent = t!(repo.refname_to_id("refs/heads/master"));
            let parent = t!(repo.find_commit(parent));
            t!(repo.commit(
                Some("HEAD"),
                &sig,
                &sig,
                "Another commit",
                &tree,
                &[&parent]
            ));
        }

        cksum
    }

    /// Build the `.crate` archive at `archive_dst()`: a gzipped tar holding
    /// the generated `Cargo.toml`, the registered files, and any extra files.
    fn make_archive(&self) {
        let mut manifest = format!(
            r#"
            [package]
            name = "{}"
            version = "{}"
            authors = []
        "#,
            self.name, self.vers
        );
        for dep in self.deps.iter() {
            let target = match dep.target {
                None => String::new(),
                Some(ref s) => format!("target.'{}'.", s),
            };
            let kind = match &dep.kind[..] {
                "build" => "build-",
                "dev" => "dev-",
                _ => "",
            };
            manifest.push_str(&format!(
                r#"
                [{}{}dependencies.{}]
                version = "{}"
            "#,
                target, kind, dep.name, dep.vers
            ));
        }

        let dst = self.archive_dst();
        t!(fs::create_dir_all(dst.parent().unwrap()));
        let f = t!(File::create(&dst));
        let mut a = Builder::new(GzEncoder::new(f, Compression::default()));
        // A user-supplied Cargo.toml in `files` is appended after this
        // generated one and overrides it when the archive is unpacked.
        self.append(&mut a, "Cargo.toml", &manifest);
        if self.files.is_empty() {
            self.append(&mut a, "src/lib.rs", "");
        } else {
            for &(ref name, ref contents) in self.files.iter() {
                self.append(&mut a, name, contents);
            }
        }
        for &(ref name, ref contents) in self.extra_files.iter() {
            self.append_extra(&mut a, name, contents);
        }
    }

    /// Append `contents` to the archive under the conventional
    /// `$name-$version/` directory prefix.
    fn append<W: Write>(&self, ar: &mut Builder<W>, file: &str, contents: &str) {
        self.append_extra(
            ar,
            &format!("{}-{}/{}", self.name, self.vers, file),
            contents,
        );
    }

    /// Append `contents` to the archive at `path` verbatim (no directory
    /// prefix added).
    fn append_extra<W: Write>(&self, ar: &mut Builder<W>, path: &str, contents: &str) {
        let mut header = Header::new_ustar();
        header.set_size(contents.len() as u64);
        t!(header.set_path(path));
        header.set_cksum();
        t!(ar.append(&header, contents.as_bytes()));
    }

    /// Returns the path to the compressed package file.
    pub fn archive_dst(&self) -> PathBuf {
        if self.local {
            // Local registries keep `.crate` files next to the index.
            registry_path().join(format!("{}-{}.crate", self.name, self.vers))
        } else if self.alternative {
            alt_dl_path()
                .join(&self.name)
                .join(&self.vers)
                .join(&format!("{}-{}.crate", self.name, self.vers))
        } else {
            dl_path().join(&self.name).join(&self.vers).join("download")
        }
    }
}
+
/// Hex-encoded SHA-256 checksum of `s` — the format the registry index
/// expects in its `cksum` field.
pub fn cksum(s: &[u8]) -> String {
    let mut sha = Sha256::new();
    sha.update(s);
    hex::encode(&sha.finish())
}
+
impl Dependency {
    /// Create a plain `[dependencies]` entry with no target, features,
    /// registry, or rename configured.
    pub fn new(name: &str, vers: &str) -> Dependency {
        Dependency {
            name: name.to_string(),
            vers: vers.to_string(),
            kind: "normal".to_string(),
            target: None,
            features: Vec::new(),
            package: None,
            optional: false,
            registry: None,
        }
    }

    /// Change this to `[build-dependencies]`
    pub fn build(&mut self) -> &mut Self {
        self.kind = "build".to_string();
        self
    }

    /// Change this to `[dev-dependencies]`
    pub fn dev(&mut self) -> &mut Self {
        self.kind = "dev".to_string();
        self
    }

    /// Change this to `[target.$target.dependencies]`
    pub fn target(&mut self, target: &str) -> &mut Self {
        self.target = Some(target.to_string());
        self
    }

    /// Add `registry = $registry` to this dependency
    pub fn registry(&mut self, registry: &str) -> &mut Self {
        self.registry = Some(registry.to_string());
        self
    }

    /// Add `features = [ ... ]` to this dependency
    pub fn enable_features(&mut self, features: &[&str]) -> &mut Self {
        self.features.extend(features.iter().map(|s| s.to_string()));
        self
    }

    /// Add `package = ...` to this dependency
    pub fn package(&mut self, pkg: &str) -> &mut Self {
        self.package = Some(pkg.to_string());
        self
    }

    /// Change this to an optional dependency
    pub fn optional(&mut self, optional: bool) -> &mut Self {
        self.optional = optional;
        self
    }
}
diff --git a/tests/testsuite/support/resolver.rs b/tests/testsuite/support/resolver.rs
new file mode 100644 (file)
index 0000000..9b97daf
--- /dev/null
@@ -0,0 +1,500 @@
+use std::cmp::PartialEq;
+use std::cmp::{max, min};
+use std::collections::{BTreeMap, HashSet};
+use std::fmt;
+use std::time::{Duration, Instant};
+
+use cargo::core::dependency::Kind;
+use cargo::core::resolver::{self, Method};
+use cargo::core::source::{GitReference, SourceId};
+use cargo::core::Resolve;
+use cargo::core::{Dependency, PackageId, Registry, Summary};
+use cargo::util::{CargoResult, Config, ToUrl};
+
+use proptest::collection::{btree_map, vec};
+use proptest::prelude::*;
+use proptest::sample::Index;
+use proptest::strategy::ValueTree;
+use proptest::string::string_regex;
+use proptest::test_runner::TestRunner;
+
+pub fn resolve(
+    pkg: &PackageId,
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+) -> CargoResult<Vec<PackageId>> {
+    resolve_with_config(pkg, deps, registry, None)
+}
+
+pub fn resolve_and_validated(
+    pkg: &PackageId,
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+) -> CargoResult<Vec<PackageId>> {
+    let resolve = resolve_with_config_raw(pkg, deps, registry, None)?;
+    let mut stack = vec![pkg.clone()];
+    let mut used = HashSet::new();
+    let mut links = HashSet::new();
+    while let Some(p) = stack.pop() {
+        assert!(resolve.contains(&p));
+        if used.insert(p.clone()) {
+            // in the tests all `links` crates end in `-sys`
+            if p.name().ends_with("-sys") {
+                assert!(links.insert(p.name()));
+            }
+            stack.extend(resolve.deps(&p).map(|(dp, deps)| {
+                for d in deps {
+                    assert!(d.matches_id(dp));
+                }
+                dp.clone()
+            }));
+        }
+    }
+    let out: Vec<PackageId> = resolve.iter().cloned().collect();
+    assert_eq!(out.len(), used.len());
+    Ok(out)
+}
+
+pub fn resolve_with_config(
+    pkg: &PackageId,
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+    config: Option<&Config>,
+) -> CargoResult<Vec<PackageId>> {
+    let resolve = resolve_with_config_raw(pkg, deps, registry, config)?;
+    let out: Vec<PackageId> = resolve.iter().cloned().collect();
+    Ok(out)
+}
+
+pub fn resolve_with_config_raw(
+    pkg: &PackageId,
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+    config: Option<&Config>,
+) -> CargoResult<Resolve> {
+    struct MyRegistry<'a>(&'a [Summary]);
+    impl<'a> Registry for MyRegistry<'a> {
+        fn query(
+            &mut self,
+            dep: &Dependency,
+            f: &mut FnMut(Summary),
+            fuzzy: bool,
+        ) -> CargoResult<()> {
+            for summary in self.0.iter() {
+                if fuzzy || dep.matches(summary) {
+                    f(summary.clone());
+                }
+            }
+            Ok(())
+        }
+
+        fn describe_source(&self, _src: &SourceId) -> String {
+            String::new()
+        }
+
+        fn is_replaced(&self, _src: &SourceId) -> bool {
+            false
+        }
+    }
+    let mut registry = MyRegistry(registry);
+    let summary = Summary::new(
+        pkg.clone(),
+        deps,
+        &BTreeMap::<String, Vec<String>>::new(),
+        None::<String>,
+        false,
+    )
+    .unwrap();
+    let method = Method::Everything;
+    let start = Instant::now();
+    let resolve = resolver::resolve(
+        &[(summary, method)],
+        &[],
+        &mut registry,
+        &HashSet::new(),
+        config,
+        false,
+    );
+
+    // The largest test in our suite takes less than 30 sec.
+    // So let's fail the test if we have been running for too long.
+    assert!(start.elapsed() < Duration::from_secs(60));
+    resolve
+}
+
+pub trait ToDep {
+    fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+    fn to_dep(self) -> Dependency {
+        Dependency::parse_no_deprecated(self, Some("1.0.0"), &registry_loc()).unwrap()
+    }
+}
+
+impl ToDep for Dependency {
+    fn to_dep(self) -> Dependency {
+        self
+    }
+}
+
+pub trait ToPkgId {
+    fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for PackageId {
+    fn to_pkgid(&self) -> PackageId {
+        self.clone()
+    }
+}
+
+impl<'a> ToPkgId for &'a str {
+    fn to_pkgid(&self) -> PackageId {
+        PackageId::new(*self, "1.0.0", &registry_loc()).unwrap()
+    }
+}
+
+impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
+    fn to_pkgid(&self) -> PackageId {
+        let (name, vers) = self;
+        PackageId::new(name.as_ref(), vers.as_ref(), &registry_loc()).unwrap()
+    }
+}
+
+macro_rules! pkg {
+    ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({
+        let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+        pkg_dep($pkgid, d)
+    });
+
+    ($pkgid:expr) => ({
+        pkg($pkgid)
+    })
+}
+
+fn registry_loc() -> SourceId {
+    lazy_static! {
+        static ref EXAMPLE_DOT_COM: SourceId =
+            SourceId::for_registry(&"http://example.com".to_url().unwrap()).unwrap();
+    }
+    EXAMPLE_DOT_COM.clone()
+}
+
+pub fn pkg<T: ToPkgId>(name: T) -> Summary {
+    pkg_dep(name, Vec::new())
+}
+
+pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
+    let pkgid = name.to_pkgid();
+    let link = if pkgid.name().ends_with("-sys") {
+        Some(pkgid.name().as_str())
+    } else {
+        None
+    };
+    Summary::new(
+        name.to_pkgid(),
+        dep,
+        &BTreeMap::<String, Vec<String>>::new(),
+        link,
+        false,
+    )
+    .unwrap()
+}
+
+pub fn pkg_id(name: &str) -> PackageId {
+    PackageId::new(name, "1.0.0", &registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+    let remote = loc.to_url();
+    let master = GitReference::Branch("master".to_string());
+    let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
+
+    PackageId::new(name, "1.0.0", &source_id).unwrap()
+}
+
+pub fn pkg_loc(name: &str, loc: &str) -> Summary {
+    let link = if name.ends_with("-sys") {
+        Some(name)
+    } else {
+        None
+    };
+    Summary::new(
+        pkg_id_loc(name, loc),
+        Vec::new(),
+        &BTreeMap::<String, Vec<String>>::new(),
+        link,
+        false,
+    )
+    .unwrap()
+}
+
+pub fn dep(name: &str) -> Dependency {
+    dep_req(name, "1.0.0")
+}
+pub fn dep_req(name: &str, req: &str) -> Dependency {
+    Dependency::parse_no_deprecated(name, Some(req), &registry_loc()).unwrap()
+}
+
+pub fn dep_loc(name: &str, location: &str) -> Dependency {
+    let url = location.to_url().unwrap();
+    let master = GitReference::Branch("master".to_string());
+    let source_id = SourceId::for_git(&url, master).unwrap();
+    Dependency::parse_no_deprecated(name, Some("1.0.0"), &source_id).unwrap()
+}
+pub fn dep_kind(name: &str, kind: Kind) -> Dependency {
+    dep(name).set_kind(kind).clone()
+}
+
+pub fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
+    pkgs
+}
+
+pub fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
+    names.iter().map(|name| name.to_pkgid()).collect()
+}
+
+pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
+    names
+        .iter()
+        .map(|&(name, loc)| pkg_id_loc(name, loc))
+        .collect()
+}
+
+/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.
+/// This replaces it with a representation that uses constructors from this file.
+///
+/// If `registry_strategy` is improved to modify more fields,
+/// then this needs to be updated to display the corresponding constructor.
+pub struct PrettyPrintRegistry(pub Vec<Summary>);
+
+impl fmt::Debug for PrettyPrintRegistry {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "vec![")?;
+        for s in &self.0 {
+            if s.dependencies().is_empty() {
+                write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?;
+            } else {
+                write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?;
+                for d in s.dependencies() {
+                    write!(
+                        f,
+                        "dep_req(\"{}\", \"{}\"),",
+                        d.name_in_toml(),
+                        d.version_req()
+                    )?;
+                }
+                write!(f, "]),")?;
+            }
+        }
+        write!(f, "]")
+    }
+}
+
+#[test]
+fn meta_test_deep_pretty_print_registry() {
+    assert_eq!(
+        &format!(
+            "{:?}",
+            PrettyPrintRegistry(vec![
+                pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+                pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+                pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+                                  dep_req("other", "1")]),
+                pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+                pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+                pkg!(("baz", "1.0.1")),
+                pkg!(("dep_req", "1.0.0")),
+                pkg!(("dep_req", "2.0.0")),
+            ])
+        ),
+        "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\
+         pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\
+         pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"= 1.0.2\"),dep_req(\"other\", \"^1\"),]),\
+         pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"= 1.0.1\"),]),\
+         pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\
+         pkg!((\"baz\", \"1.0.1\")),pkg!((\"dep_req\", \"1.0.0\")),\
+         pkg!((\"dep_req\", \"2.0.0\")),]"
+    )
+}
+
+/// This generates a random registry index.
+/// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..)
+/// This strategy has a high probability of having valid dependencies
+pub fn registry_strategy(
+    max_crates: usize,
+    max_versions: usize,
+    shrinkage: usize,
+) -> impl Strategy<Value = PrettyPrintRegistry> {
+    let name = string_regex("[A-Za-z_-][A-Za-z0-9_-]*(-sys)?").unwrap();
+
+    let raw_version = [..max_versions; 3];
+    let version_from_raw = |v: &[usize; 3]| format!("{}.{}.{}", v[0], v[1], v[2]);
+
+    // If this is false then the crate will depend on the nonexistent "bad"
+    // instead of the complex set we generated for it.
+    let allow_deps = prop::bool::weighted(0.99);
+
+    let list_of_versions =
+        btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| {
+            ver.into_iter()
+                .map(|a| (version_from_raw(&a.0), a.1))
+                .collect::<Vec<_>>()
+        });
+
+    let list_of_crates_with_versions =
+        btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| {
+            // root is the name of the thing being compiled
+            // so it would be confusing to have it in the index
+            vers.remove("root");
+            // bad is a name reserved for a dep that won't work
+            vers.remove("bad");
+            vers
+        });
+
+    // Each version of each crate can depend on each crate smaller than it.
+    // In theory shrinkage should be 2, but in practice we get better trees with a larger value.
+    let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;
+
+    let raw_version_range = (any::<Index>(), any::<Index>());
+    let raw_dependency = (any::<Index>(), any::<Index>(), raw_version_range);
+
+    fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
+        let (a, b) = (a.index(size), b.index(size));
+        (min(a, b), max(a, b))
+    }
+
+    let list_of_raw_dependency = vec(raw_dependency, ..=max_deps);
+
+    (list_of_crates_with_versions, list_of_raw_dependency).prop_map(
+        |(crate_vers_by_name, raw_dependencies)| {
+            let list_of_pkgid: Vec<_> = crate_vers_by_name
+                .iter()
+                .flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1)))
+                .collect();
+            let len_all_pkgid = list_of_pkgid.len();
+            let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid];
+            for (a, b, (c, d)) in raw_dependencies {
+                let (a, b) = order_index(a, b, len_all_pkgid);
+                let ((dep_name, _), _) = list_of_pkgid[a];
+                if (list_of_pkgid[b].0).0 == dep_name {
+                    continue;
+                }
+                let s = &crate_vers_by_name[dep_name];
+                let (c, d) = order_index(c, d, s.len());
+
+                dependency_by_pkgid[b].push(dep_req(
+                    &dep_name,
+                    &if c == d {
+                        format!("={}", s[c].0)
+                    } else {
+                        format!(">={}, <={}", s[c].0, s[d].0)
+                    },
+                ))
+            }
+
+            PrettyPrintRegistry(
+                list_of_pkgid
+                    .into_iter()
+                    .zip(dependency_by_pkgid.into_iter())
+                    .map(|(((name, ver), allow_deps), deps)| {
+                        pkg_dep(
+                            (name, ver).to_pkgid(),
+                            if !allow_deps {
+                                vec![dep_req("bad", "*")]
+                            } else {
+                                let mut deps = deps;
+                                deps.sort_by_key(|d| d.name_in_toml());
+                                deps.dedup_by_key(|d| d.name_in_toml());
+                                deps
+                            },
+                        )
+                    })
+                    .collect(),
+            )
+        },
+    )
+}
+
+/// This test is to test the generator to ensure
+/// that it makes registries with large dependency trees
+#[test]
+fn meta_test_deep_trees_from_strategy() {
+    let mut dis = [0; 21];
+
+    let strategy = registry_strategy(50, 20, 60);
+    for _ in 0..64 {
+        let PrettyPrintRegistry(input) = strategy
+            .new_tree(&mut TestRunner::default())
+            .unwrap()
+            .current();
+        let reg = registry(input.clone());
+        for this in input.iter().rev().take(10) {
+            let res = resolve(
+                &pkg_id("root"),
+                vec![dep_req(&this.name(), &format!("={}", this.version()))],
+                &reg,
+            );
+            dis[res
+                .as_ref()
+                .map(|x| min(x.len(), dis.len()) - 1)
+                .unwrap_or(0)] += 1;
+            if dis.iter().all(|&x| x > 0) {
+                return;
+            }
+        }
+    }
+
+    panic!(
+        "In 640 tries we did not see a wide enough distribution of dependency trees! dis: {:?}",
+        dis
+    );
+}
+
+/// This test is to test the generator to ensure
+/// that it makes registries that include multiple versions of the same library
+#[test]
+fn meta_test_multiple_versions_strategy() {
+    let mut dis = [0; 10];
+
+    let strategy = registry_strategy(50, 20, 60);
+    for _ in 0..64 {
+        let PrettyPrintRegistry(input) = strategy
+            .new_tree(&mut TestRunner::default())
+            .unwrap()
+            .current();
+        let reg = registry(input.clone());
+        for this in input.iter().rev().take(10) {
+            let mut res = resolve(
+                &pkg_id("root"),
+                vec![dep_req(&this.name(), &format!("={}", this.version()))],
+                &reg,
+            );
+            if let Ok(mut res) = res {
+                let res_len = res.len();
+                res.sort_by_key(|s| s.name());
+                res.dedup_by_key(|s| s.name());
+                dis[min(res_len - res.len(), dis.len() - 1)] += 1;
+            }
+            if dis.iter().all(|&x| x > 0) {
+                return;
+            }
+        }
+    }
+    panic!(
+        "In 640 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}",
+        dis
+    );
+}
+
+/// Assert `xs` contains `elems`
+pub fn assert_contains<A: PartialEq>(xs: &[A], elems: &[A]) {
+    for elem in elems {
+        assert!(xs.contains(elem));
+    }
+}
+
+pub fn assert_same<A: PartialEq>(a: &[A], b: &[A]) {
+    assert_eq!(a.len(), b.len());
+    assert_contains(b, a);
+}
diff --git a/tests/testsuite/test.rs b/tests/testsuite/test.rs
new file mode 100644 (file)
index 0000000..d3f24e5
--- /dev/null
@@ -0,0 +1,3224 @@
+use std::fs::File;
+use std::io::prelude::*;
+
+use cargo;
+use support::paths::CargoPathExt;
+use support::registry::Package;
+use support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, project};
+use support::{is_nightly, rustc_host, sleep_ms};
+
+#[test]
+fn cargo_test_simple() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn hello() -> &'static str {
+                "hello"
+            }
+
+            pub fn main() {
+                println!("{}", hello())
+            }
+
+            #[test]
+            fn test_hello() {
+                assert_eq!(hello(), "hello")
+            }"#,
+        ).build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("test test_hello ... ok")
+        .run();
+}
+
+#[test]
+fn cargo_test_release() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.1.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+            pub fn foo() { bar::bar(); }
+
+            #[test]
+            fn test() { foo(); }
+        "#,
+        ).file(
+            "tests/test.rs",
+            r#"
+            extern crate foo;
+
+            #[test]
+            fn test() { foo::foo(); }
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    p.cargo("test -v --release")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[RUNNING] [..] -C opt-level=3 [..]
+[COMPILING] foo v0.1.0 ([CWD])
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[RUNNING] [..] -C opt-level=3 [..]
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `[..]target/release/deps/foo-[..][EXE]`
+[RUNNING] `[..]target/release/deps/test-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]lib.rs[..]`",
+        ).with_stdout_contains_n("test test ... ok", 2)
+        .with_stdout_contains("running 0 tests")
+        .run();
+}
+
+#[test]
+fn cargo_test_overflow_checks() {
+    if !is_nightly() {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [[bin]]
+            name = "foo"
+
+            [profile.release]
+            overflow-checks = true
+            "#,
+        ).file(
+            "src/foo.rs",
+            r#"
+            use std::panic;
+            pub fn main() {
+                let r = panic::catch_unwind(|| {
+                    [1, i32::max_value()].iter().sum::<i32>();
+                });
+                assert!(r.is_err());
+            }"#,
+        ).build();
+
+    p.cargo("build --release").run();
+    assert!(p.release_bin("foo").is_file());
+
+    p.process(&p.release_bin("foo")).with_stdout("").run();
+}
+
+#[test]
+fn cargo_test_verbose() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {}
+            #[test] fn test_hello() {}
+        "#,
+        ).build();
+
+    p.cargo("test -v hello")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]target/debug/deps/foo-[..][EXE] hello`",
+        ).with_stdout_contains("test test_hello ... ok")
+        .run();
+}
+
+#[test]
+fn many_similar_names() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "
+            pub fn foo() {}
+            #[test] fn lib_test() {}
+        ",
+        ).file(
+            "src/main.rs",
+            "
+            extern crate foo;
+            fn main() {}
+            #[test] fn bin_test() { foo::foo() }
+        ",
+        ).file(
+            "tests/foo.rs",
+            r#"
+            extern crate foo;
+            #[test] fn test_test() { foo::foo() }
+        "#,
+        ).build();
+
+    p.cargo("test -v")
+        .with_stdout_contains("test bin_test ... ok")
+        .with_stdout_contains("test lib_test ... ok")
+        .with_stdout_contains("test test_test ... ok")
+        .run();
+}
+
+#[test]
+fn cargo_test_failing_test_in_bin() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn hello() -> &'static str {
+                "hello"
+            }
+
+            pub fn main() {
+                println!("{}", hello())
+            }
+
+            #[test]
+            fn test_hello() {
+                assert_eq!(hello(), "nope")
+            }"#,
+        ).build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[ERROR] test failed, to rerun pass '--bin foo'",
+        ).with_stdout_contains(
+            "
+running 1 test
+test test_hello ... FAILED
+
+failures:
+
+---- test_hello stdout ----
+[..]thread 'test_hello' panicked at 'assertion failed:[..]",
+        ).with_stdout_contains("[..]`(left == right)`[..]")
+        .with_stdout_contains("[..]left: `\"hello\"`,[..]")
+        .with_stdout_contains("[..]right: `\"nope\"`[..]")
+        .with_stdout_contains("[..]src/main.rs:12[..]")
+        .with_stdout_contains(
+            "\
+failures:
+    test_hello
+",
+        ).with_status(101)
+        .run();
+}
+
+#[test]
+fn cargo_test_failing_test_in_test() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/main.rs", r#"pub fn main() { println!("hello"); }"#)
+        .file(
+            "tests/footest.rs",
+            "#[test] fn test_hello() { assert!(false) }",
+        ).build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+
+    p.process(&p.bin("foo")).with_stdout("hello\n").run();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/footest-[..][EXE]
+[ERROR] test failed, to rerun pass '--test footest'",
+        ).with_stdout_contains("running 0 tests")
+        .with_stdout_contains(
+            "\
+running 1 test
+test test_hello ... FAILED
+
+failures:
+
+---- test_hello stdout ----
+[..]thread 'test_hello' panicked at 'assertion failed: false', \
+      tests/footest.rs:1[..]
+",
+        ).with_stdout_contains(
+            "\
+failures:
+    test_hello
+",
+        ).with_status(101)
+        .run();
+}
+
+#[test]
+fn cargo_test_failing_test_in_lib() {
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "#[test] fn test_hello() { assert!(false) }")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[ERROR] test failed, to rerun pass '--lib'",
+        ).with_stdout_contains(
+            "\
+test test_hello ... FAILED
+
+failures:
+
+---- test_hello stdout ----
+[..]thread 'test_hello' panicked at 'assertion failed: false', \
+      src/lib.rs:1[..]
+",
+        ).with_stdout_contains(
+            "\
+failures:
+    test_hello
+",
+        ).with_status(101)
+        .run();
+}
+
+#[test]
+fn test_with_lib_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "baz"
+            path = "src/main.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            ///
+            /// ```rust
+            /// extern crate foo;
+            /// fn main() {
+            ///     println!("{:?}", foo::foo());
+            /// }
+            /// ```
+            ///
+            pub fn foo(){}
+            #[test] fn lib_test() {}
+        "#,
+        ).file(
+            "src/main.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+
+            fn main() {}
+
+            #[test]
+            fn bin_test() {}
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/baz-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test lib_test ... ok")
+        .with_stdout_contains("test bin_test ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 3)
+        .run();
+}
+
+#[test]
+fn test_with_deep_lib_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "
+            #[cfg(test)]
+            extern crate bar;
+            /// ```
+            /// foo::foo();
+            /// ```
+            pub fn foo() {}
+
+            #[test]
+            fn bar_test() {
+                bar::bar();
+            }
+        ",
+        ).build();
+    let _p2 = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("src/lib.rs", "pub fn bar() {} #[test] fn foo_test() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([..])
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target[..]
+[DOCTEST] foo",
+        ).with_stdout_contains("test bar_test ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 2)
+        .run();
+}
+
+#[test]
+fn external_test_explicit() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[test]]
+            name = "test"
+            path = "src/test.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn get_hello() -> &'static str { "Hello" }
+
+            #[test]
+            fn internal_test() {}
+        "#,
+        ).file(
+            "src/test.rs",
+            r#"
+            extern crate foo;
+
+            #[test]
+            fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
+        "#,
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/test-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test internal_test ... ok")
+        .with_stdout_contains("test external_test ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+}
+
+#[test]
+fn external_test_named_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[test]]
+            name = "test"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("tests/test.rs", "#[test] fn foo() {}")
+        .build();
+
+    p.cargo("test").run();
+}
+
+#[test]
+fn external_test_implicit() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn get_hello() -> &'static str { "Hello" }
+
+            #[test]
+            fn internal_test() {}
+        "#,
+        ).file(
+            "tests/external.rs",
+            r#"
+            extern crate foo;
+
+            #[test]
+            fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
+        "#,
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/external-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test internal_test ... ok")
+        .with_stdout_contains("test external_test ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+}
+
+#[test]
+fn dont_run_examples() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "examples/dont-run-me-i-will-fail.rs",
+            r#"
+            fn main() { panic!("Examples should not be run by 'cargo test'"); }
+        "#,
+        ).build();
+    p.cargo("test").run();
+}
+
+#[test]
+fn pass_through_command_line() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            "
+            #[test] fn foo() {}
+            #[test] fn bar() {}
+        ",
+        ).build();
+
+    p.cargo("test bar")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test bar ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+
+    p.cargo("test foo")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test foo ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+}
+
+// Regression test for running cargo-test twice with
+// tests in an rlib
+#[test]
+fn cargo_test_twice() {
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            r#"
+            #![crate_type = "rlib"]
+
+            #[test]
+            fn dummy_test() { }
+            "#,
+        ).build();
+
+    for _ in 0..2 {
+        p.cargo("test").run();
+    }
+}
+
+// A lib and a bin sharing the name "foo" each get their own test binary;
+// both are run (two [RUNNING] lines) plus the lib's doctests.
+#[test]
+fn lib_bin_same_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            [[bin]]
+            name = "foo"
+        "#,
+        ).file("src/lib.rs", "#[test] fn lib_test() {}")
+        .file(
+            "src/main.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+
+            #[test]
+            fn bin_test() {}
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains_n("test [..] ... ok", 2)
+        .with_stdout_contains("running 0 tests")
+        .run();
+}
+
+// A lib whose crate name differs from "foo" (here "syntax") runs its unit
+// tests, integration tests, and doctests under that name.
+#[test]
+fn lib_with_standard_name() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            "
+            /// ```
+            /// syntax::foo();
+            /// ```
+            pub fn foo() {}
+
+            #[test]
+            fn foo_test() {}
+        ",
+        ).file(
+            "tests/test.rs",
+            "
+            extern crate syntax;
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/syntax-[..][EXE]
+[RUNNING] target/debug/deps/test-[..][EXE]
+[DOCTEST] syntax",
+        ).with_stdout_contains("test foo_test ... ok")
+        .with_stdout_contains("test test ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 3)
+        .run();
+}
+
+// With `test = false` and `doctest = false` on the lib, only the bin's
+// test binary is built and run.
+#[test]
+fn lib_with_standard_name2() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "syntax"
+            test = false
+            doctest = false
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "
+            extern crate syntax;
+
+            fn main() {}
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/syntax-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+}
+
+// A `[lib]` section without a `name` key is valid: the lib name defaults
+// to the package name, and testing still works.
+#[test]
+fn lib_without_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            test = false
+            doctest = false
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "
+            extern crate syntax;
+
+            fn main() {}
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] syntax v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/syntax-[..][EXE]",
+        ).with_stdout_contains("test test ... ok")
+        .run();
+}
+
+// Unlike `[lib]`, a `[[bin]]` target requires an explicit `name`;
+// omitting it is a manifest parse error (exit status 101).
+#[test]
+fn bin_without_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            test = false
+            doctest = false
+
+            [[bin]]
+            path = "src/main.rs"
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "
+            extern crate syntax;
+
+            fn main() {}
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  binary target bin.name is required",
+        ).run();
+}
+
+// A `[[bench]]` target without a `name` key is a manifest parse error.
+#[test]
+fn bench_without_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            test = false
+            doctest = false
+
+            [[bench]]
+            path = "src/bench.rs"
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "
+            extern crate syntax;
+
+            fn main() {}
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).file(
+            "src/bench.rs",
+            "
+            #![feature(test)]
+            extern crate syntax;
+            extern crate test;
+
+            #[bench]
+            fn external_bench(_b: &mut test::Bencher) {}
+        ",
+        ).build();
+
+    p.cargo("test")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  benchmark target bench.name is required",
+        ).run();
+}
+
+// A `[[test]]` target without a `name` key is a manifest parse error.
+#[test]
+fn test_without_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            test = false
+            doctest = false
+
+            [[test]]
+            path = "src/test.rs"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn foo() {}
+            pub fn get_hello() -> &'static str { "Hello" }
+        "#,
+        ).file(
+            "src/main.rs",
+            "
+            extern crate syntax;
+
+            fn main() {}
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).file(
+            "src/test.rs",
+            r#"
+            extern crate syntax;
+
+            #[test]
+            fn external_test() { assert_eq!(syntax::get_hello(), "Hello") }
+        "#,
+        ).build();
+
+    p.cargo("test")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  test target test.name is required",
+        ).run();
+}
+
+// An `[[example]]` target without a `name` key is a manifest parse error.
+#[test]
+fn example_without_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "syntax"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            test = false
+            doctest = false
+
+            [[example]]
+            path = "examples/example.rs"
+        "#,
+        ).file("src/lib.rs", "pub fn foo() {}")
+        .file(
+            "src/main.rs",
+            "
+            extern crate syntax;
+
+            fn main() {}
+
+            #[test]
+            fn test() { syntax::foo() }
+        ",
+        ).file(
+            "examples/example.rs",
+            r#"
+            extern crate syntax;
+
+            fn main() {
+                println!("example1");
+            }
+        "#,
+        ).build();
+
+    p.cargo("test")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] failed to parse manifest at `[..]`
+
+Caused by:
+  example target example.name is required",
+        ).run();
+}
+
+// The project binary is built before integration tests run, so an
+// integration test can execute `target/debug/foo` and observe its exit code.
+#[test]
+fn bin_there_for_integration() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            "
+            fn main() { std::process::exit(101); }
+            #[test] fn main_test() {}
+        ",
+        ).file(
+            "tests/foo.rs",
+            r#"
+            use std::process::Command;
+            #[test]
+            fn test_test() {
+                let status = Command::new("target/debug/foo").status().unwrap();
+                assert_eq!(status.code(), Some(101));
+            }
+        "#,
+        ).build();
+
+    p.cargo("test -v")
+        .with_stdout_contains("test main_test ... ok")
+        .with_stdout_contains("test test_test ... ok")
+        .run();
+}
+
+// Testing works when both the package and its path dependency are dylib
+// crates; a second (fresh) run after back-dating skips recompilation.
+#[test]
+fn test_dylib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate_type = ["dylib"]
+
+            [dependencies.bar]
+            path = "bar"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar as the_bar;
+
+            pub fn bar() { the_bar::baz(); }
+
+            #[test]
+            fn foo() { bar(); }
+        "#,
+        ).file(
+            "tests/test.rs",
+            r#"
+            extern crate foo as the_foo;
+
+            #[test]
+            fn foo() { the_foo::bar(); }
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            crate_type = ["dylib"]
+        "#,
+        ).file("bar/src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.0.1 ([CWD]/bar)
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/test-[..][EXE]",
+        ).with_stdout_contains_n("test foo ... ok", 2)
+        .run();
+
+    // Back-date mtimes so the second run treats everything as fresh.
+    p.root().move_into_the_past();
+    p.cargo("test")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/test-[..][EXE]",
+        ).with_stdout_contains_n("test foo ... ok", 2)
+        .run();
+}
+
+// With a build script present, the second `cargo test` run is a no-op
+// rebuild (no [COMPILING] line) but tests still execute.
+#[test]
+fn test_twice_with_build_cmd() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "#[test] fn foo() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test foo ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test foo ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+}
+
+// `cargo build` after `cargo test` produces no output: the non-test
+// artifacts from the test run are reused.
+#[test]
+fn test_then_build() {
+    let p = project().file("src/lib.rs", "#[test] fn foo() {}").build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test foo ... ok")
+        .with_stdout_contains("running 0 tests")
+        .run();
+
+    p.cargo("build").with_stdout("").run();
+}
+
+// `cargo test --no-run` compiles the (always-panicking) test but never
+// executes it, so the command succeeds.
+#[test]
+fn test_no_run() {
+    let p = project()
+        .file("src/lib.rs", "#[test] fn foo() { panic!() }")
+        .build();
+
+    p.cargo("test --no-run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// `cargo test --bin <name>` runs only the selected binary's tests.
+#[test]
+fn test_run_specific_bin_target() {
+    let prj = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name="bin1"
+            path="src/bin1.rs"
+
+            [[bin]]
+            name="bin2"
+            path="src/bin2.rs"
+        "#,
+        ).file("src/bin1.rs", "#[test] fn test1() { }")
+        .file("src/bin2.rs", "#[test] fn test2() { }")
+        .build();
+
+    prj.cargo("test --bin bin2")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/bin2-[..][EXE]",
+        ).with_stdout_contains("test test2 ... ok")
+        .run();
+}
+
+// `cargo test --bins` runs only bin-target tests; the test, bench, and
+// example targets (which would panic if executed) are not run.
+#[test]
+fn test_run_implicit_bin_target() {
+    let prj = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name="mybin"
+            path="src/mybin.rs"
+        "#,
+        ).file(
+            "src/mybin.rs",
+            "#[test] fn test_in_bin() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+        .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+        .file(
+            "examples/myexm.rs",
+            "#[test] fn test_in_exm() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).build();
+
+    prj.cargo("test --bins")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/mybin-[..][EXE]",
+        ).with_stdout_contains("test test_in_bin ... ok")
+        .run();
+}
+
+// `cargo test --test <name>` runs only the named integration test, even
+// when a bin target shares a test function name.
+#[test]
+fn test_run_specific_test_target() {
+    let prj = project()
+        .file("src/bin/a.rs", "fn main() { }")
+        .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }")
+        .file("tests/a.rs", "#[test] fn test_a() { }")
+        .file("tests/b.rs", "#[test] fn test_b() { }")
+        .build();
+
+    prj.cargo("test --test b")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/b-[..][EXE]",
+        ).with_stdout_contains("test test_b ... ok")
+        .run();
+}
+
+// `cargo test --tests` runs bin and integration-test targets but must not
+// build examples — the example here fails compilation on purpose.
+#[test]
+fn test_run_implicit_test_target() {
+    let prj = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name="mybin"
+            path="src/mybin.rs"
+        "#,
+        ).file(
+            "src/mybin.rs",
+            "#[test] fn test_in_bin() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+        .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+        .file(
+            "examples/myexm.rs",
+            "fn main() { compile_error!(\"Don't build me!\"); }",
+        ).build();
+
+    prj.cargo("test --tests")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/mybin-[..][EXE]
+[RUNNING] target/debug/deps/mytest-[..][EXE]",
+        ).with_stdout_contains("test test_in_test ... ok")
+        .run();
+}
+
+// `cargo test --benches` runs bin and bench targets but must not build
+// examples — the example here fails compilation on purpose.
+#[test]
+fn test_run_implicit_bench_target() {
+    let prj = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name="mybin"
+            path="src/mybin.rs"
+        "#,
+        ).file(
+            "src/mybin.rs",
+            "#[test] fn test_in_bin() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+        .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+        .file(
+            "examples/myexm.rs",
+            "fn main() { compile_error!(\"Don't build me!\"); }",
+        ).build();
+
+    prj.cargo("test --benches")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/mybin-[..][EXE]
+[RUNNING] target/debug/deps/mybench-[..][EXE]",
+        ).with_stdout_contains("test test_in_bench ... ok")
+        .run();
+}
+
+// Examples are only test-executed when `test = true` is set (myexm2),
+// unless explicitly selected via --example/--examples/--all-targets.
+#[test]
+fn test_run_implicit_example_target() {
+    let prj = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "mybin"
+            path = "src/mybin.rs"
+
+            [[example]]
+            name = "myexm1"
+
+            [[example]]
+            name = "myexm2"
+            test = true
+        "#,
+        ).file(
+            "src/mybin.rs",
+            "#[test] fn test_in_bin() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).file("tests/mytest.rs", "#[test] fn test_in_test() { }")
+        .file("benches/mybench.rs", "#[test] fn test_in_bench() { }")
+        .file(
+            "examples/myexm1.rs",
+            "#[test] fn test_in_exm() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).file(
+            "examples/myexm2.rs",
+            "#[test] fn test_in_exm() { }
+               fn main() { panic!(\"Don't execute me!\"); }",
+        ).build();
+
+    // Compiles myexm1 as normal (--crate-type bin), but does not run it;
+    // myexm2 is compiled with --test and executed.
+    prj.cargo("test -v")
+        .with_stderr_contains("[RUNNING] `rustc [..]myexm1.rs [..]--crate-type bin[..]")
+        .with_stderr_contains("[RUNNING] `rustc [..]myexm2.rs [..]--test[..]")
+        .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+        .run();
+
+    // Only tests myexm2.
+    prj.cargo("test --tests")
+        .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+        .run();
+
+    // Tests all examples.
+    prj.cargo("test --examples")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+        .run();
+
+    // Test an example, even without `test` set.
+    prj.cargo("test --example myexm1")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]")
+        .run();
+
+    // Tests all examples.
+    prj.cargo("test --all-targets")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]")
+        .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]")
+        .run();
+}
+
+// With `harness = false` the test target is run as a plain binary (its own
+// `fn main`), and libtest flags like --nocapture are still accepted.
+#[test]
+fn test_no_harness() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "foo"
+            test = false
+
+            [[test]]
+            name = "bar"
+            path = "foo.rs"
+            harness = false
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("foo.rs", "fn main() {}")
+        .build();
+
+    p.cargo("test -- --nocapture")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/bar-[..][EXE]
+",
+        ).run();
+}
+
+// `cargo test -p <pkg>` tests only the selected path dependency (lib + bin
+// test binaries); plain `cargo test` tests only the workspace root package.
+#[test]
+fn selective_testing() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+            [dependencies.d2]
+                path = "d2"
+
+            [lib]
+                name = "foo"
+                doctest = false
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+                name = "d1"
+                doctest = false
+        "#,
+        ).file("d1/src/lib.rs", "")
+        .file(
+            "d1/src/main.rs",
+            "#[allow(unused_extern_crates)] extern crate d1; fn main() {}",
+        ).file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+                name = "d2"
+                doctest = false
+        "#,
+        ).file("d2/src/lib.rs", "")
+        .file(
+            "d2/src/main.rs",
+            "#[allow(unused_extern_crates)] extern crate d2; fn main() {}",
+        );
+    let p = p.build();
+
+    println!("d1");
+    p.cargo("test -p d1")
+        .with_stderr(
+            "\
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/d1-[..][EXE]
+[RUNNING] target/debug/deps/d1-[..][EXE]",
+        ).with_stdout_contains_n("running 0 tests", 2)
+        .run();
+
+    println!("d2");
+    p.cargo("test -p d2")
+        .with_stderr(
+            "\
+[COMPILING] d2 v0.0.1 ([CWD]/d2)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/d2-[..][EXE]
+[RUNNING] target/debug/deps/d2-[..][EXE]",
+        ).with_stdout_contains_n("running 0 tests", 2)
+        .run();
+
+    println!("whole");
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]",
+        ).with_stdout_contains("running 0 tests")
+        .run();
+}
+
+// A dev-dependency may depend back on the package itself (dev-dep cycle
+// through `b -> foo`); both build and test must succeed.
+#[test]
+fn almost_cyclic_but_not_quite() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies.b]
+            path = "b"
+            [dev-dependencies.c]
+            path = "c"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[cfg(test)] extern crate b;
+            #[cfg(test)] extern crate c;
+        "#,
+        ).file(
+            "b/Cargo.toml",
+            r#"
+            [package]
+            name = "b"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.foo]
+            path = ".."
+        "#,
+        ).file(
+            "b/src/lib.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+        "#,
+        ).file("c/Cargo.toml", &basic_manifest("c", "0.0.1"))
+        .file("c/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.cargo("test").run();
+}
+
+// After a full `cargo build`, `cargo test -p b` on a dependency succeeds
+// even with back-dated mtimes (freshness tracking for the sub-package).
+#[test]
+fn build_then_selective_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.b]
+            path = "b"
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate b;",
+        ).file(
+            "src/main.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate b;
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+            fn main() {}
+        "#,
+        ).file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    p.root().move_into_the_past();
+    p.cargo("test -p b").run();
+}
+
+// An example using a dev-dependency builds under both `cargo test` and
+// `cargo run --example --release`; the dep is intentionally slow to compile.
+#[test]
+fn example_dev_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies.bar]
+            path = "bar"
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/e1.rs", "extern crate bar; fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "bar/src/lib.rs",
+            r#"
+            // make sure this file takes awhile to compile
+            macro_rules! f0( () => (1) );
+            macro_rules! f1( () => ({(f0!()) + (f0!())}) );
+            macro_rules! f2( () => ({(f1!()) + (f1!())}) );
+            macro_rules! f3( () => ({(f2!()) + (f2!())}) );
+            macro_rules! f4( () => ({(f3!()) + (f3!())}) );
+            macro_rules! f5( () => ({(f4!()) + (f4!())}) );
+            macro_rules! f6( () => ({(f5!()) + (f5!())}) );
+            macro_rules! f7( () => ({(f6!()) + (f6!())}) );
+            macro_rules! f8( () => ({(f7!()) + (f7!())}) );
+            pub fn bar() {
+                f8!();
+            }
+        "#,
+        ).build();
+    p.cargo("test").run();
+    p.cargo("run --example e1 --release -v").run();
+}
+
+// `cargo test -p d1` must not run the root package's doctests: the root's
+// doc example is intentionally invalid Rust and would fail if doctested.
+#[test]
+fn selective_testing_with_docs() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// not valid rust
+            /// ```
+            pub fn foo() {}
+        "#,
+        ).file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "d1"
+            path = "d1.rs"
+        "#,
+        ).file("d1/d1.rs", "");
+    let p = p.build();
+
+    p.cargo("test -p d1")
+        .with_stderr(
+            "\
+[COMPILING] d1 v0.0.1 ([CWD]/d1)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/d1[..][EXE]
+[DOCTEST] d1",
+        ).with_stdout_contains_n("running 0 tests", 2)
+        .run();
+}
+
+// `cargo test --no-run` builds the example (not the bin) when they share a
+// name; a later `cargo run` builds and runs the bin.
+#[test]
+fn example_bin_same_name() {
+    let p = project()
+        .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
+        .file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
+        .build();
+
+    p.cargo("test --no-run -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Only the example artifact exists so far; the bin was not produced.
+    assert!(!p.bin("foo").is_file());
+    assert!(p.bin("examples/foo").is_file());
+
+    p.process(&p.bin("examples/foo"))
+        .with_stdout("example\n")
+        .run();
+
+    p.cargo("run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] [..]",
+        ).with_stdout("bin")
+        .run();
+    assert!(p.bin("foo").is_file());
+}
+
+// Running `cargo test -v` twice keeps the built example artifact in place
+// both times (no deletion/rebuild anomaly with same-named bin/example).
+#[test]
+fn test_with_example_twice() {
+    let p = project()
+        .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
+        .file("examples/foo.rs", r#"fn main() { println!("example"); }"#)
+        .build();
+
+    println!("first");
+    p.cargo("test -v").run();
+    assert!(p.bin("examples/foo").is_file());
+    println!("second");
+    p.cargo("test -v").run();
+    assert!(p.bin("examples/foo").is_file());
+}
+
+// When the lib has test/doctest disabled, `cargo test` still compiles the
+// example with its dev-dependency linked in (`--extern a=...`).
+#[test]
+fn example_with_dev_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            test = false
+            doctest = false
+
+            [dev-dependencies.a]
+            path = "a"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "examples/ex.rs",
+            "#[allow(unused_extern_crates)] extern crate a; fn main() {}",
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("test -v")
+        .with_stderr(
+            "\
+[..]
+[..]
+[..]
+[..]
+[RUNNING] `rustc --crate-name ex [..] --extern a=[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+// `cargo test` must not delete the binary produced by a prior `cargo build`.
+#[test]
+fn bin_is_preserved() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build -v").run();
+    assert!(p.bin("foo").is_file());
+
+    println!("testing");
+    p.cargo("test -v").run();
+    assert!(p.bin("foo").is_file());
+}
+
+// `cargo run --example`/`--bin` with a nonexistent target name fails with
+// a clear "no ... target named" error (exit status 101).
+#[test]
+fn bad_example() {
+    let p = project().file("src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("run --example foo")
+        .with_status(101)
+        .with_stderr("[ERROR] no example target named `foo`")
+        .run();
+    p.cargo("run --bin foo")
+        .with_status(101)
+        .with_stderr("[ERROR] no bin target named `foo`")
+        .run();
+}
+
+// Features enabled via `cargo test --features` are propagated to doctests,
+// so the cfg-gated doc example compiles and passes.
+#[test]
+fn doctest_feature() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            [features]
+            bar = []
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            /// ```rust
+            /// assert_eq!(foo::foo(), 1);
+            /// ```
+            #[cfg(feature = "bar")]
+            pub fn foo() -> i32 { 1 }
+        "#,
+        ).build();
+
+    p.cargo("test --features bar")
+        .with_stderr(
+            "\
+[COMPILING] foo [..]
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("running 0 tests")
+        .with_stdout_contains("test [..] ... ok")
+        .run();
+}
+
+// A package named "foo-bar" is importable as `foo_bar` in doctests
+// (dashes in the package name map to underscores in the crate name).
+#[test]
+fn dashes_to_underscores() {
+    let p = project()
+        .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// assert_eq!(foo_bar::foo(), 1);
+            /// ```
+            pub fn foo() -> i32 { 1 }
+        "#,
+        ).build();
+
+    p.cargo("test -v").run();
+}
+
+// Dev-dependencies are available to doctests (`extern crate b` inside a
+// doc example links against the dev-dep).
+#[test]
+fn doctest_dev_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies]
+            b = { path = "b" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// extern crate b;
+            /// ```
+            pub fn foo() {}
+        "#,
+        ).file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("test -v").run();
+}
+
+// `cargo test --test=<name>` skips doctests entirely: the doc example here
+// would fail (no `b` dependency) if doctests were run.
+#[test]
+fn filter_no_doc_tests() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// extern crate b;
+            /// ```
+            pub fn foo() {}
+        "#,
+        ).file("tests/foo.rs", "")
+        .build();
+
+    p.cargo("test --test=foo")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo[..][EXE]",
+        ).with_stdout_contains("running 0 tests")
+        .run();
+}
+
+// A crate with both rlib and dylib crate-types still gets its doctests run
+// (the rlib makes doctesting possible); `test = false` skips unit tests.
+#[test]
+fn dylib_doctest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate-type = ["rlib", "dylib"]
+            test = false
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// foo::foo();
+            /// ```
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[DOCTEST] foo",
+        ).with_stdout_contains("test [..] ... ok")
+        .run();
+}
+
+#[test]
+fn dylib_doctest2() {
+    // can't doctest dylibs as they're statically linked together
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "foo"
+            crate-type = ["dylib"]
+            test = false
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// foo::foo();
+            /// ```
+            pub fn foo() {}
+        "#,
+        ).build();
+
+    p.cargo("test").with_stdout("").run();
+}
+
+#[test]
+fn cyclic_dev_dep_doc_test() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            //! ```
+            //! extern crate bar;
+            //! ```
+        "#,
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = { path = ".." }
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            #[allow(unused_extern_crates)]
+            extern crate foo;
+        "#,
+        ).build();
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[COMPILING] bar v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("running 0 tests")
+        .with_stdout_contains("test [..] ... ok")
+        .run();
+}
+
+#[test]
+fn dev_dep_with_build_script() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dev-dependencies]
+            bar = { path = "bar" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("examples/foo.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        ).file("bar/src/lib.rs", "")
+        .file("bar/build.rs", "fn main() {}")
+        .build();
+    p.cargo("test").run();
+}
+
+#[test]
+fn no_fail_fast() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+        pub fn add_one(x: i32) -> i32{
+            x + 1
+        }
+
+        /// ```rust
+        /// use foo::sub_one;
+        /// assert_eq!(sub_one(101), 100);
+        /// ```
+        pub fn sub_one(x: i32) -> i32{
+            x - 1
+        }
+        "#,
+        ).file(
+            "tests/test_add_one.rs",
+            r#"
+        extern crate foo;
+        use foo::*;
+
+        #[test]
+        fn add_one_test() {
+            assert_eq!(add_one(1), 2);
+        }
+
+        #[test]
+        fn fail_add_one_test() {
+            assert_eq!(add_one(1), 1);
+        }
+        "#,
+        ).file(
+            "tests/test_sub_one.rs",
+            r#"
+        extern crate foo;
+        use foo::*;
+
+        #[test]
+        fn sub_one_test() {
+            assert_eq!(sub_one(1), 0);
+        }
+        "#,
+        ).build();
+    p.cargo("test --no-fail-fast")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..][EXE]
+[RUNNING] target/debug/deps/test_add_one-[..][EXE]",
+        ).with_stdout_contains("running 0 tests")
+        .with_stderr_contains(
+            "\
+[RUNNING] target/debug/deps/test_sub_one-[..][EXE]
+[DOCTEST] foo",
+        ).with_stdout_contains("test result: FAILED. [..]")
+        .with_stdout_contains("test sub_one_test ... ok")
+        .with_stdout_contains_n("test [..] ... ok", 3)
+        .run();
+}
+
+#[test]
+fn test_multiple_packages() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+            [dependencies.d2]
+                path = "d2"
+
+            [lib]
+                name = "foo"
+                doctest = false
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "d1/Cargo.toml",
+            r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+                name = "d1"
+                doctest = false
+        "#,
+        ).file("d1/src/lib.rs", "")
+        .file(
+            "d2/Cargo.toml",
+            r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+                name = "d2"
+                doctest = false
+        "#,
+        ).file("d2/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("test -p d1 -p d2")
+        .with_stderr_contains("[RUNNING] target/debug/deps/d1-[..][EXE]")
+        .with_stderr_contains("[RUNNING] target/debug/deps/d2-[..][EXE]")
+        .with_stdout_contains_n("running 0 tests", 2)
+        .run();
+}
+
+// After touching only src/main.rs, `cargo test --no-run` should recompile
+// just that bin target (two rustc runs on src/main.rs: normal build + test
+// build) without rebuilding the lib or tests/foo.rs.
+#[test]
+fn bin_does_not_rebuild_tests() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .file("tests/foo.rs", "");
+    let p = p.build();
+
+    p.cargo("test -v").run();
+
+    // Sleep so the rewritten file gets a distinct mtime on coarse-grained
+    // filesystems; otherwise the change may not be detected as dirty.
+    sleep_ms(1000);
+    File::create(&p.root().join("src/main.rs"))
+        .unwrap()
+        .write_all(b"fn main() { 3; }")
+        .unwrap();
+
+    // Exact stderr: only src/main.rs is mentioned in the [RUNNING] lines.
+    p.cargo("test -v --no-run")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..] src/main.rs [..]`
+[RUNNING] `rustc [..] src/main.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn selective_test_wonky_profile() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.release]
+            opt-level = 2
+
+            [dependencies]
+            a = { path = "a" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("test -v --no-run --release -p foo -p a").run();
+}
+
+#[test]
+fn selective_test_optional_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a", optional = true }
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("test -v --no-run --features a -p a")
+        .with_stderr(
+            "\
+[COMPILING] a v0.0.1 ([..])
+[RUNNING] `rustc [..] a/src/lib.rs [..]`
+[RUNNING] `rustc [..] a/src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn only_test_docs() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #[test]
+            fn foo() {
+                let a: u32 = "hello";
+            }
+
+            /// ```
+            /// foo::bar();
+            /// println!("ok");
+            /// ```
+            pub fn bar() {
+            }
+        "#,
+        ).file("tests/foo.rs", "this is not rust");
+    let p = p.build();
+
+    p.cargo("test --doc")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[DOCTEST] foo",
+        ).with_stdout_contains("test [..] ... ok")
+        .run();
+}
+
+#[test]
+fn test_panic_abort_with_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [profile.dev]
+            panic = 'abort'
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            extern crate bar;
+
+            #[test]
+            fn foo() {}
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file("bar/src/lib.rs", "")
+        .build();
+    p.cargo("test -v").run();
+}
+
+// Even with `harness = false`, the test target is still compiled with
+// `--cfg test`, so the `#[cfg(test)]` main below is included and its
+// "hello!" output appears when cargo runs the resulting binary.
+#[test]
+fn cfg_test_even_with_no_harness() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            harness = false
+            doctest = false
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"#[cfg(test)] fn main() { println!("hello!"); }"#,
+        ).build();
+    p.cargo("test -v")
+        .with_stdout("hello!\n")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]`
+",
+        ).run();
+}
+
+#[test]
+fn panic_abort_multiple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+
+            [profile.release]
+            panic = 'abort'
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate a;",
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+    p.cargo("test --release -v -p foo -p a").run();
+}
+
+#[test]
+fn pass_correct_cfgs_flags_to_rustdoc() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            default = ["feature_a/default"]
+            nightly = ["feature_a/nightly"]
+
+            [dependencies.feature_a]
+            path = "libs/feature_a"
+            default-features = false
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            #[cfg(test)]
+            mod tests {
+                #[test]
+                fn it_works() {
+                  assert!(true);
+                }
+            }
+        "#,
+        ).file(
+            "libs/feature_a/Cargo.toml",
+            r#"
+            [package]
+            name = "feature_a"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            default = ["mock_serde_codegen"]
+            nightly = ["mock_serde_derive"]
+
+            [dependencies]
+            mock_serde_derive = { path = "../mock_serde_derive", optional = true }
+
+            [build-dependencies]
+            mock_serde_codegen = { path = "../mock_serde_codegen", optional = true }
+        "#,
+        ).file(
+            "libs/feature_a/src/lib.rs",
+            r#"
+            #[cfg(feature = "mock_serde_derive")]
+            const MSG: &'static str = "This is safe";
+
+            #[cfg(feature = "mock_serde_codegen")]
+            const MSG: &'static str = "This is risky";
+
+            pub fn get() -> &'static str {
+                MSG
+            }
+        "#,
+        ).file(
+            "libs/mock_serde_derive/Cargo.toml",
+            &basic_manifest("mock_serde_derive", "0.1.0"),
+        ).file("libs/mock_serde_derive/src/lib.rs", "")
+        .file(
+            "libs/mock_serde_codegen/Cargo.toml",
+            &basic_manifest("mock_serde_codegen", "0.1.0"),
+        ).file("libs/mock_serde_codegen/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("test --package feature_a --verbose")
+        .with_stderr_contains(
+            "\
+[DOCTEST] feature_a
+[RUNNING] `rustdoc --test [..]mock_serde_codegen[..]`",
+        ).run();
+
+    p.cargo("test --verbose")
+        .with_stderr_contains(
+            "\
+[DOCTEST] foo
+[RUNNING] `rustdoc --test [..]feature_a[..]`",
+        ).run();
+}
+
+#[test]
+fn test_release_ignore_panic() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+
+            [profile.test]
+            panic = 'abort'
+            [profile.release]
+            panic = 'abort'
+        "#,
+        ).file(
+            "src/lib.rs",
+            "#[allow(unused_extern_crates)] extern crate a;",
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "");
+    let p = p.build();
+    println!("test");
+    p.cargo("test -v").run();
+    println!("bench");
+    p.cargo("bench -v").run();
+}
+
+#[test]
+fn test_many_with_features() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+
+            [features]
+            foo = []
+
+            [workspace]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("test -v -p a -p foo --features foo").run();
+}
+
+#[test]
+fn test_all_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "#[test] fn foo_test() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "#[test] fn bar_test() {}")
+        .build();
+
+    p.cargo("test --all")
+        .with_stdout_contains("test foo_test ... ok")
+        .with_stdout_contains("test bar_test ... ok")
+        .run();
+}
+
+// `--all --exclude baz` must skip the excluded member: baz's test asserts
+// false and would fail the run if it were executed; only bar's test runs.
+#[test]
+fn test_all_exclude() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [workspace]
+            members = ["bar", "baz"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/lib.rs", "#[test] pub fn bar() {}")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }")
+        .build();
+
+    p.cargo("test --all --exclude baz")
+        .with_stdout_contains(
+            "running 1 test
+test bar ... ok",
+        ).run();
+}
+
+#[test]
+fn test_all_virtual_manifest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "#[test] fn a() {}")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+        .file("b/src/lib.rs", "#[test] fn b() {}")
+        .build();
+
+    p.cargo("test --all")
+        .with_stdout_contains("test a ... ok")
+        .with_stdout_contains("test b ... ok")
+        .run();
+}
+
+#[test]
+fn test_virtual_manifest_all_implied() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "#[test] fn a() {}")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+        .file("b/src/lib.rs", "#[test] fn b() {}")
+        .build();
+
+    p.cargo("test")
+        .with_stdout_contains("test a ... ok")
+        .with_stdout_contains("test b ... ok")
+        .run();
+}
+
+#[test]
+fn test_all_member_dependency_same_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a"]
+        "#,
+        ).file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.1.0"
+
+            [dependencies]
+            a = "0.1.0"
+        "#,
+        ).file("a/src/lib.rs", "#[test] fn a() {}")
+        .build();
+
+    Package::new("a", "0.1.0").publish();
+
+    p.cargo("test --all")
+        .with_stdout_contains("test a ... ok")
+        .run();
+}
+
+#[test]
+fn doctest_only_with_dev_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.1.0"
+
+            [dev-dependencies]
+            b = { path = "b" }
+        "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// extern crate b;
+            ///
+            /// b::b();
+            /// ```
+            pub fn a() {}
+        "#,
+        ).file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+        .file("b/src/lib.rs", "pub fn b() {}")
+        .build();
+
+    p.cargo("test --doc -v").run();
+}
+
+#[test]
+fn test_many_targets() {
+    let p = project()
+        .file(
+            "src/bin/a.rs",
+            r#"
+            fn main() {}
+            #[test] fn bin_a() {}
+        "#,
+        ).file(
+            "src/bin/b.rs",
+            r#"
+            fn main() {}
+            #[test] fn bin_b() {}
+        "#,
+        ).file(
+            "src/bin/c.rs",
+            r#"
+            fn main() {}
+            #[test] fn bin_c() { panic!(); }
+        "#,
+        ).file(
+            "examples/a.rs",
+            r#"
+            fn main() {}
+            #[test] fn example_a() {}
+        "#,
+        ).file(
+            "examples/b.rs",
+            r#"
+            fn main() {}
+            #[test] fn example_b() {}
+        "#,
+        ).file("examples/c.rs", "#[test] fn example_c() { panic!(); }")
+        .file("tests/a.rs", "#[test] fn test_a() {}")
+        .file("tests/b.rs", "#[test] fn test_b() {}")
+        .file("tests/c.rs", "does not compile")
+        .build();
+
+    p.cargo("test --verbose --bin a --bin b --example a --example b --test a --test b")
+        .with_stdout_contains("test bin_a ... ok")
+        .with_stdout_contains("test bin_b ... ok")
+        .with_stdout_contains("test test_a ... ok")
+        .with_stdout_contains("test test_b ... ok")
+        .with_stderr_contains("[RUNNING] `rustc --crate-name a examples/a.rs [..]`")
+        .with_stderr_contains("[RUNNING] `rustc --crate-name b examples/b.rs [..]`")
+        .run();
+}
+
+#[test]
+fn doctest_and_registry() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.1.0"
+
+            [dependencies]
+            b = { path = "b" }
+            c = { path = "c" }
+
+            [workspace]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+        .file(
+            "b/src/lib.rs",
+            "
+            /// ```
+            /// b::foo();
+            /// ```
+            pub fn foo() {}
+        ",
+        ).file(
+            "c/Cargo.toml",
+            r#"
+            [project]
+            name = "c"
+            version = "0.1.0"
+
+            [dependencies]
+            b = "0.1"
+        "#,
+        ).file("c/src/lib.rs", "")
+        .build();
+
+    Package::new("b", "0.1.0").publish();
+
+    p.cargo("test --all -v").run();
+}
+
+#[test]
+fn cargo_test_env() {
+    let src = format!(
+        r#"
+        #![crate_type = "rlib"]
+
+        #[test]
+        fn env_test() {{
+            use std::env;
+            println!("{{}}", env::var("{}").unwrap());
+        }}
+        "#,
+        cargo::CARGO_ENV
+    );
+
+    let p = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", &src)
+        .build();
+
+    let cargo = cargo_exe().canonicalize().unwrap();
+    p.cargo("test --lib -- --nocapture")
+        .with_stdout_contains(format!(
+            "\
+{}
+test env_test ... ok
+",
+            cargo.to_str().unwrap()
+        )).run();
+}
+
+#[test]
+fn test_order() {
+    let p = project()
+        .file("src/lib.rs", "#[test] fn test_lib() {}")
+        .file("tests/a.rs", "#[test] fn test_a() {}")
+        .file("tests/z.rs", "#[test] fn test_z() {}")
+        .build();
+
+    p.cargo("test --all")
+        .with_stdout_contains(
+            "
+running 1 test
+test test_lib ... ok
+
+test result: ok. [..]
+
+
+running 1 test
+test test_a ... ok
+
+test result: ok. [..]
+
+
+running 1 test
+test test_z ... ok
+
+test result: ok. [..]
+",
+        ).run();
+}
+
+// A package may dev-depend on itself (path = "."); `cargo test --all` must
+// handle the resulting dev-dependency cycle without error.
+#[test]
+fn cyclic_dev() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dev-dependencies]
+            foo = { path = "." }
+        "#,
+        ).file("src/lib.rs", "#[test] fn test_lib() {}")
+        .file("tests/foo.rs", "extern crate foo;")
+        .build();
+
+    p.cargo("test --all").run();
+}
+
+#[test]
+fn publish_a_crate_without_tests() {
+    Package::new("testless", "0.1.0")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "testless"
+            version = "0.1.0"
+            exclude = ["tests/*"]
+
+            [[test]]
+            name = "a_test"
+        "#)
+        .file("src/lib.rs", "")
+
+        // In real life, the package will have a test,
+        // which would be excluded from .crate file by the
+        // `exclude` field. Our test harness does not honor
+        // exclude though, so let's just not add the file!
+        // .file("tests/a_test.rs", "")
+
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            testless = "0.1.0"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("test").run();
+    p.cargo("test --package testless").run();
+}
+
+#[test]
+fn find_dependency_of_proc_macro_dependency_with_target() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["root", "proc_macro_dep"]
+        "#,
+        ).file(
+            "root/Cargo.toml",
+            r#"
+            [project]
+            name = "root"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            proc_macro_dep = { path = "../proc_macro_dep" }
+        "#,
+        ).file(
+            "root/src/lib.rs",
+            r#"
+            #[macro_use]
+            extern crate proc_macro_dep;
+
+            #[derive(Noop)]
+            pub struct X;
+        "#,
+        ).file(
+            "proc_macro_dep/Cargo.toml",
+            r#"
+            [project]
+            name = "proc_macro_dep"
+            version = "0.1.0"
+            authors = []
+
+            [lib]
+            proc-macro = true
+
+            [dependencies]
+            baz = "^0.1"
+        "#,
+        ).file(
+            "proc_macro_dep/src/lib.rs",
+            r#"
+            extern crate baz;
+            extern crate proc_macro;
+            use proc_macro::TokenStream;
+
+            #[proc_macro_derive(Noop)]
+            pub fn noop(_input: TokenStream) -> TokenStream {
+                "".parse().unwrap()
+            }
+        "#,
+        ).build();
+    Package::new("bar", "0.1.0").publish();
+    Package::new("baz", "0.1.0")
+        .dep("bar", "0.1")
+        .file("src/lib.rs", "extern crate bar;")
+        .publish();
+    p.cargo("test --all --target").arg(rustc_host()).run();
+}
+
+#[test]
+fn test_hint_not_masked_by_doctest() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            /// ```
+            /// assert_eq!(1, 1);
+            /// ```
+            pub fn this_works() {}
+        "#,
+        ).file(
+            "tests/integ.rs",
+            r#"
+            #[test]
+            fn this_fails() {
+                panic!();
+            }
+        "#,
+        ).build();
+    p.cargo("test --no-fail-fast")
+        .with_status(101)
+        .with_stdout_contains("test this_fails ... FAILED")
+        .with_stdout_contains("[..]this_works (line [..]ok")
+        .with_stderr_contains(
+            "[ERROR] test failed, to rerun pass \
+             '--test integ'",
+        ).run();
+}
+
+// When a test fails in a workspace member, the "to rerun pass ..." hint
+// must name the member with `-p b`, since `--lib` alone would be ambiguous
+// across the workspace.
+#[test]
+fn test_hint_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+        "#,
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "#[test] fn t1() {}")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.1.0"))
+        .file("b/src/lib.rs", "#[test] fn t1() {assert!(false)}")
+        .build();
+
+    p.cargo("test")
+        .with_stderr_contains("[ERROR] test failed, to rerun pass '-p b --lib'")
+        .with_status(101)
+        .run();
+}
+
+#[test]
+fn json_artifact_includes_test_flag() {
+    // Verify that the JSON artifact output includes `test` flag.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [profile.test]
+            opt-level = 1
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("test -v --message-format=json")
+        .with_json(
+            r#"
+    {
+        "reason":"compiler-artifact",
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "0",
+            "overflow_checks": true,
+            "test": false
+        },
+        "features": [],
+        "package_id":"foo 0.0.1 ([..])",
+        "target":{
+            "kind":["lib"],
+            "crate_types":["lib"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]lib.rs"
+        },
+        "filenames":["[..].rlib"],
+        "fresh": false
+    }
+
+    {
+        "reason":"compiler-artifact",
+        "profile": {
+            "debug_assertions": true,
+            "debuginfo": 2,
+            "opt_level": "1",
+            "overflow_checks": true,
+            "test": true
+        },
+        "features": [],
+        "package_id":"foo 0.0.1 ([..])",
+        "target":{
+            "kind":["lib"],
+            "crate_types":["lib"],
+            "edition": "2015",
+            "name":"foo",
+            "src_path":"[..]lib.rs"
+        },
+        "filenames":["[..]/foo-[..]"],
+        "fresh": false
+    }
+"#,
+        ).run();
+}
+
+#[test]
+fn test_build_script_links() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                links = 'something'
+
+                [lib]
+                test = false
+            "#,
+        ).file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("test --no-run").run();
+}
+
+// A staticlib-only crate cannot be doc-tested. `cargo test --doc` warns and
+// then errors (no testable library target remains), while a plain
+// `cargo test` silently skips doctests — the failing `assert_eq!(1,2)`
+// doctest below is never executed.
+#[test]
+fn doctest_skip_staticlib() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+
+                [lib]
+                crate-type = ["staticlib"]
+            "#,
+        ).file(
+            "src/lib.rs",
+            r#"
+            //! ```
+            //! assert_eq!(1,2);
+            //! ```
+            "#,
+        ).build();
+
+    p.cargo("test --doc")
+        .with_status(101)
+        .with_stderr(
+            "\
+[WARNING] doc tests are not supported for crate type(s) `staticlib` in package `foo`
+[ERROR] no library targets found in package `foo`",
+        ).run();
+
+    // No [DOCTEST] line expected here — only the lib test binary runs.
+    p.cargo("test")
+        .with_stderr(
+            "\
+[COMPILING] foo [..]
+[FINISHED] dev [..]
+[RUNNING] target/debug/deps/foo-[..]",
+        ).run();
+}
+
+#[test]
+fn can_not_mix_doc_tests_and_regular_tests() {
+    let p = project()
+        .file("src/lib.rs", "\
+/// ```
+/// assert_eq!(1, 1)
+/// ```
+pub fn foo() -> u8 { 1 }
+
+#[cfg(test)] mod tests {
+    #[test] fn it_works() { assert_eq!(2 + 2, 4); }
+}
+")
+        .build();
+
+    p.cargo("test")
+        .with_stderr("\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..]
+[DOCTEST] foo
+")
+        .with_stdout("
+running 1 test
+test tests::it_works ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+
+
+running 1 test
+test src/lib.rs - foo (line 1) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+\n")
+        .run();
+
+    p.cargo("test --lib")
+        .with_stderr("\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] target/debug/deps/foo-[..]\n")
+        .with_stdout("
+running 1 test
+test tests::it_works ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+\n")
+        .run();
+
+    p.cargo("test --doc")
+        .with_stderr("\
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[DOCTEST] foo
+")
+        .with_stdout("
+running 1 test
+test src/lib.rs - foo (line 1) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+
+").run();
+
+    p.cargo("test --lib --doc")
+        .with_status(101)
+        .with_stderr("[ERROR] Can't mix --doc with other target selecting options\n")
+        .run();
+}
+
+// `--all-targets` with only a library: compiles and runs the lib test
+// binary; note the expected stderr contains no [DOCTEST] line (doc tests
+// are not part of --all-targets).
+#[test]
+fn test_all_targets_lib() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("test --all-targets")
+        .with_stderr(
+            "\
+[COMPILING] foo [..]
+[FINISHED] dev [..]
+[RUNNING] [..]foo[..]
+",
+        ).run();
+}
+
+
+#[test]
+fn test_dep_with_dev() {
+    Package::new("devdep", "0.1.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [dependencies]
+            bar = { path = "bar" }
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+
+            [dev-dependencies]
+            devdep = "0.1"
+        "#,
+        )
+        .file("bar/src/lib.rs", "")
+        .build();
+
+    p.cargo("test -p bar")
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] package `bar` cannot be tested because it requires dev-dependencies \
+             and is not a member of the workspace",
+        )
+        .run();
+}
diff --git a/tests/testsuite/tool_paths.rs b/tests/testsuite/tool_paths.rs
new file mode 100644 (file)
index 0000000..5bd2567
--- /dev/null
@@ -0,0 +1,249 @@
+use support::rustc_host;
+use support::{basic_lib_manifest, project};
+
+#[test]
+fn pathless_tools() {
+    let target = rustc_host();
+
+    let foo = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}]
+            ar = "nonexistent-ar"
+            linker = "nonexistent-linker"
+        "#,
+                target
+            ),
+        ).build();
+
+    foo.cargo("build --verbose")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn absolute_tools() {
+    let target = rustc_host();
+    let root = if cfg!(windows) { r#"C:\"# } else { "/" };
+
+    // Escaped as they appear within a TOML config file
+    let config = if cfg!(windows) {
+        (
+            r#"C:\\bogus\\nonexistent-ar"#,
+            r#"C:\\bogus\\nonexistent-linker"#,
+        )
+    } else {
+        (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#)
+    };
+
+    let foo = project()
+        .file("Cargo.toml", &basic_lib_manifest("foo"))
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{target}]
+            ar = "{ar}"
+            linker = "{linker}"
+        "#,
+                target = target,
+                ar = config.0,
+                linker = config.1
+            ),
+        ).build();
+
+    foo.cargo("build --verbose").with_stderr(&format!(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] -C ar={root}bogus/nonexistent-ar -C linker={root}bogus/nonexistent-linker [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            root = root,
+        )).run();
+}
+
+#[test]
+fn relative_tools() {
+    let target = rustc_host();
+
+    // Escaped as they appear within a TOML config file
+    let config = if cfg!(windows) {
+        (r#".\\nonexistent-ar"#, r#".\\tools\\nonexistent-linker"#)
+    } else {
+        (r#"./nonexistent-ar"#, r#"./tools/nonexistent-linker"#)
+    };
+
+    // Funky directory structure to test that relative tool paths are made absolute
+    // by reference to the `.cargo/..` directory and not to (for example) the CWD.
+    let p = project()
+        .no_manifest()
+        .file("bar/Cargo.toml", &basic_lib_manifest("bar"))
+        .file("bar/src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{target}]
+            ar = "{ar}"
+            linker = "{linker}"
+        "#,
+                target = target,
+                ar = config.0,
+                linker = config.1
+            ),
+        ).build();
+
+    let prefix = p.root().into_os_string().into_string().unwrap();
+
+    p.cargo("build --verbose").cwd(p.root().join("bar")).with_stderr(&format!(
+            "\
+[COMPILING] bar v0.5.0 ([CWD])
+[RUNNING] `rustc [..] -C ar={prefix}/./nonexistent-ar -C linker={prefix}/./tools/nonexistent-linker [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+            prefix = prefix,
+        )).run();
+}
+
+#[test]
+fn custom_runner() {
+    let target = rustc_host();
+
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file("tests/test.rs", "")
+        .file("benches/bench.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.{}]
+            runner = "nonexistent-runner -r"
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("run -- --param")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+        ).run();
+
+    p.cargo("test --test test --verbose -- --param")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r [..]/target/debug/deps/test-[..][EXE] --param`
+",
+        ).run();
+
+    p.cargo("bench --bench bench --verbose -- --param")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc [..]`
+[RUNNING] `rustc [..]`
+[FINISHED] release [optimized] target(s) in [..]
+[RUNNING] `nonexistent-runner -r [..]/target/release/deps/bench-[..][EXE] --param --bench`
+",
+        ).run();
+}
+
+// can set a custom runner via `target.'cfg(..)'.runner`
+#[test]
+fn custom_runner_cfg() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [target.'cfg(not(target_os = "none"))']
+            runner = "nonexistent-runner -r"
+            "#,
+        ).build();
+
+    p.cargo("run -- --param")
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+        )).run();
+}
+
+// a custom runner set via `target.$triple.runner` has precedence over `target.'cfg(..)'.runner`
+#[test]
+fn custom_runner_cfg_precedence() {
+    let target = rustc_host();
+
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+            [target.'cfg(not(target_os = "none"))']
+            runner = "ignored-runner"
+
+            [target.{}]
+            runner = "nonexistent-runner -r"
+        "#,
+                target
+            ),
+        ).build();
+
+    p.cargo("run -- --param")
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "\
+            [COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+        )).run();
+}
+
+#[test]
+fn custom_runner_cfg_collision() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [target.'cfg(not(target_arch = "avr"))']
+            runner = "true"
+
+            [target.'cfg(not(target_os = "none"))']
+            runner = "false"
+            "#,
+        ).build();
+
+    p.cargo("run -- --param")
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "\
+[ERROR] several matching instances of `target.'cfg(..)'.runner` in `.cargo/config`
+",
+        )).run();
+}
diff --git a/tests/testsuite/update.rs b/tests/testsuite/update.rs
new file mode 100644 (file)
index 0000000..be527b2
--- /dev/null
@@ -0,0 +1,397 @@
+use std::fs::File;
+use std::io::prelude::*;
+
+use support::registry::Package;
+use support::{basic_manifest, project};
+
+#[test]
+fn minor_update_two_places() {
+    Package::new("log", "0.1.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1"
+                foo = { path = "foo" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    Package::new("log", "0.1.1").publish();
+
+    File::create(p.root().join("foo/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1.1"
+            "#,
+        ).unwrap();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn transitive_minor_update() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                serde = "0.1"
+                log = "0.1"
+                foo = { path = "foo" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                serde = "0.1"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("serde", "0.1.1").dep("log", "0.1.1").publish();
+
+    // Note that `serde` isn't actually updated here! The default behavior for
+// `update` right now is to attempt, as conservatively as possible, to satisfy
+    // an update. In this case we previously locked the dependency graph to `log
+    // 0.1.0`, but nothing on the command line says we're allowed to update
+    // that. As a result the update of `serde` here shouldn't update to `serde
+    // 0.1.1` as that would also force an update to `log 0.1.1`.
+    //
+    // Also note that this is probably counterintuitive and weird. We may wish
+    // to change this one day.
+    p.cargo("update -p serde")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+",
+        ).run();
+}
+
+#[test]
+fn conservative() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                serde = "0.1"
+                log = "0.1"
+                foo = { path = "foo" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                serde = "0.1"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("serde", "0.1.1").dep("log", "0.1").publish();
+
+    p.cargo("update -p serde")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] serde v0.1.0 -> v0.1.1
+",
+        ).run();
+}
+
+#[test]
+fn update_via_new_dep() {
+    Package::new("log", "0.1.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1"
+                # foo = { path = "foo" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1.1"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    Package::new("log", "0.1.1").publish();
+
+    p.uncomment_root_manifest();
+    p.cargo("build").env("RUST_LOG", "cargo=trace").run();
+}
+
+#[test]
+fn update_via_new_member() {
+    Package::new("log", "0.1.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [workspace]
+                # members = [ "foo" ]
+
+                [dependencies]
+                log = "0.1"
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1.1"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    Package::new("log", "0.1.1").publish();
+
+    p.uncomment_root_manifest();
+    p.cargo("build").run();
+}
+
+#[test]
+fn add_dep_deep_new_requirement() {
+    Package::new("log", "0.1.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                log = "0.1"
+                # bar = "0.1"
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("bar", "0.1.0").dep("log", "0.1.1").publish();
+
+    p.uncomment_root_manifest();
+    p.cargo("build").run();
+}
+
+#[test]
+fn everything_real_deep() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("foo", "0.1.0").dep("log", "0.1").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                foo = "0.1"
+                # bar = "0.1"
+            "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("bar", "0.1.0").dep("log", "0.1.1").publish();
+
+    p.uncomment_root_manifest();
+    p.cargo("build").run();
+}
+
+#[test]
+fn change_package_version() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "a-foo"
+                version = "0.2.0-alpha"
+                authors = []
+
+                [dependencies]
+                bar = { path = "bar", version = "0.2.0-alpha" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0-alpha"))
+        .file("bar/src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+                [[package]]
+                name = "foo"
+                version = "0.2.0"
+                dependencies = ["bar 0.2.0"]
+
+                [[package]]
+                name = "bar"
+                version = "0.2.0"
+            "#,
+        ).build();
+
+    p.cargo("build").run();
+}
+
+#[test]
+fn update_precise() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("serde", "0.1.0").publish();
+    Package::new("serde", "0.2.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "bar"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                serde = "0.2"
+                foo = { path = "foo" }
+            "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+
+                [dependencies]
+                serde = "0.1"
+            "#,
+        ).file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    Package::new("serde", "0.2.0").publish();
+
+    p.cargo("update -p serde:0.2.1 --precise 0.2.0")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] serde v0.2.1 -> v0.2.0
+",
+        ).run();
+}
+
+#[test]
+fn preserve_top_comment() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("update").run();
+
+    let mut lockfile = p.read_file("Cargo.lock");
+    lockfile.insert_str(0, "# @generated\n");
+    lockfile.insert_str(0, "# some other comment\n");
+    println!("saving Cargo.lock contents:\n{}", lockfile);
+
+    p.change_file("Cargo.lock", &lockfile);
+
+    p.cargo("update").run();
+
+    let lockfile2 = p.read_file("Cargo.lock");
+    println!("loaded Cargo.lock contents:\n{}", lockfile2);
+
+    assert!(lockfile == lockfile2);
+}
diff --git a/tests/testsuite/verify_project.rs b/tests/testsuite/verify_project.rs
new file mode 100644 (file)
index 0000000..d368b7c
--- /dev/null
@@ -0,0 +1,44 @@
+use support::{basic_bin_manifest, main_file, project};
+
+fn verify_project_success_output() -> String {
+    r#"{"success":"true"}"#.into()
+}
+
+#[test]
+fn cargo_verify_project_path_to_cargo_toml_relative() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("verify-project --manifest-path foo/Cargo.toml")
+        .cwd(p.root().parent().unwrap())
+        .with_stdout(verify_project_success_output())
+        .run();
+}
+
+#[test]
+fn cargo_verify_project_path_to_cargo_toml_absolute() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("verify-project --manifest-path")
+        .arg(p.root().join("Cargo.toml"))
+        .cwd(p.root().parent().unwrap())
+        .with_stdout(verify_project_success_output())
+        .run();
+}
+
+#[test]
+fn cargo_verify_project_cwd() {
+    let p = project()
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .build();
+
+    p.cargo("verify-project")
+        .with_stdout(verify_project_success_output())
+        .run();
+}
diff --git a/tests/testsuite/version.rs b/tests/testsuite/version.rs
new file mode 100644 (file)
index 0000000..cd8cded
--- /dev/null
@@ -0,0 +1,41 @@
+use cargo;
+use support::project;
+
+#[test]
+fn simple() {
+    let p = project().build();
+
+    p.cargo("version")
+        .with_stdout(&format!("{}\n", cargo::version()))
+        .run();
+
+    p.cargo("--version")
+        .with_stdout(&format!("{}\n", cargo::version()))
+        .run();
+}
+
+#[test]
+#[cfg_attr(target_os = "windows", ignore)]
+fn version_works_without_rustc() {
+    let p = project().build();
+    p.cargo("version").env("PATH", "").run();
+}
+
+#[test]
+fn version_works_with_bad_config() {
+    let p = project().file(".cargo/config", "this is not toml").build();
+    p.cargo("version").run();
+}
+
+#[test]
+fn version_works_with_bad_target_dir() {
+    let p = project()
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            target-dir = 4
+        "#,
+        ).build();
+    p.cargo("version").run();
+}
diff --git a/tests/testsuite/warn_on_failure.rs b/tests/testsuite/warn_on_failure.rs
new file mode 100644 (file)
index 0000000..f4f8aba
--- /dev/null
@@ -0,0 +1,105 @@
+use support::registry::Package;
+use support::{project, Project};
+
+static WARNING1: &'static str = "Hello! I'm a warning. :)";
+static WARNING2: &'static str = "And one more!";
+
+fn make_lib(lib_src: &str) {
+    Package::new("bar", "0.0.1")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            authors = []
+            version = "0.0.1"
+            build = "build.rs"
+        "#,
+        ).file(
+            "build.rs",
+            &format!(
+                r#"
+            fn main() {{
+                use std::io::Write;
+                println!("cargo:warning={{}}", "{}");
+                println!("hidden stdout");
+                write!(&mut ::std::io::stderr(), "hidden stderr");
+                println!("cargo:warning={{}}", "{}");
+            }}
+        "#,
+                WARNING1, WARNING2
+            ),
+        ).file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src))
+        .publish();
+}
+
+fn make_upstream(main_src: &str) -> Project {
+    project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("src/main.rs", &format!("fn main() {{ {} }}", main_src))
+        .build()
+}
+
+#[test]
+fn no_warning_on_success() {
+    make_lib("");
+    let upstream = make_upstream("");
+    upstream
+        .cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 ([..])
+[COMPILING] bar v0.0.1
+[COMPILING] foo v0.0.1 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+#[test]
+fn no_warning_on_bin_failure() {
+    make_lib("");
+    let upstream = make_upstream("hi()");
+    upstream
+        .cargo("build")
+        .with_status(101)
+        .with_stdout_does_not_contain("hidden stdout")
+        .with_stderr_does_not_contain("hidden stderr")
+        .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1))
+        .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2))
+        .with_stderr_contains("[UPDATING] `[..]` index")
+        .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])")
+        .with_stderr_contains("[COMPILING] bar v0.0.1")
+        .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])")
+        .run();
+}
+
+#[test]
+fn warning_on_lib_failure() {
+    make_lib("err()");
+    let upstream = make_upstream("");
+    upstream
+        .cargo("build")
+        .with_status(101)
+        .with_stdout_does_not_contain("hidden stdout")
+        .with_stderr_does_not_contain("hidden stderr")
+        .with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])")
+        .with_stderr_contains("[UPDATING] `[..]` index")
+        .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])")
+        .with_stderr_contains("[COMPILING] bar v0.0.1")
+        .with_stderr_contains(&format!("[WARNING] {}", WARNING1))
+        .with_stderr_contains(&format!("[WARNING] {}", WARNING2))
+        .run();
+}
diff --git a/tests/testsuite/workspaces.rs b/tests/testsuite/workspaces.rs
new file mode 100644 (file)
index 0000000..e933848
--- /dev/null
@@ -0,0 +1,1881 @@
+use std::env;
+use std::fs::{self, File};
+use std::io::{Read, Write};
+
+use support::registry::Package;
+use support::sleep_ms;
+use support::{basic_lib_manifest, basic_manifest, git, project};
+
+#[test]
+fn simple_explicit() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+
+    p.cargo("build").cwd(p.root().join("bar")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[test]
+fn simple_explicit_default_members() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+            default-members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("bar").is_file());
+    assert!(!p.bin("foo").is_file());
+}
+
+#[test]
+fn inferred_root() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+
+    p.cargo("build").cwd(p.root().join("bar")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[test]
+fn inferred_path_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+
+    p.cargo("build").cwd(p.root().join("bar")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[test]
+fn transitive_path_dep() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { path = "../baz" }
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "")
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("baz/src/main.rs", "fn main() {}")
+        .file("baz/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+    assert!(!p.bin("baz").is_file());
+
+    p.cargo("build").cwd(p.root().join("bar")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+    assert!(!p.bin("baz").is_file());
+
+    p.cargo("build").cwd(p.root().join("baz")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+    assert!(p.bin("baz").is_file());
+
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+    assert!(!p.root().join("baz/Cargo.lock").is_file());
+}
+
+#[test]
+fn parent_pointer_works() {
+    let p = project()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "../bar" }
+
+            [workspace]
+        "#,
+        ).file("foo/src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../foo"
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    p.cargo("build").cwd(p.root().join("bar")).run();
+    assert!(p.root().join("foo/Cargo.lock").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+#[test]
+fn same_names_in_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: two packages named `foo` in this workspace:
+- [..]Cargo.toml
+- [..]Cargo.toml
+",
+        ).run();
+}
+
+#[test]
+fn parent_doesnt_point_to_child() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .cwd(p.root().join("bar"))
+        .with_status(101)
+        .with_stderr(
+            "\
+error: current package believes it's in a workspace when it's not:
+current: [..]Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable [..]
+",
+        ).run();
+}
+
+#[test]
+fn invalid_parent_pointer() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = "foo"
+        "#,
+        ).file("src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+",
+        ).run();
+}
+
+/// A `workspace.members` entry naming a directory that contains no
+/// manifest must produce a "failed to read Cargo.toml" error.
+#[test]
+fn invalid_members() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["foo"]
+        "#,
+        ).file("src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+",
+        ).run();
+}
+
+/// An empty `[workspace]` table on a package manifest is valid and builds.
+#[test]
+fn bare_workspace_ok() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+}
+
+/// Two manifests that both declare `[workspace]` and pull each other in as
+/// members must be rejected as multiple roots in the same workspace.
+#[test]
+fn two_roots() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = [".."]
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: multiple workspace roots found in the same workspace:
+  [..]
+  [..]
+",
+        ).run();
+}
+
+/// `workspace = "bar"` pointing at a manifest that has no `[workspace]`
+/// table must fail: the inferred root wasn't actually a root.
+#[test]
+fn workspace_isnt_root() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr("error: root of a workspace inferred but wasn't a root: [..]")
+        .run();
+}
+
+/// A member (`bar`) whose `workspace` key points at a different root
+/// (`../baz`) than the workspace that listed it must be reported as
+/// belonging to the wrong workspace.
+#[test]
+fn dangling_member() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../baz"
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../baz"
+        "#,
+        ).file("baz/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: package `[..]` is a member of the wrong workspace
+expected: [..]
+actual: [..]
+",
+        ).run();
+}
+
+/// `workspace` keys forming a cycle (foo -> bar -> foo) must fail the
+/// build; only the exit status is asserted, not the message.
+#[test]
+fn cycle() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = "bar"
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").with_status(101).run();
+}
+
+/// Workspace members share one resolution of a common dependency: root
+/// wants `dep1 = "0.1"` and member wants `dep1 = "< 0.1.5"`, so with
+/// 0.1.3 and 0.1.8 published the whole workspace settles on v0.1.3.
+#[test]
+fn share_dependencies() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "0.1"
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "< 0.1.5"
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    Package::new("dep1", "0.1.3").publish();
+    Package::new("dep1", "0.1.8").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep1 v0.1.3 ([..])
+[COMPILING] dep1 v0.1.3
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+/// `cargo fetch` at the workspace root downloads dependencies of every
+/// member, not just of the root package.
+#[test]
+fn fetch_fetches_all() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "*"
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    Package::new("dep1", "0.1.3").publish();
+
+    p.cargo("fetch")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep1 v0.1.3 ([..])
+",
+        ).run();
+}
+
+/// The shared lockfile pins dependency versions for all members: after
+/// `generate-lockfile` pins dep1/dep2 at 0.1.0, publishing 0.1.1 of each
+/// must not change what the root or the member builds.
+#[test]
+fn lock_works_for_everyone() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep2 = "0.1"
+
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "0.1"
+        "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    Package::new("dep1", "0.1.0").publish();
+    Package::new("dep2", "0.1.0").publish();
+
+    p.cargo("generate-lockfile")
+        .with_stderr("[UPDATING] `[..]` index")
+        .run();
+
+    // Newer versions exist now, but the lockfile must keep 0.1.0 in use.
+    Package::new("dep1", "0.1.1").publish();
+    Package::new("dep2", "0.1.1").publish();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep2 v0.1.0 ([..])
+[COMPILING] dep2 v0.1.0
+[COMPILING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    p.cargo("build")
+        .cwd(p.root().join("bar"))
+        .with_stderr(
+            "\
+[DOWNLOADING] crates ...
+[DOWNLOADED] dep1 v0.1.0 ([..])
+[COMPILING] dep1 v0.1.0
+[COMPILING] bar v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+}
+
+/// Building inside a member of a virtual workspace writes `Cargo.lock`
+/// (and the binary) at the workspace root, never inside the member.
+#[test]
+fn virtual_works() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+    p.cargo("build").cwd(p.root().join("bar")).run();
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(p.bin("bar").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+/// `cargo build --package bar` from the root of a virtual workspace
+/// selects the member; outputs land at the root as usual.
+#[test]
+fn explicit_package_argument_works_with_virtual_manifest() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+    p.cargo("build --package bar").run();
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(p.bin("bar").is_file());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+}
+
+/// A virtual root with no `members` that physically contains `bar` makes
+/// building in `bar` fail, with a hint to add it to `workspace.members`.
+#[test]
+fn virtual_misconfigure() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+    p.cargo("build")
+        .cwd(p.root().join("bar"))
+        .with_status(101)
+        .with_stderr(
+            "\
+error: current package believes it's in a workspace when it's not:
+current:   [CWD]/Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable by adding `bar` to the `workspace.members` array of the \
+manifest located at: [..]
+",
+        ).run();
+}
+
+/// `cargo build` at a virtual workspace root implicitly builds the members.
+#[test]
+fn virtual_build_all_implied() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+    p.cargo("build").run();
+}
+
+/// `workspace.default-members` restricts what a bare `cargo build` at the
+/// root compiles: `bar` is built, `baz` is not.
+#[test]
+fn virtual_default_members() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar", "baz"]
+            default-members = ["bar"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}")
+        .file("baz/src/main.rs", "fn main() {}");
+    let p = p.build();
+    p.cargo("build").run();
+    assert!(p.bin("bar").is_file());
+    assert!(!p.bin("baz").is_file());
+}
+
+/// A `default-members` entry that is not also in `members` is an error.
+#[test]
+fn virtual_default_member_is_not_a_member() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["bar"]
+            default-members = ["something-else"]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: package `[..]something-else` is listed in workspace’s default-members \
+but is not a member.
+",
+        ).run();
+}
+
+/// A virtual manifest with zero members cannot be built: there is no package.
+#[test]
+fn virtual_build_no_members() {
+    let p = project().file(
+        "Cargo.toml",
+        r#"
+            [workspace]
+        "#,
+    );
+    let p = p.build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: manifest path `[..]` contains no package: The manifest is virtual, \
+and the workspace has no members.
+",
+        ).run();
+}
+
+/// A package-root workspace listing a member that is itself a virtual
+/// workspace root is rejected as multiple roots in one workspace.
+#[test]
+fn include_virtual() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            [workspace]
+            members = ["bar"]
+        "#,
+        ).file("src/main.rs", "")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [workspace]
+        "#,
+        );
+    let p = p.build();
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: multiple workspace roots found in the same workspace:
+  [..]
+  [..]
+",
+        ).run();
+}
+
+/// Path dependencies of the root (`p3`) and of members (`p2` via `p1`) are
+/// implicit workspace members: wherever the build is run from, output goes
+/// to the shared root `target` and no per-crate target dirs appear.
+#[test]
+fn members_include_path_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["p1"]
+
+            [dependencies]
+            p3 = { path = "p3" }
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "p1/Cargo.toml",
+            r#"
+            [project]
+            name = "p1"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            p2 = { path = "../p2" }
+        "#,
+        ).file("p1/src/lib.rs", "")
+        .file("p2/Cargo.toml", &basic_manifest("p2", "0.1.0"))
+        .file("p2/src/lib.rs", "")
+        .file("p3/Cargo.toml", &basic_manifest("p3", "0.1.0"))
+        .file("p3/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("p1")).run();
+    p.cargo("build").cwd(p.root().join("p2")).run();
+    p.cargo("build").cwd(p.root().join("p3")).run();
+    p.cargo("build").run();
+
+    assert!(p.root().join("target").is_dir());
+    assert!(!p.root().join("p1/target").is_dir());
+    assert!(!p.root().join("p2/target").is_dir());
+    assert!(!p.root().join("p3/target").is_dir());
+}
+
+/// `cargo new` inside a workspace that will not actually include the new
+/// crate emits a warning about the invalid configuration but still
+/// creates the package.
+#[test]
+fn new_warns_you_this_will_not_work() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#,
+        ).file("src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("new --lib bar")
+        .env("USER", "foo")
+        .with_stderr(
+            "\
+warning: compiling this new crate may not work due to invalid workspace \
+configuration
+
+current package believes it's in a workspace when it's not:
+current: [..]
+workspace: [..]
+
+this may be fixable by ensuring that this crate is depended on by the workspace \
+root: [..]
+[CREATED] library `bar` package
+",
+        ).run();
+}
+
+/// The workspace lockfile must be byte-identical whether the build is run
+/// from the root or from a member crate.
+#[test]
+fn lock_doesnt_change_depending_on_crate() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ['baz']
+
+            [dependencies]
+            foo = "*"
+        "#,
+        ).file("src/lib.rs", "")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "*"
+        "#,
+        ).file("baz/src/lib.rs", "");
+    let p = p.build();
+
+    Package::new("foo", "1.0.0").publish();
+    Package::new("bar", "1.0.0").publish();
+
+    p.cargo("build").run();
+
+    // Snapshot the lockfile after building from the root...
+    let mut lockfile = String::new();
+    t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile));
+
+    p.cargo("build").cwd(p.root().join("baz")).run();
+
+    // ...and it must be unchanged after building from the member.
+    let mut lockfile2 = String::new();
+    t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile2));
+
+    assert_eq!(lockfile, lockfile2);
+}
+
+/// Members share build output: after editing `lib` (changing its return
+/// value from 0 to 1) and rebuilding it, running `bin` — which asserts the
+/// old value — must relink against the new `lib` and fail at runtime.
+#[test]
+fn rebuild_please() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ['lib', 'bin']
+        "#,
+        ).file("lib/Cargo.toml", &basic_manifest("lib", "0.1.0"))
+        .file(
+            "lib/src/lib.rs",
+            r#"
+            pub fn foo() -> u32 { 0 }
+        "#,
+        ).file(
+            "bin/Cargo.toml",
+            r#"
+            [package]
+            name = "bin"
+            version = "0.1.0"
+
+            [dependencies]
+            lib = { path = "../lib" }
+        "#,
+        ).file(
+            "bin/src/main.rs",
+            r#"
+            extern crate lib;
+
+            fn main() {
+                assert_eq!(lib::foo(), 0);
+            }
+        "#,
+        );
+    let p = p.build();
+
+    p.cargo("run").cwd(p.root().join("bin")).run();
+
+    // Make sure the edited file gets a newer mtime than the first build.
+    sleep_ms(1000);
+
+    t!(t!(File::create(p.root().join("lib/src/lib.rs")))
+        .write_all(br#"pub fn foo() -> u32 { 1 }"#));
+
+    p.cargo("build").cwd(p.root().join("lib")).run();
+
+    // The stale assertion in `bin` now panics -> exit status 101.
+    p.cargo("run")
+        .cwd(p.root().join("bin"))
+        .with_status(101)
+        .run();
+}
+
+/// Depending on a crate that lives inside a workspace in a git repository
+/// builds successfully.
+#[test]
+fn workspace_in_git() {
+    let git_project = git::new("dep1", |project| {
+        project
+            .file(
+                "Cargo.toml",
+                r#"
+                [workspace]
+                members = ["foo"]
+            "#,
+            ).file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+            .file("foo/src/lib.rs", "")
+    }).unwrap();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "lib"
+            version = "0.1.0"
+
+            [dependencies.foo]
+            git = '{}'
+        "#,
+                git_project.url()
+            ),
+        ).file(
+            "src/lib.rs",
+            r#"
+            pub fn foo() -> u32 { 0 }
+        "#,
+        );
+    let p = p.build();
+
+    p.cargo("build").run();
+}
+
+/// A stale `Cargo.lock` entry for a package (`b`) that no longer exists in
+/// the workspace must not break the build.
+/// NOTE(review): "nonexistant" in the fn name is a typo for "nonexistent";
+/// left as-is since renaming would change the test's public identifier.
+#[test]
+fn lockfile_can_specify_nonexistant_members() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a"]
+        "#,
+        ).file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/main.rs", "fn main() {}")
+        .file(
+            "Cargo.lock",
+            r#"
+            [[package]]
+            name = "a"
+            version = "0.1.0"
+
+            [[package]]
+            name = "b"
+            version = "0.1.0"
+        "#,
+        );
+
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("a")).run();
+}
+
+/// `cargo update` on a memberless virtual workspace errors out — there is
+/// nothing to lock.
+#[test]
+fn you_cannot_generate_lockfile_for_empty_workspaces() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+        "#,
+        ).file("bar/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("update")
+        .with_status(101)
+        .with_stderr("error: you can't generate a lockfile for an empty workspace.")
+        .run();
+}
+
+/// `cargo test -p bar` must build `bar`'s dev-dependency (`baz`) even
+/// though it is only reachable transitively through a workspace member.
+#[test]
+fn workspace_with_transitive_dev_deps() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = ["mbrubeck@example.com"]
+
+            [dependencies.bar]
+            path = "bar"
+
+            [workspace]
+        "#,
+        ).file("src/main.rs", r#"fn main() {}"#)
+        .file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            name = "bar"
+            version = "0.5.0"
+            authors = ["mbrubeck@example.com"]
+
+            [dev-dependencies.baz]
+            path = "../baz"
+        "#,
+        ).file(
+            "bar/src/lib.rs",
+            r#"
+            pub fn init() {}
+
+            #[cfg(test)]
+
+            #[test]
+            fn test() {
+                extern crate baz;
+                baz::do_stuff();
+            }
+        "#,
+        ).file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
+        .file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#);
+    let p = p.build();
+
+    p.cargo("test -p bar").run();
+}
+
+/// A syntactically invalid manifest in a parent directory makes building a
+/// child crate fail with a manifest-parse error.
+#[test]
+fn error_if_parent_cargo_toml_is_invalid() {
+    let p = project()
+        .file("Cargo.toml", "Totally not a TOML file")
+        .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .cwd(p.root().join("bar"))
+        .with_status(101)
+        .with_stderr_contains("[ERROR] failed to parse manifest at `[..]`")
+        .run();
+}
+
+/// `workspace.members` may reference a sibling directory via `../`; both
+/// the root and the member build from their own directories.
+#[test]
+fn relative_path_for_member_works() {
+    let p = project()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+        [project]
+        name = "foo"
+        version = "0.1.0"
+        authors = []
+
+        [workspace]
+        members = ["../bar"]
+    "#,
+        ).file("foo/src/main.rs", "fn main() {}")
+        .file(
+            "bar/Cargo.toml",
+            r#"
+        [project]
+        name = "bar"
+        version = "0.1.0"
+        authors = []
+        workspace = "../foo"
+    "#,
+        ).file("bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    p.cargo("build").cwd(p.root().join("bar")).run();
+}
+
+/// `--manifest-path` accepts relative paths (`./Cargo.toml` from the root,
+/// `../Cargo.toml` from inside a path dependency).
+#[test]
+fn relative_path_for_root_works() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+        [project]
+        name = "foo"
+        version = "0.1.0"
+        authors = []
+
+        [workspace]
+
+        [dependencies]
+        subproj = { path = "./subproj" }
+    "#,
+        ).file("src/main.rs", "fn main() {}")
+        .file("subproj/Cargo.toml", &basic_manifest("subproj", "0.1.0"))
+        .file("subproj/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build --manifest-path ./Cargo.toml")
+        .run();
+
+    p.cargo("build --manifest-path ../Cargo.toml")
+        .cwd(p.root().join("subproj"))
+        .run();
+}
+
+/// A path dependency located outside the workspace directory is usable as
+/// a dependency without being treated as a workspace member.
+#[test]
+fn path_dep_outside_workspace_is_not_member() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "ws/Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "../foo" }
+
+            [workspace]
+        "#,
+        ).file("ws/src/lib.rs", r"extern crate foo;")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("ws")).run();
+}
+
+/// Mixed membership: `bar` is a member (via `../bar` and a `workspace`
+/// back-pointer) so its output goes to `ws`; `foo` is outside the
+/// workspace and keeps its own lockfile and target directory.
+#[test]
+fn test_in_and_out_of_workspace() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "ws/Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "../foo" }
+
+            [workspace]
+            members = [ "../bar" ]
+        "#,
+        ).file(
+            "ws/src/lib.rs",
+            r"extern crate foo; pub fn f() { foo::f() }",
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "../bar" }
+        "#,
+        ).file(
+            "foo/src/lib.rs",
+            "extern crate bar; pub fn f() { bar::f() }",
+        ).file(
+            "bar/Cargo.toml",
+            r#"
+            [project]
+            workspace = "../ws"
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#,
+        ).file("bar/src/lib.rs", "pub fn f() { }");
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("ws")).run();
+
+    // Workspace build: everything lands under ws/, nothing under foo/ or bar/.
+    assert!(p.root().join("ws/Cargo.lock").is_file());
+    assert!(p.root().join("ws/target").is_dir());
+    assert!(!p.root().join("foo/Cargo.lock").is_file());
+    assert!(!p.root().join("foo/target").is_dir());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+    assert!(!p.root().join("bar/target").is_dir());
+
+    // Building `foo` (a non-member) creates its own lock + target;
+    // its path dep `bar` still produces none of its own.
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    assert!(p.root().join("foo/Cargo.lock").is_file());
+    assert!(p.root().join("foo/target").is_dir());
+    assert!(!p.root().join("bar/Cargo.lock").is_file());
+    assert!(!p.root().join("bar/target").is_dir());
+}
+
+/// A path dependency nested beneath a member (`foo/bar`) belongs to the
+/// workspace: it never gets its own lockfile or target directory, even
+/// when built from its own directory.
+#[test]
+fn test_path_dependency_under_member() {
+    let p = project()
+        .file(
+            "ws/Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = { path = "../foo" }
+
+            [workspace]
+        "#,
+        ).file(
+            "ws/src/lib.rs",
+            r"extern crate foo; pub fn f() { foo::f() }",
+        ).file(
+            "foo/Cargo.toml",
+            r#"
+            [project]
+            workspace = "../ws"
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "./bar" }
+        "#,
+        ).file(
+            "foo/src/lib.rs",
+            "extern crate bar; pub fn f() { bar::f() }",
+        ).file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("foo/bar/src/lib.rs", "pub fn f() { }");
+    let p = p.build();
+
+    p.cargo("build").cwd(p.root().join("ws")).run();
+
+    assert!(!p.root().join("foo/bar/Cargo.lock").is_file());
+    assert!(!p.root().join("foo/bar/target").is_dir());
+
+    p.cargo("build").cwd(p.root().join("foo/bar")).run();
+
+    assert!(!p.root().join("foo/bar/Cargo.lock").is_file());
+    assert!(!p.root().join("foo/bar/target").is_dir());
+}
+
+/// A crate listed in `workspace.exclude` is not a member: building it from
+/// its own directory creates its own target directory.
+#[test]
+fn excluded_simple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            exclude = ["foo"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.root().join("target").is_dir());
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    assert!(p.root().join("foo/target").is_dir());
+}
+
+/// An explicit `members` entry wins over a covering `exclude` entry:
+/// `foo` is excluded (own target dir) but `foo/bar` is a member (none).
+#[test]
+fn exclude_members_preferred() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["foo/bar"]
+            exclude = ["foo"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "")
+        .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("foo/bar/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.root().join("target").is_dir());
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    assert!(p.root().join("foo/target").is_dir());
+    p.cargo("build").cwd(p.root().join("foo/bar")).run();
+    assert!(!p.root().join("foo/bar/target").is_dir());
+}
+
+/// Depending on a crate under an excluded directory works, but the crate
+/// stays excluded: built on its own, `foo/bar` gets its own target dir.
+#[test]
+fn exclude_but_also_depend() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "ws"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "foo/bar" }
+
+            [workspace]
+            exclude = ["foo"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "")
+        .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("foo/bar/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.root().join("target").is_dir());
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    assert!(p.root().join("foo/target").is_dir());
+    p.cargo("build").cwd(p.root().join("foo/bar")).run();
+    assert!(p.root().join("foo/bar/target").is_dir());
+}
+
+/// Glob patterns in `members` (`crates/*`) pick up bar/baz as members
+/// while `exclude = ["crates/qux"]` removes qux, which therefore builds
+/// standalone with its own lockfile.
+#[test]
+fn glob_syntax() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/*"]
+            exclude = ["crates/qux"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/bar/src/main.rs", "fn main() {}")
+        .file("crates/baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/baz/src/main.rs", "fn main() {}")
+        .file("crates/qux/Cargo.toml", r#"
+            [project]
+            name = "qux"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("crates/qux/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+    assert!(!p.bin("baz").is_file());
+
+    p.cargo("build").cwd(p.root().join("crates/bar")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+
+    p.cargo("build").cwd(p.root().join("crates/baz")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("baz").is_file());
+
+    // qux was excluded, so it builds into its own directory...
+    p.cargo("build").cwd(p.root().join("crates/qux")).run();
+    assert!(!p.bin("qux").is_file());
+
+    // ...and keeps its own lockfile, unlike the members.
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("crates/bar/Cargo.lock").is_file());
+    assert!(!p.root().join("crates/baz/Cargo.lock").is_file());
+    assert!(p.root().join("crates/qux/Cargo.lock").is_file());
+}
+
+/* FIXME: This test fails because of how `workspace.exclude` and `workspace.members` currently interact.
+#[test]
+fn glob_syntax_2() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/b*"]
+            exclude = ["crates/q*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/bar/src/main.rs", "fn main() {}")
+        .file("crates/baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/baz/src/main.rs", "fn main() {}")
+        .file("crates/qux/Cargo.toml", r#"
+            [project]
+            name = "qux"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("crates/qux/src/main.rs", "fn main() {}");
+    p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+    assert!(!p.bin("baz").is_file());
+
+    p.cargo("build").cwd(p.root().join("crates/bar")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+
+    p.cargo("build").cwd(p.root().join("crates/baz")).run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("baz").is_file());
+
+    p.cargo("build").cwd(p.root().join("crates/qux")).run();
+    assert!(!p.bin("qux").is_file());
+
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("crates/bar/Cargo.lock").is_file());
+    assert!(!p.root().join("crates/baz/Cargo.lock").is_file());
+    assert!(p.root().join("crates/qux/Cargo.lock").is_file());
+}
+*/
+
+/// A `members` glob matching a directory without a `Cargo.toml`
+/// (`crates/bar` has only sources) must produce a read error.
+#[test]
+fn glob_syntax_invalid_members() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+",
+        ).run();
+}
+
+/// This is a freshness test for feature use with workspaces
+///
+/// feat_lib is used by caller1 and caller2, but with different features enabled.
+/// This test ensures that alternating building caller1, caller2 doesn't force
+/// recompile of feat_lib.
+///
+/// Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
+/// a single cargo build at the top level will be enough.
+#[test]
+fn dep_used_with_separate_features() {
+    // Workspace layout: feat_lib (has an optional `myfeature`), caller1
+    // (depends on feat_lib with default features), caller2 (depends on
+    // feat_lib *with* `myfeature`, plus on caller1).
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["feat_lib", "caller1", "caller2"]
+        "#,
+        ).file(
+            "feat_lib/Cargo.toml",
+            r#"
+            [project]
+            name = "feat_lib"
+            version = "0.1.0"
+            authors = []
+
+            [features]
+            myfeature = []
+        "#,
+        ).file("feat_lib/src/lib.rs", "")
+        .file(
+            "caller1/Cargo.toml",
+            r#"
+            [project]
+            name = "caller1"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            feat_lib = { path = "../feat_lib" }
+        "#,
+        ).file("caller1/src/main.rs", "fn main() {}")
+        .file("caller1/src/lib.rs", "")
+        .file(
+            "caller2/Cargo.toml",
+            r#"
+            [project]
+            name = "caller2"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            feat_lib = { path = "../feat_lib", features = ["myfeature"] }
+            caller1 = { path = "../caller1" }
+        "#,
+        ).file("caller2/src/main.rs", "fn main() {}")
+        .file("caller2/src/lib.rs", "");
+    let p = p.build();
+
+    // Build the entire workspace
+    // (expects exactly one compile of each member in this first pass).
+    p.cargo("build --all")
+        .with_stderr(
+            "\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[..]Compiling caller2 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+    assert!(p.bin("caller1").is_file());
+    assert!(p.bin("caller2").is_file());
+
+    // Build caller1. should build the dep library. Because the features
+    // are different than the full workspace, it rebuilds.
+    // Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
+    // a single cargo build at the top level will be enough.
+    p.cargo("build")
+        .cwd(p.root().join("caller1"))
+        .with_stderr(
+            "\
+[..]Compiling feat_lib v0.1.0 ([..])
+[..]Compiling caller1 v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        ).run();
+
+    // Alternate building caller2/caller1 a few times, just to make sure
+    // features are being built separately.  Should not rebuild anything
+    // (each run must report only [FINISHED], i.e. everything is fresh).
+    p.cargo("build")
+        .cwd(p.root().join("caller2"))
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+    p.cargo("build")
+        .cwd(p.root().join("caller1"))
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+    p.cargo("build")
+        .cwd(p.root().join("caller2"))
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+}
+
+/// A git dependency's build script re-runs `cargo metadata` on its own
+/// checkout, which lives under CARGO_HOME (set to a dir inside the project
+/// root here). The outer build must succeed — i.e. workspace discovery for
+/// the dep's manifest presumably must not walk up past CARGO_HOME into the
+/// enclosing `foo` workspace (the build script exits 1 if metadata fails).
+#[test]
+fn dont_recurse_out_of_cargo_home() {
+    let git_project = git::new("dep", |project| {
+        project
+            .file("Cargo.toml", &basic_manifest("dep", "0.1.0"))
+            .file("src/lib.rs", "")
+            .file(
+                "build.rs",
+                r#"
+                use std::env;
+                use std::path::Path;
+                use std::process::{self, Command};
+
+                fn main() {
+                    let cargo = env::var_os("CARGO").unwrap();
+                    let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap();
+                    let output = Command::new(cargo)
+                        .args(&["metadata", "--format-version", "1", "--manifest-path"])
+                        .arg(&Path::new(&cargo_manifest_dir).join("Cargo.toml"))
+                        .output()
+                        .unwrap();
+                    if !output.status.success() {
+                        eprintln!("{}", String::from_utf8(output.stderr).unwrap());
+                        process::exit(1);
+                    }
+                }
+            "#,
+            )
+    }).unwrap();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies.dep]
+            git = "{}"
+
+            [workspace]
+        "#,
+                git_project.url()
+            ),
+        ).file("src/lib.rs", "");
+    let p = p.build();
+
+    // Point CARGO_HOME inside the project so the git checkout of `dep`
+    // ends up underneath the `foo` workspace root.
+    p.cargo("build")
+        .env("CARGO_HOME", p.root().join(".cargo"))
+        .run();
+}
+
+/* FIXME: Disabled — this test currently fails because of how workspace.exclude and workspace.members interact.
+#[test]
+fn include_and_exclude() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [workspace]
+            members = ["foo"]
+            exclude = ["foo/bar"]
+            "#)
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "")
+        .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("foo/bar/src/lib.rs", "");
+    p.build();
+
+    p.cargo("build").cwd(p.root().join("foo")).run();
+    assert!(p.root().join("target").is_dir());
+    assert!(!p.root().join("foo/target").is_dir());
+    p.cargo("build").cwd(p.root().join("foo/bar")).run();
+    assert!(p.root().join("foo/bar/target").is_dir());
+}
+*/
+
+/// Setting CARGO_HOME to the workspace root itself must not break builds:
+/// after a first build produces a lockfile, `build --frozen` (no network,
+/// no lockfile changes allowed) still succeeds with that CARGO_HOME.
+#[test]
+fn cargo_home_at_root_works() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+
+            [workspace]
+            members = ["a"]
+        "#,
+        ).file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    p.cargo("build --frozen").env("CARGO_HOME", p.root()).run();
+}
+
+/// A relative path in the RUSTC env var (`./foo`) must be usable as the
+/// compiler. The test first builds a tiny pass-through wrapper that just
+/// re-invokes the real `rustc` with the same args, copies it into a second
+/// project's root, then builds that project with RUSTC pointing at it.
+#[test]
+fn relative_rustc() {
+    // Project 1: compile the rustc proxy binary (`foo`).
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            use std::process::Command;
+            use std::env;
+
+            fn main() {
+                let mut cmd = Command::new("rustc");
+                for arg in env::args_os().skip(1) {
+                    cmd.arg(arg);
+                }
+                std::process::exit(cmd.status().unwrap().code().unwrap());
+            }
+        "#,
+        ).build();
+    p.cargo("build").run();
+
+    let src = p
+        .root()
+        .join("target/debug/foo")
+        .with_extension(env::consts::EXE_EXTENSION);
+
+    // A registry dependency so the second build actually compiles something
+    // through the proxy compiler.
+    Package::new("a", "0.1.0").publish();
+
+    // Project 2: shadows `p`; depends on `a` from the registry.
+    let p = project()
+        .at("lib")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "lib"
+            version = "0.1.0"
+
+            [dependencies]
+            a = "0.1"
+        "#,
+        ).file("src/lib.rs", "")
+        .build();
+
+    // Drop the proxy into project 2's root so `./foo` resolves there.
+    fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap();
+
+    let file = format!("./foo{}", env::consts::EXE_SUFFIX);
+    p.cargo("build").env("RUSTC", &file).run();
+}
+
+/// `cargo rustc` / `cargo rustdoc` invoked at the root of a *virtual*
+/// workspace (no [package] section) must fail: both commands operate on a
+/// single actual package, and the stderr must say so.
+#[test]
+fn ws_rustc_err() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a"]
+        "#,
+        ).file("a/Cargo.toml", &basic_lib_manifest("a"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("rustc")
+        .with_status(101)
+        .with_stderr("[ERROR] [..]against an actual package[..]")
+        .run();
+
+    p.cargo("rustdoc")
+        .with_status(101)
+        .with_stderr("[ERROR] [..]against an actual package[..]")
+        .run();
+}